tcg/riscv: Remove branch-over-branch fallback

Since 7ecd02a06f, we are prepared to re-start code generation
with a smaller TB if a relocation is out of range.  We no longer
need to leave a nop in the stream Just In Case.

Reviewed-by: Alistair Francis <alistair.francis@wdc.com>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Author: Richard Henderson
Date:   2020-11-04 12:43:20 -08:00
Commit: 4b6a52d01e
Parent: 844d0442a5
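Before the diff, a note on what the surviving short-branch relocation does. The code below is a minimal illustrative sketch, not QEMU's implementation: the *_sketch names are hypothetical stand-ins for the backend's sextreg, encode_sbimm12, and reloc_sbimm12, while the 0x01fff07f field mask and the sign-extension range test are taken from the code visible in the hunks that follow.

    #include <stdbool.h>
    #include <stdint.h>

    /* Sign-extend the 'len'-bit field at bit 'pos' (cf. QEMU's sextract64,
       which the riscv backend wraps as sextreg). */
    static int64_t sextract64_sketch(uint64_t value, int pos, int len)
    {
        return ((int64_t)(value << (64 - len - pos))) >> (64 - len);
    }

    /* Scatter a byte offset into the B-type immediate fields:
       imm[12] -> bit 31, imm[10:5] -> bits 30:25,
       imm[4:1] -> bits 11:8, imm[11] -> bit 7. */
    static uint32_t encode_sbimm12_sketch(uint32_t imm)
    {
        return ((imm & 0x1000) << 19) | ((imm & 0x07e0) << 20)
             | ((imm & 0x001e) << 7)  | ((imm & 0x0800) >> 4);
    }

    /* Patch a conditional branch in place; report failure when the target
       is out of range so the caller can restart with a smaller TB. */
    static bool reloc_sbimm12_sketch(uint32_t *src, const uint32_t *target)
    {
        intptr_t offset = (intptr_t)target - (intptr_t)src;

        if (offset != sextract64_sketch(offset, 0, 12)) {
            return false;   /* out of range: regenerate the TB smaller */
        }
        /* 0x01fff07f keeps opcode, funct3, rs1, rs2; clears the offset bits. */
        *src = (*src & 0x01fff07f) | encode_sbimm12_sketch((uint32_t)offset);
        return true;
    }

It is this false return, honored since 7ecd02a06f, that makes the nop padding and the branch-over-branch rewrite below unnecessary.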

@@ -469,43 +469,16 @@ static bool reloc_call(tcg_insn_unit *code_ptr, const tcg_insn_unit *target)
 static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                         intptr_t value, intptr_t addend)
 {
-    uint32_t insn = *code_ptr;
-    intptr_t diff;
-    bool short_jmp;
-
     tcg_debug_assert(addend == 0);
-
     switch (type) {
     case R_RISCV_BRANCH:
-        diff = value - (uintptr_t)code_ptr;
-        short_jmp = diff == sextreg(diff, 0, 12);
-        if (short_jmp) {
-            return reloc_sbimm12(code_ptr, (tcg_insn_unit *)value);
-        } else {
-            /* Invert the condition */
-            insn = insn ^ (1 << 12);
-            /* Clear the offset */
-            insn &= 0x01fff07f;
-            /* Set the offset to the PC + 8 */
-            insn |= encode_sbimm12(8);
-
-            /* Move forward */
-            code_ptr[0] = insn;
-
-            /* Overwrite the NOP with jal x0,value */
-            diff = value - (uintptr_t)(code_ptr + 1);
-            insn = encode_uj(OPC_JAL, TCG_REG_ZERO, diff);
-            code_ptr[1] = insn;
-            return true;
-        }
-        break;
+        return reloc_sbimm12(code_ptr, (tcg_insn_unit *)value);
     case R_RISCV_JAL:
         return reloc_jimm20(code_ptr, (tcg_insn_unit *)value);
     case R_RISCV_CALL:
         return reloc_call(code_ptr, (tcg_insn_unit *)value);
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
@@ -779,21 +752,8 @@ static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
         arg2 = t;
     }
 
-    if (l->has_value) {
-        intptr_t diff = tcg_pcrel_diff(s, l->u.value_ptr);
-        if (diff == sextreg(diff, 0, 12)) {
-            tcg_out_opc_branch(s, op, arg1, arg2, diff);
-        } else {
-            /* Invert the conditional branch. */
-            tcg_out_opc_branch(s, op ^ (1 << 12), arg1, arg2, 8);
-            tcg_out_opc_jump(s, OPC_JAL, TCG_REG_ZERO, diff - 4);
-        }
-    } else {
-        tcg_out_reloc(s, s->code_ptr, R_RISCV_BRANCH, l, 0);
-        tcg_out_opc_branch(s, op, arg1, arg2, 0);
-        /* NOP to allow patching later */
-        tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_ZERO, TCG_REG_ZERO, 0);
-    }
+    tcg_out_reloc(s, s->code_ptr, R_RISCV_BRANCH, l, 0);
+    tcg_out_opc_branch(s, op, arg1, arg2, 0);
 }
 
 static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
@@ -1009,8 +969,6 @@ static void tcg_out_tlb_load(TCGContext *s, TCGReg addrl,
     /* Compare masked address with the TLB entry. */
     label_ptr[0] = s->code_ptr;
     tcg_out_opc_branch(s, OPC_BNE, TCG_REG_TMP0, TCG_REG_TMP1, 0);
-    /* NOP to allow patching later */
-    tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_ZERO, TCG_REG_ZERO, 0);
 
     /* TLB Hit - translate address using addend. */
     if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
@@ -1054,8 +1012,7 @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
     }
 
     /* resolve label address */
-    if (!patch_reloc(l->label_ptr[0], R_RISCV_BRANCH,
-                     (intptr_t) s->code_ptr, 0)) {
+    if (!reloc_sbimm12(l->label_ptr[0], s->code_ptr)) {
         return false;
     }
@@ -1089,8 +1046,7 @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
     }
 
     /* resolve label address */
-    if (!patch_reloc(l->label_ptr[0], R_RISCV_BRANCH,
-                     (intptr_t) s->code_ptr, 0)) {
+    if (!reloc_sbimm12(l->label_ptr[0], s->code_ptr)) {
         return false;
     }