tcg/aarch64: Use LDP to load tlb mask+table
This changes the code generation for the tlb from e.g.

    ldur     x0, [x19, #0xffffffffffffffe0]
    ldur     x1, [x19, #0xffffffffffffffe8]
    and      x0, x0, x20, lsr #8
    add      x1, x1, x0
    ldr      x0, [x1]
    ldr      x1, [x1, #0x18]

to

    ldp      x0, x1, [x19, #-0x20]
    and      x0, x0, x20, lsr #8
    add      x1, x1, x0
    ldr      x0, [x1]
    ldr      x1, [x1, #0x18]

Acked-by: Alistair Francis <alistair.francis@wdc.com>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
parent e8b5fae516
commit 65b23204d6
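The single LDP is only legal here because mask and table occupy the first two 64-bit slots of CPUTLBDescFast, which is exactly what the build-time asserts added in the first hunk pin down. The following is a minimal standalone sketch of that layout constraint, not QEMU code; struct fast_tlb and its field values are made up for illustration.

/* Minimal sketch (not QEMU code): one load-pair can fetch both fields
 * only if they sit in adjacent 64-bit slots starting at offset 0.
 * struct fast_tlb is a hypothetical stand-in for CPUTLBDescFast. */
#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct fast_tlb {
    uint64_t mask;      /* expected at offset 0 */
    uint64_t table;     /* expected at offset 8 */
};

/* Compile-time checks analogous to the QEMU_BUILD_BUG_ON lines added by
 * the patch: if either offset moved, a single LDP could no longer cover
 * both fields. */
static_assert(offsetof(struct fast_tlb, mask) == 0, "mask not at offset 0");
static_assert(offsetof(struct fast_tlb, table) == 8, "table not at offset 8");

int main(void)
{
    struct fast_tlb f = { .mask = 0x1fe0, .table = 0x7f0000 };  /* arbitrary values */

    /* Two adjacent loads from &f; on AArch64 a compiler may fuse these
     * into one ldp, which is what the backend now emits by hand. */
    uint64_t m = f.mask;
    uint64_t t = f.table;
    printf("mask=%#llx table=%#llx\n",
           (unsigned long long)m, (unsigned long long)t);
    return 0;
}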
@@ -1641,6 +1641,10 @@ static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOpIdx oi,
 QEMU_BUILD_BUG_ON(TLB_MASK_TABLE_OFS(0) > 0);
 QEMU_BUILD_BUG_ON(TLB_MASK_TABLE_OFS(0) < -512);
 
+/* These offsets are built into the LDP below.  */
+QEMU_BUILD_BUG_ON(offsetof(CPUTLBDescFast, mask) != 0);
+QEMU_BUILD_BUG_ON(offsetof(CPUTLBDescFast, table) != 8);
+
 /* Load and compare a TLB entry, emitting the conditional jump to the
    slow path for the failure case, which will be patched later when finalizing
    the slow path.  Generated code returns the host addend in X1,
@@ -1649,23 +1653,20 @@ static void tcg_out_tlb_read(TCGContext *s, TCGReg addr_reg, TCGMemOp opc,
                              tcg_insn_unit **label_ptr, int mem_index,
                              bool is_read)
 {
-    int fast_ofs = TLB_MASK_TABLE_OFS(mem_index);
-    int mask_ofs = fast_ofs + offsetof(CPUTLBDescFast, mask);
-    int table_ofs = fast_ofs + offsetof(CPUTLBDescFast, table);
     unsigned a_bits = get_alignment_bits(opc);
     unsigned s_bits = opc & MO_SIZE;
     unsigned a_mask = (1u << a_bits) - 1;
     unsigned s_mask = (1u << s_bits) - 1;
-    TCGReg mask_base = TCG_AREG0, table_base = TCG_AREG0, x3;
+    TCGReg x3;
     TCGType mask_type;
     uint64_t compare_mask;
 
     mask_type = (TARGET_PAGE_BITS + CPU_TLB_DYN_MAX_BITS > 32
                  ? TCG_TYPE_I64 : TCG_TYPE_I32);
 
-    /* Load tlb_mask[mmu_idx] and tlb_table[mmu_idx].  */
-    tcg_out_ld(s, mask_type, TCG_REG_X0, mask_base, mask_ofs);
-    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_X1, table_base, table_ofs);
+    /* Load env_tlb(env)->f[mmu_idx].{mask,table} into {x0,x1}.  */
+    tcg_out_insn(s, 3314, LDP, TCG_REG_X0, TCG_REG_X1, TCG_AREG0,
+                 TLB_MASK_TABLE_OFS(mem_index), 1, 0);
 
     /* Extract the TLB index from the address into X0.  */
     tcg_out_insn(s, 3502S, AND_LSR, mask_type == TCG_TYPE_I64,
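A note on the two range asserts kept as context in the first hunk: a 64-bit LDP encodes its offset as a 7-bit signed immediate scaled by 8, so only offsets in [-512, +504] that are multiples of 8 are encodable, and TLB_MASK_TABLE_OFS must stay in that window for the instruction emitted above to exist. Below is a small illustration of that window; ldp64_offset_ok is a made-up helper, not a QEMU function.

/* Illustration only: mirrors the encodable-offset window of a 64-bit
 * LDP (7-bit signed immediate, scaled by 8). */
#include <stdbool.h>
#include <stdio.h>

static bool ldp64_offset_ok(int ofs)
{
    return ofs >= -512 && ofs <= 504 && (ofs % 8) == 0;
}

int main(void)
{
    /* -0x20 is the offset seen in the commit message's ldp example. */
    printf("-0x20: %s\n", ldp64_offset_ok(-0x20) ? "encodable" : "not encodable");
    printf("-520:  %s\n", ldp64_offset_ok(-520)  ? "encodable" : "not encodable");
    return 0;
}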