target/arm: Add wrapper macros for accessing tbflags

We're about to split tbflags into two parts.  These macros
will ensure that the correct part is used with the correct
set of bits.

Reviewed-by: Peter Maydell <peter.maydell@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Message-id: 20210419202257.161730-5-richard.henderson@linaro.org
Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
This commit is contained in:
Richard Henderson 2021-04-19 13:22:30 -07:00 committed by Peter Maydell
parent ae6eb1e9b3
commit a729a46b05
5 changed files with 101 additions and 92 deletions

View File

@@ -3462,6 +3462,26 @@ FIELD(TBFLAG_A64, TCMA, 16, 2)
FIELD(TBFLAG_A64, MTE_ACTIVE, 18, 1) FIELD(TBFLAG_A64, MTE_ACTIVE, 18, 1)
FIELD(TBFLAG_A64, MTE0_ACTIVE, 19, 1) FIELD(TBFLAG_A64, MTE0_ACTIVE, 19, 1)
/*
* Helpers for using the above.
*/
#define DP_TBFLAG_ANY(DST, WHICH, VAL) \
(DST = FIELD_DP32(DST, TBFLAG_ANY, WHICH, VAL))
#define DP_TBFLAG_A64(DST, WHICH, VAL) \
(DST = FIELD_DP32(DST, TBFLAG_A64, WHICH, VAL))
#define DP_TBFLAG_A32(DST, WHICH, VAL) \
(DST = FIELD_DP32(DST, TBFLAG_A32, WHICH, VAL))
#define DP_TBFLAG_M32(DST, WHICH, VAL) \
(DST = FIELD_DP32(DST, TBFLAG_M32, WHICH, VAL))
#define DP_TBFLAG_AM32(DST, WHICH, VAL) \
(DST = FIELD_DP32(DST, TBFLAG_AM32, WHICH, VAL))
#define EX_TBFLAG_ANY(IN, WHICH) FIELD_EX32(IN, TBFLAG_ANY, WHICH)
#define EX_TBFLAG_A64(IN, WHICH) FIELD_EX32(IN, TBFLAG_A64, WHICH)
#define EX_TBFLAG_A32(IN, WHICH) FIELD_EX32(IN, TBFLAG_A32, WHICH)
#define EX_TBFLAG_M32(IN, WHICH) FIELD_EX32(IN, TBFLAG_M32, WHICH)
#define EX_TBFLAG_AM32(IN, WHICH) FIELD_EX32(IN, TBFLAG_AM32, WHICH)
/** /**
* cpu_mmu_index: * cpu_mmu_index:
* @env: The cpu environment * @env: The cpu environment
@@ -3472,7 +3492,7 @@ FIELD(TBFLAG_A64, MTE0_ACTIVE, 19, 1)
*/ */
static inline int cpu_mmu_index(CPUARMState *env, bool ifetch) static inline int cpu_mmu_index(CPUARMState *env, bool ifetch)
{ {
return FIELD_EX32(env->hflags, TBFLAG_ANY, MMUIDX); return EX_TBFLAG_ANY(env->hflags, MMUIDX);
} }
static inline bool bswap_code(bool sctlr_b) static inline bool bswap_code(bool sctlr_b)

View File

@@ -1020,7 +1020,7 @@ void HELPER(exception_return)(CPUARMState *env, uint64_t new_pc)
* the hflags rebuild, since we can pull the composite TBII field * the hflags rebuild, since we can pull the composite TBII field
* from there. * from there.
*/ */
tbii = FIELD_EX32(env->hflags, TBFLAG_A64, TBII); tbii = EX_TBFLAG_A64(env->hflags, TBII);
if ((tbii >> extract64(new_pc, 55, 1)) & 1) { if ((tbii >> extract64(new_pc, 55, 1)) & 1) {
/* TBI is enabled. */ /* TBI is enabled. */
int core_mmu_idx = cpu_mmu_index(env, false); int core_mmu_idx = cpu_mmu_index(env, false);

View File

@@ -12987,12 +12987,11 @@ ARMMMUIdx arm_stage1_mmu_idx(CPUARMState *env)
static uint32_t rebuild_hflags_common(CPUARMState *env, int fp_el, static uint32_t rebuild_hflags_common(CPUARMState *env, int fp_el,
ARMMMUIdx mmu_idx, uint32_t flags) ARMMMUIdx mmu_idx, uint32_t flags)
{ {
flags = FIELD_DP32(flags, TBFLAG_ANY, FPEXC_EL, fp_el); DP_TBFLAG_ANY(flags, FPEXC_EL, fp_el);
flags = FIELD_DP32(flags, TBFLAG_ANY, MMUIDX, DP_TBFLAG_ANY(flags, MMUIDX, arm_to_core_mmu_idx(mmu_idx));
arm_to_core_mmu_idx(mmu_idx));
if (arm_singlestep_active(env)) { if (arm_singlestep_active(env)) {
flags = FIELD_DP32(flags, TBFLAG_ANY, SS_ACTIVE, 1); DP_TBFLAG_ANY(flags, SS_ACTIVE, 1);
} }
return flags; return flags;
} }
@@ -13003,12 +13002,12 @@ static uint32_t rebuild_hflags_common_32(CPUARMState *env, int fp_el,
bool sctlr_b = arm_sctlr_b(env); bool sctlr_b = arm_sctlr_b(env);
if (sctlr_b) { if (sctlr_b) {
flags = FIELD_DP32(flags, TBFLAG_A32, SCTLR__B, 1); DP_TBFLAG_A32(flags, SCTLR__B, 1);
} }
if (arm_cpu_data_is_big_endian_a32(env, sctlr_b)) { if (arm_cpu_data_is_big_endian_a32(env, sctlr_b)) {
flags = FIELD_DP32(flags, TBFLAG_ANY, BE_DATA, 1); DP_TBFLAG_ANY(flags, BE_DATA, 1);
} }
flags = FIELD_DP32(flags, TBFLAG_A32, NS, !access_secure_reg(env)); DP_TBFLAG_A32(flags, NS, !access_secure_reg(env));
return rebuild_hflags_common(env, fp_el, mmu_idx, flags); return rebuild_hflags_common(env, fp_el, mmu_idx, flags);
} }
@@ -13019,7 +13018,7 @@ static uint32_t rebuild_hflags_m32(CPUARMState *env, int fp_el,
uint32_t flags = 0; uint32_t flags = 0;
if (arm_v7m_is_handler_mode(env)) { if (arm_v7m_is_handler_mode(env)) {
flags = FIELD_DP32(flags, TBFLAG_M32, HANDLER, 1); DP_TBFLAG_M32(flags, HANDLER, 1);
} }
/* /*
@@ -13030,7 +13029,7 @@ static uint32_t rebuild_hflags_m32(CPUARMState *env, int fp_el,
if (arm_feature(env, ARM_FEATURE_V8) && if (arm_feature(env, ARM_FEATURE_V8) &&
!((mmu_idx & ARM_MMU_IDX_M_NEGPRI) && !((mmu_idx & ARM_MMU_IDX_M_NEGPRI) &&
(env->v7m.ccr[env->v7m.secure] & R_V7M_CCR_STKOFHFNMIGN_MASK))) { (env->v7m.ccr[env->v7m.secure] & R_V7M_CCR_STKOFHFNMIGN_MASK))) {
flags = FIELD_DP32(flags, TBFLAG_M32, STACKCHECK, 1); DP_TBFLAG_M32(flags, STACKCHECK, 1);
} }
return rebuild_hflags_common_32(env, fp_el, mmu_idx, flags); return rebuild_hflags_common_32(env, fp_el, mmu_idx, flags);
@@ -13040,8 +13039,7 @@ static uint32_t rebuild_hflags_aprofile(CPUARMState *env)
{ {
int flags = 0; int flags = 0;
flags = FIELD_DP32(flags, TBFLAG_ANY, DEBUG_TARGET_EL, DP_TBFLAG_ANY(flags, DEBUG_TARGET_EL, arm_debug_target_el(env));
arm_debug_target_el(env));
return flags; return flags;
} }
@@ -13051,12 +13049,12 @@ static uint32_t rebuild_hflags_a32(CPUARMState *env, int fp_el,
uint32_t flags = rebuild_hflags_aprofile(env); uint32_t flags = rebuild_hflags_aprofile(env);
if (arm_el_is_aa64(env, 1)) { if (arm_el_is_aa64(env, 1)) {
flags = FIELD_DP32(flags, TBFLAG_A32, VFPEN, 1); DP_TBFLAG_A32(flags, VFPEN, 1);
} }
if (arm_current_el(env) < 2 && env->cp15.hstr_el2 && if (arm_current_el(env) < 2 && env->cp15.hstr_el2 &&
(arm_hcr_el2_eff(env) & (HCR_E2H | HCR_TGE)) != (HCR_E2H | HCR_TGE)) { (arm_hcr_el2_eff(env) & (HCR_E2H | HCR_TGE)) != (HCR_E2H | HCR_TGE)) {
flags = FIELD_DP32(flags, TBFLAG_A32, HSTR_ACTIVE, 1); DP_TBFLAG_A32(flags, HSTR_ACTIVE, 1);
} }
return rebuild_hflags_common_32(env, fp_el, mmu_idx, flags); return rebuild_hflags_common_32(env, fp_el, mmu_idx, flags);
@@ -13071,14 +13069,14 @@ static uint32_t rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
uint64_t sctlr; uint64_t sctlr;
int tbii, tbid; int tbii, tbid;
flags = FIELD_DP32(flags, TBFLAG_ANY, AARCH64_STATE, 1); DP_TBFLAG_ANY(flags, AARCH64_STATE, 1);
/* Get control bits for tagged addresses. */ /* Get control bits for tagged addresses. */
tbid = aa64_va_parameter_tbi(tcr, mmu_idx); tbid = aa64_va_parameter_tbi(tcr, mmu_idx);
tbii = tbid & ~aa64_va_parameter_tbid(tcr, mmu_idx); tbii = tbid & ~aa64_va_parameter_tbid(tcr, mmu_idx);
flags = FIELD_DP32(flags, TBFLAG_A64, TBII, tbii); DP_TBFLAG_A64(flags, TBII, tbii);
flags = FIELD_DP32(flags, TBFLAG_A64, TBID, tbid); DP_TBFLAG_A64(flags, TBID, tbid);
if (cpu_isar_feature(aa64_sve, env_archcpu(env))) { if (cpu_isar_feature(aa64_sve, env_archcpu(env))) {
int sve_el = sve_exception_el(env, el); int sve_el = sve_exception_el(env, el);
@@ -13093,14 +13091,14 @@ static uint32_t rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
} else { } else {
zcr_len = sve_zcr_len_for_el(env, el); zcr_len = sve_zcr_len_for_el(env, el);
} }
flags = FIELD_DP32(flags, TBFLAG_A64, SVEEXC_EL, sve_el); DP_TBFLAG_A64(flags, SVEEXC_EL, sve_el);
flags = FIELD_DP32(flags, TBFLAG_A64, ZCR_LEN, zcr_len); DP_TBFLAG_A64(flags, ZCR_LEN, zcr_len);
} }
sctlr = regime_sctlr(env, stage1); sctlr = regime_sctlr(env, stage1);
if (arm_cpu_data_is_big_endian_a64(el, sctlr)) { if (arm_cpu_data_is_big_endian_a64(el, sctlr)) {
flags = FIELD_DP32(flags, TBFLAG_ANY, BE_DATA, 1); DP_TBFLAG_ANY(flags, BE_DATA, 1);
} }
if (cpu_isar_feature(aa64_pauth, env_archcpu(env))) { if (cpu_isar_feature(aa64_pauth, env_archcpu(env))) {
@@ -13111,14 +13109,14 @@ static uint32_t rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
* The decision of which action to take is left to a helper. * The decision of which action to take is left to a helper.
*/ */
if (sctlr & (SCTLR_EnIA | SCTLR_EnIB | SCTLR_EnDA | SCTLR_EnDB)) { if (sctlr & (SCTLR_EnIA | SCTLR_EnIB | SCTLR_EnDA | SCTLR_EnDB)) {
flags = FIELD_DP32(flags, TBFLAG_A64, PAUTH_ACTIVE, 1); DP_TBFLAG_A64(flags, PAUTH_ACTIVE, 1);
} }
} }
if (cpu_isar_feature(aa64_bti, env_archcpu(env))) { if (cpu_isar_feature(aa64_bti, env_archcpu(env))) {
/* Note that SCTLR_EL[23].BT == SCTLR_BT1. */ /* Note that SCTLR_EL[23].BT == SCTLR_BT1. */
if (sctlr & (el == 0 ? SCTLR_BT0 : SCTLR_BT1)) { if (sctlr & (el == 0 ? SCTLR_BT0 : SCTLR_BT1)) {
flags = FIELD_DP32(flags, TBFLAG_A64, BT, 1); DP_TBFLAG_A64(flags, BT, 1);
} }
} }
@@ -13130,7 +13128,7 @@ static uint32_t rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
case ARMMMUIdx_SE10_1: case ARMMMUIdx_SE10_1:
case ARMMMUIdx_SE10_1_PAN: case ARMMMUIdx_SE10_1_PAN:
/* TODO: ARMv8.3-NV */ /* TODO: ARMv8.3-NV */
flags = FIELD_DP32(flags, TBFLAG_A64, UNPRIV, 1); DP_TBFLAG_A64(flags, UNPRIV, 1);
break; break;
case ARMMMUIdx_E20_2: case ARMMMUIdx_E20_2:
case ARMMMUIdx_E20_2_PAN: case ARMMMUIdx_E20_2_PAN:
@@ -13141,7 +13139,7 @@ static uint32_t rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
* gated by HCR_EL2.<E2H,TGE> == '11', and so is LDTR. * gated by HCR_EL2.<E2H,TGE> == '11', and so is LDTR.
*/ */
if (env->cp15.hcr_el2 & HCR_TGE) { if (env->cp15.hcr_el2 & HCR_TGE) {
flags = FIELD_DP32(flags, TBFLAG_A64, UNPRIV, 1); DP_TBFLAG_A64(flags, UNPRIV, 1);
} }
break; break;
default: default:
@@ -13159,24 +13157,23 @@ static uint32_t rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
* 4) If no Allocation Tag Access, then all accesses are Unchecked. * 4) If no Allocation Tag Access, then all accesses are Unchecked.
*/ */
if (allocation_tag_access_enabled(env, el, sctlr)) { if (allocation_tag_access_enabled(env, el, sctlr)) {
flags = FIELD_DP32(flags, TBFLAG_A64, ATA, 1); DP_TBFLAG_A64(flags, ATA, 1);
if (tbid if (tbid
&& !(env->pstate & PSTATE_TCO) && !(env->pstate & PSTATE_TCO)
&& (sctlr & (el == 0 ? SCTLR_TCF0 : SCTLR_TCF))) { && (sctlr & (el == 0 ? SCTLR_TCF0 : SCTLR_TCF))) {
flags = FIELD_DP32(flags, TBFLAG_A64, MTE_ACTIVE, 1); DP_TBFLAG_A64(flags, MTE_ACTIVE, 1);
} }
} }
/* And again for unprivileged accesses, if required. */ /* And again for unprivileged accesses, if required. */
if (FIELD_EX32(flags, TBFLAG_A64, UNPRIV) if (EX_TBFLAG_A64(flags, UNPRIV)
&& tbid && tbid
&& !(env->pstate & PSTATE_TCO) && !(env->pstate & PSTATE_TCO)
&& (sctlr & SCTLR_TCF0) && (sctlr & SCTLR_TCF0)
&& allocation_tag_access_enabled(env, 0, sctlr)) { && allocation_tag_access_enabled(env, 0, sctlr)) {
flags = FIELD_DP32(flags, TBFLAG_A64, MTE0_ACTIVE, 1); DP_TBFLAG_A64(flags, MTE0_ACTIVE, 1);
} }
/* Cache TCMA as well as TBI. */ /* Cache TCMA as well as TBI. */
flags = FIELD_DP32(flags, TBFLAG_A64, TCMA, DP_TBFLAG_A64(flags, TCMA, aa64_va_parameter_tcma(tcr, mmu_idx));
aa64_va_parameter_tcma(tcr, mmu_idx));
} }
return rebuild_hflags_common(env, fp_el, mmu_idx, flags); return rebuild_hflags_common(env, fp_el, mmu_idx, flags);
@@ -13272,10 +13269,10 @@ void cpu_get_tb_cpu_state(CPUARMState *env, target_ulong *pc,
*cs_base = 0; *cs_base = 0;
assert_hflags_rebuild_correctly(env); assert_hflags_rebuild_correctly(env);
if (FIELD_EX32(flags, TBFLAG_ANY, AARCH64_STATE)) { if (EX_TBFLAG_ANY(flags, AARCH64_STATE)) {
*pc = env->pc; *pc = env->pc;
if (cpu_isar_feature(aa64_bti, env_archcpu(env))) { if (cpu_isar_feature(aa64_bti, env_archcpu(env))) {
flags = FIELD_DP32(flags, TBFLAG_A64, BTYPE, env->btype); DP_TBFLAG_A64(flags, BTYPE, env->btype);
} }
} else { } else {
*pc = env->regs[15]; *pc = env->regs[15];
@@ -13284,7 +13281,7 @@ void cpu_get_tb_cpu_state(CPUARMState *env, target_ulong *pc,
if (arm_feature(env, ARM_FEATURE_M_SECURITY) && if (arm_feature(env, ARM_FEATURE_M_SECURITY) &&
FIELD_EX32(env->v7m.fpccr[M_REG_S], V7M_FPCCR, S) FIELD_EX32(env->v7m.fpccr[M_REG_S], V7M_FPCCR, S)
!= env->v7m.secure) { != env->v7m.secure) {
flags = FIELD_DP32(flags, TBFLAG_M32, FPCCR_S_WRONG, 1); DP_TBFLAG_M32(flags, FPCCR_S_WRONG, 1);
} }
if ((env->v7m.fpccr[env->v7m.secure] & R_V7M_FPCCR_ASPEN_MASK) && if ((env->v7m.fpccr[env->v7m.secure] & R_V7M_FPCCR_ASPEN_MASK) &&
@@ -13296,12 +13293,12 @@ void cpu_get_tb_cpu_state(CPUARMState *env, target_ulong *pc,
* active FP context; we must create a new FP context before * active FP context; we must create a new FP context before
* executing any FP insn. * executing any FP insn.
*/ */
flags = FIELD_DP32(flags, TBFLAG_M32, NEW_FP_CTXT_NEEDED, 1); DP_TBFLAG_M32(flags, NEW_FP_CTXT_NEEDED, 1);
} }
bool is_secure = env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_S_MASK; bool is_secure = env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_S_MASK;
if (env->v7m.fpccr[is_secure] & R_V7M_FPCCR_LSPACT_MASK) { if (env->v7m.fpccr[is_secure] & R_V7M_FPCCR_LSPACT_MASK) {
flags = FIELD_DP32(flags, TBFLAG_M32, LSPACT, 1); DP_TBFLAG_M32(flags, LSPACT, 1);
} }
} else { } else {
/* /*
@@ -13309,21 +13306,18 @@ void cpu_get_tb_cpu_state(CPUARMState *env, target_ulong *pc,
* Note that VECLEN+VECSTRIDE are RES0 for M-profile. * Note that VECLEN+VECSTRIDE are RES0 for M-profile.
*/ */
if (arm_feature(env, ARM_FEATURE_XSCALE)) { if (arm_feature(env, ARM_FEATURE_XSCALE)) {
flags = FIELD_DP32(flags, TBFLAG_A32, DP_TBFLAG_A32(flags, XSCALE_CPAR, env->cp15.c15_cpar);
XSCALE_CPAR, env->cp15.c15_cpar);
} else { } else {
flags = FIELD_DP32(flags, TBFLAG_A32, VECLEN, DP_TBFLAG_A32(flags, VECLEN, env->vfp.vec_len);
env->vfp.vec_len); DP_TBFLAG_A32(flags, VECSTRIDE, env->vfp.vec_stride);
flags = FIELD_DP32(flags, TBFLAG_A32, VECSTRIDE,
env->vfp.vec_stride);
} }
if (env->vfp.xregs[ARM_VFP_FPEXC] & (1 << 30)) { if (env->vfp.xregs[ARM_VFP_FPEXC] & (1 << 30)) {
flags = FIELD_DP32(flags, TBFLAG_A32, VFPEN, 1); DP_TBFLAG_A32(flags, VFPEN, 1);
} }
} }
flags = FIELD_DP32(flags, TBFLAG_AM32, THUMB, env->thumb); DP_TBFLAG_AM32(flags, THUMB, env->thumb);
flags = FIELD_DP32(flags, TBFLAG_AM32, CONDEXEC, env->condexec_bits); DP_TBFLAG_AM32(flags, CONDEXEC, env->condexec_bits);
} }
/* /*
@@ -13335,9 +13329,8 @@ void cpu_get_tb_cpu_state(CPUARMState *env, target_ulong *pc,
* 1 1 Active-not-pending * 1 1 Active-not-pending
* SS_ACTIVE is set in hflags; PSTATE__SS is computed every TB. * SS_ACTIVE is set in hflags; PSTATE__SS is computed every TB.
*/ */
if (FIELD_EX32(flags, TBFLAG_ANY, SS_ACTIVE) && if (EX_TBFLAG_ANY(flags, SS_ACTIVE) && (env->pstate & PSTATE_SS)) {
(env->pstate & PSTATE_SS)) { DP_TBFLAG_ANY(flags, PSTATE__SS, 1);
flags = FIELD_DP32(flags, TBFLAG_ANY, PSTATE__SS, 1);
} }
*pflags = flags; *pflags = flags;

View File

@@ -14684,28 +14684,28 @@ static void aarch64_tr_init_disas_context(DisasContextBase *dcbase,
!arm_el_is_aa64(env, 3); !arm_el_is_aa64(env, 3);
dc->thumb = 0; dc->thumb = 0;
dc->sctlr_b = 0; dc->sctlr_b = 0;
dc->be_data = FIELD_EX32(tb_flags, TBFLAG_ANY, BE_DATA) ? MO_BE : MO_LE; dc->be_data = EX_TBFLAG_ANY(tb_flags, BE_DATA) ? MO_BE : MO_LE;
dc->condexec_mask = 0; dc->condexec_mask = 0;
dc->condexec_cond = 0; dc->condexec_cond = 0;
core_mmu_idx = FIELD_EX32(tb_flags, TBFLAG_ANY, MMUIDX); core_mmu_idx = EX_TBFLAG_ANY(tb_flags, MMUIDX);
dc->mmu_idx = core_to_aa64_mmu_idx(core_mmu_idx); dc->mmu_idx = core_to_aa64_mmu_idx(core_mmu_idx);
dc->tbii = FIELD_EX32(tb_flags, TBFLAG_A64, TBII); dc->tbii = EX_TBFLAG_A64(tb_flags, TBII);
dc->tbid = FIELD_EX32(tb_flags, TBFLAG_A64, TBID); dc->tbid = EX_TBFLAG_A64(tb_flags, TBID);
dc->tcma = FIELD_EX32(tb_flags, TBFLAG_A64, TCMA); dc->tcma = EX_TBFLAG_A64(tb_flags, TCMA);
dc->current_el = arm_mmu_idx_to_el(dc->mmu_idx); dc->current_el = arm_mmu_idx_to_el(dc->mmu_idx);
#if !defined(CONFIG_USER_ONLY) #if !defined(CONFIG_USER_ONLY)
dc->user = (dc->current_el == 0); dc->user = (dc->current_el == 0);
#endif #endif
dc->fp_excp_el = FIELD_EX32(tb_flags, TBFLAG_ANY, FPEXC_EL); dc->fp_excp_el = EX_TBFLAG_ANY(tb_flags, FPEXC_EL);
dc->sve_excp_el = FIELD_EX32(tb_flags, TBFLAG_A64, SVEEXC_EL); dc->sve_excp_el = EX_TBFLAG_A64(tb_flags, SVEEXC_EL);
dc->sve_len = (FIELD_EX32(tb_flags, TBFLAG_A64, ZCR_LEN) + 1) * 16; dc->sve_len = (EX_TBFLAG_A64(tb_flags, ZCR_LEN) + 1) * 16;
dc->pauth_active = FIELD_EX32(tb_flags, TBFLAG_A64, PAUTH_ACTIVE); dc->pauth_active = EX_TBFLAG_A64(tb_flags, PAUTH_ACTIVE);
dc->bt = FIELD_EX32(tb_flags, TBFLAG_A64, BT); dc->bt = EX_TBFLAG_A64(tb_flags, BT);
dc->btype = FIELD_EX32(tb_flags, TBFLAG_A64, BTYPE); dc->btype = EX_TBFLAG_A64(tb_flags, BTYPE);
dc->unpriv = FIELD_EX32(tb_flags, TBFLAG_A64, UNPRIV); dc->unpriv = EX_TBFLAG_A64(tb_flags, UNPRIV);
dc->ata = FIELD_EX32(tb_flags, TBFLAG_A64, ATA); dc->ata = EX_TBFLAG_A64(tb_flags, ATA);
dc->mte_active[0] = FIELD_EX32(tb_flags, TBFLAG_A64, MTE_ACTIVE); dc->mte_active[0] = EX_TBFLAG_A64(tb_flags, MTE_ACTIVE);
dc->mte_active[1] = FIELD_EX32(tb_flags, TBFLAG_A64, MTE0_ACTIVE); dc->mte_active[1] = EX_TBFLAG_A64(tb_flags, MTE0_ACTIVE);
dc->vec_len = 0; dc->vec_len = 0;
dc->vec_stride = 0; dc->vec_stride = 0;
dc->cp_regs = arm_cpu->cp_regs; dc->cp_regs = arm_cpu->cp_regs;
@@ -14732,10 +14732,10 @@ static void aarch64_tr_init_disas_context(DisasContextBase *dcbase,
* emit code to generate a software step exception * emit code to generate a software step exception
* end the TB * end the TB
*/ */
dc->ss_active = FIELD_EX32(tb_flags, TBFLAG_ANY, SS_ACTIVE); dc->ss_active = EX_TBFLAG_ANY(tb_flags, SS_ACTIVE);
dc->pstate_ss = FIELD_EX32(tb_flags, TBFLAG_ANY, PSTATE__SS); dc->pstate_ss = EX_TBFLAG_ANY(tb_flags, PSTATE__SS);
dc->is_ldex = false; dc->is_ldex = false;
dc->debug_target_el = FIELD_EX32(tb_flags, TBFLAG_ANY, DEBUG_TARGET_EL); dc->debug_target_el = EX_TBFLAG_ANY(tb_flags, DEBUG_TARGET_EL);
/* Bound the number of insns to execute to those left on the page. */ /* Bound the number of insns to execute to those left on the page. */
bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4; bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;

View File

@@ -8864,46 +8864,42 @@ static void arm_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
*/ */
dc->secure_routed_to_el3 = arm_feature(env, ARM_FEATURE_EL3) && dc->secure_routed_to_el3 = arm_feature(env, ARM_FEATURE_EL3) &&
!arm_el_is_aa64(env, 3); !arm_el_is_aa64(env, 3);
dc->thumb = FIELD_EX32(tb_flags, TBFLAG_AM32, THUMB); dc->thumb = EX_TBFLAG_AM32(tb_flags, THUMB);
dc->be_data = FIELD_EX32(tb_flags, TBFLAG_ANY, BE_DATA) ? MO_BE : MO_LE; dc->be_data = EX_TBFLAG_ANY(tb_flags, BE_DATA) ? MO_BE : MO_LE;
condexec = FIELD_EX32(tb_flags, TBFLAG_AM32, CONDEXEC); condexec = EX_TBFLAG_AM32(tb_flags, CONDEXEC);
dc->condexec_mask = (condexec & 0xf) << 1; dc->condexec_mask = (condexec & 0xf) << 1;
dc->condexec_cond = condexec >> 4; dc->condexec_cond = condexec >> 4;
core_mmu_idx = FIELD_EX32(tb_flags, TBFLAG_ANY, MMUIDX); core_mmu_idx = EX_TBFLAG_ANY(tb_flags, MMUIDX);
dc->mmu_idx = core_to_arm_mmu_idx(env, core_mmu_idx); dc->mmu_idx = core_to_arm_mmu_idx(env, core_mmu_idx);
dc->current_el = arm_mmu_idx_to_el(dc->mmu_idx); dc->current_el = arm_mmu_idx_to_el(dc->mmu_idx);
#if !defined(CONFIG_USER_ONLY) #if !defined(CONFIG_USER_ONLY)
dc->user = (dc->current_el == 0); dc->user = (dc->current_el == 0);
#endif #endif
dc->fp_excp_el = FIELD_EX32(tb_flags, TBFLAG_ANY, FPEXC_EL); dc->fp_excp_el = EX_TBFLAG_ANY(tb_flags, FPEXC_EL);
if (arm_feature(env, ARM_FEATURE_M)) { if (arm_feature(env, ARM_FEATURE_M)) {
dc->vfp_enabled = 1; dc->vfp_enabled = 1;
dc->be_data = MO_TE; dc->be_data = MO_TE;
dc->v7m_handler_mode = FIELD_EX32(tb_flags, TBFLAG_M32, HANDLER); dc->v7m_handler_mode = EX_TBFLAG_M32(tb_flags, HANDLER);
dc->v8m_secure = arm_feature(env, ARM_FEATURE_M_SECURITY) && dc->v8m_secure = arm_feature(env, ARM_FEATURE_M_SECURITY) &&
regime_is_secure(env, dc->mmu_idx); regime_is_secure(env, dc->mmu_idx);
dc->v8m_stackcheck = FIELD_EX32(tb_flags, TBFLAG_M32, STACKCHECK); dc->v8m_stackcheck = EX_TBFLAG_M32(tb_flags, STACKCHECK);
dc->v8m_fpccr_s_wrong = dc->v8m_fpccr_s_wrong = EX_TBFLAG_M32(tb_flags, FPCCR_S_WRONG);
FIELD_EX32(tb_flags, TBFLAG_M32, FPCCR_S_WRONG);
dc->v7m_new_fp_ctxt_needed = dc->v7m_new_fp_ctxt_needed =
FIELD_EX32(tb_flags, TBFLAG_M32, NEW_FP_CTXT_NEEDED); EX_TBFLAG_M32(tb_flags, NEW_FP_CTXT_NEEDED);
dc->v7m_lspact = FIELD_EX32(tb_flags, TBFLAG_M32, LSPACT); dc->v7m_lspact = EX_TBFLAG_M32(tb_flags, LSPACT);
} else { } else {
dc->be_data = dc->debug_target_el = EX_TBFLAG_ANY(tb_flags, DEBUG_TARGET_EL);
FIELD_EX32(tb_flags, TBFLAG_ANY, BE_DATA) ? MO_BE : MO_LE; dc->sctlr_b = EX_TBFLAG_A32(tb_flags, SCTLR__B);
dc->debug_target_el = dc->hstr_active = EX_TBFLAG_A32(tb_flags, HSTR_ACTIVE);
FIELD_EX32(tb_flags, TBFLAG_ANY, DEBUG_TARGET_EL); dc->ns = EX_TBFLAG_A32(tb_flags, NS);
dc->sctlr_b = FIELD_EX32(tb_flags, TBFLAG_A32, SCTLR__B); dc->vfp_enabled = EX_TBFLAG_A32(tb_flags, VFPEN);
dc->hstr_active = FIELD_EX32(tb_flags, TBFLAG_A32, HSTR_ACTIVE);
dc->ns = FIELD_EX32(tb_flags, TBFLAG_A32, NS);
dc->vfp_enabled = FIELD_EX32(tb_flags, TBFLAG_A32, VFPEN);
if (arm_feature(env, ARM_FEATURE_XSCALE)) { if (arm_feature(env, ARM_FEATURE_XSCALE)) {
dc->c15_cpar = FIELD_EX32(tb_flags, TBFLAG_A32, XSCALE_CPAR); dc->c15_cpar = EX_TBFLAG_A32(tb_flags, XSCALE_CPAR);
} else { } else {
dc->vec_len = FIELD_EX32(tb_flags, TBFLAG_A32, VECLEN); dc->vec_len = EX_TBFLAG_A32(tb_flags, VECLEN);
dc->vec_stride = FIELD_EX32(tb_flags, TBFLAG_A32, VECSTRIDE); dc->vec_stride = EX_TBFLAG_A32(tb_flags, VECSTRIDE);
} }
} }
dc->cp_regs = cpu->cp_regs; dc->cp_regs = cpu->cp_regs;
@@ -8924,8 +8920,8 @@ static void arm_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
* emit code to generate a software step exception * emit code to generate a software step exception
* end the TB * end the TB
*/ */
dc->ss_active = FIELD_EX32(tb_flags, TBFLAG_ANY, SS_ACTIVE); dc->ss_active = EX_TBFLAG_ANY(tb_flags, SS_ACTIVE);
dc->pstate_ss = FIELD_EX32(tb_flags, TBFLAG_ANY, PSTATE__SS); dc->pstate_ss = EX_TBFLAG_ANY(tb_flags, PSTATE__SS);
dc->is_ldex = false; dc->is_ldex = false;
dc->page_start = dc->base.pc_first & TARGET_PAGE_MASK; dc->page_start = dc->base.pc_first & TARGET_PAGE_MASK;
@@ -9364,11 +9360,11 @@ void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
DisasContext dc = { }; DisasContext dc = { };
const TranslatorOps *ops = &arm_translator_ops; const TranslatorOps *ops = &arm_translator_ops;
if (FIELD_EX32(tb->flags, TBFLAG_AM32, THUMB)) { if (EX_TBFLAG_AM32(tb->flags, THUMB)) {
ops = &thumb_translator_ops; ops = &thumb_translator_ops;
} }
#ifdef TARGET_AARCH64 #ifdef TARGET_AARCH64
if (FIELD_EX32(tb->flags, TBFLAG_ANY, AARCH64_STATE)) { if (EX_TBFLAG_ANY(tb->flags, AARCH64_STATE)) {
ops = &aarch64_translator_ops; ops = &aarch64_translator_ops;
} }
#endif #endif