target/arm: use arm_hcr_el2_eff() where applicable

This will simplify accessing HCR_EL2 conditionally in secure state.
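
For context, a minimal sketch of the idea behind arm_hcr_el2_eff() (a
hypothetical simplification, not the actual helper in target/arm/helper.c,
which also masks bits that are ineffective for the emulated feature set and
the current E2H configuration):

/*
 * Hypothetical, simplified model of arm_hcr_el2_eff(): the read of the
 * effective HCR_EL2 value folds in the security-state check, returning
 * 0 when EL2 is disabled in the current security state.
 */
static uint64_t effective_hcr_el2(CPUARMState *env) /* hypothetical name */
{
    if (arm_is_secure_below_el3(env)) {
        /* HCR_EL2 has no effect when EL2 is disabled in Secure state */
        return 0;
    }
    return env->cp15.hcr_el2;
}

With that behaviour, callers such as vae1_tlbmask() below no longer need to
exclude secure state by hand before reading HCR_EL2 directly.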

Signed-off-by: Rémi Denis-Courmont <remi.denis.courmont@huawei.com>
Reviewed-by: Richard Henderson <richard.henderson@linaro.org>
Message-id: 20210112104511.36576-4-remi.denis.courmont@huawei.com
Signed-off-by: Peter Maydell <peter.maydell@linaro.org>

@@ -4433,16 +4433,16 @@ static CPAccessResult aa64_cacheop_pou_access(CPUARMState *env,
 static int vae1_tlbmask(CPUARMState *env)
 {
-    /* Since we exclude secure first, we may read HCR_EL2 directly. */
-    if (arm_is_secure_below_el3(env)) {
-        return ARMMMUIdxBit_SE10_1 |
-               ARMMMUIdxBit_SE10_1_PAN |
-               ARMMMUIdxBit_SE10_0;
-    } else if ((env->cp15.hcr_el2 & (HCR_E2H | HCR_TGE))
-               == (HCR_E2H | HCR_TGE)) {
+    uint64_t hcr = arm_hcr_el2_eff(env);
+
+    if ((hcr & (HCR_E2H | HCR_TGE)) == (HCR_E2H | HCR_TGE)) {
         return ARMMMUIdxBit_E20_2 |
                ARMMMUIdxBit_E20_2_PAN |
                ARMMMUIdxBit_E20_0;
+    } else if (arm_is_secure_below_el3(env)) {
+        return ARMMMUIdxBit_SE10_1 |
+               ARMMMUIdxBit_SE10_1_PAN |
+               ARMMMUIdxBit_SE10_0;
     } else {
         return ARMMMUIdxBit_E10_1 |
                ARMMMUIdxBit_E10_1_PAN |
@@ -9965,6 +9965,8 @@ static inline uint64_t regime_sctlr(CPUARMState *env, ARMMMUIdx mmu_idx)
 static inline bool regime_translation_disabled(CPUARMState *env,
                                                ARMMMUIdx mmu_idx)
 {
+    uint64_t hcr_el2;
+
     if (arm_feature(env, ARM_FEATURE_M)) {
         switch (env->v7m.mpu_ctrl[regime_is_secure(env, mmu_idx)] &
                 (R_V7M_MPU_CTRL_ENABLE_MASK | R_V7M_MPU_CTRL_HFNMIENA_MASK)) {
@@ -9983,19 +9985,21 @@ static inline bool regime_translation_disabled(CPUARMState *env,
         }
     }
 
+    hcr_el2 = arm_hcr_el2_eff(env);
+
     if (mmu_idx == ARMMMUIdx_Stage2) {
         /* HCR.DC means HCR.VM behaves as 1 */
-        return (env->cp15.hcr_el2 & (HCR_DC | HCR_VM)) == 0;
+        return (hcr_el2 & (HCR_DC | HCR_VM)) == 0;
     }
 
-    if (env->cp15.hcr_el2 & HCR_TGE) {
+    if (hcr_el2 & HCR_TGE) {
         /* TGE means that NS EL0/1 act as if SCTLR_EL1.M is zero */
         if (!regime_is_secure(env, mmu_idx) && regime_el(env, mmu_idx) == 1) {
             return true;
         }
     }
 
-    if ((env->cp15.hcr_el2 & HCR_DC) && arm_mmu_idx_is_stage1_of_2(mmu_idx)) {
+    if ((hcr_el2 & HCR_DC) && arm_mmu_idx_is_stage1_of_2(mmu_idx)) {
         /* HCR.DC means SCTLR_EL1.M behaves as 0 */
         return true;
     }
@@ -10346,7 +10350,8 @@ static hwaddr S1_ptw_translate(CPUARMState *env, ARMMMUIdx mmu_idx,
             fi->s1ptw = true;
             return ~0;
         }
-        if ((env->cp15.hcr_el2 & HCR_PTW) && (cacheattrs.attrs & 0xf0) == 0) {
+        if ((arm_hcr_el2_eff(env) & HCR_PTW) &&
+            (cacheattrs.attrs & 0xf0) == 0) {
             /*
              * PTW set and S1 walk touched S2 Device memory:
              * generate Permission fault.
@@ -10779,7 +10784,7 @@ static uint8_t convert_stage2_attrs(CPUARMState *env, uint8_t s2attrs)
     uint8_t hihint = 0, lohint = 0;
 
     if (hiattr != 0) { /* normal memory */
-        if ((env->cp15.hcr_el2 & HCR_CD) != 0) { /* cache disabled */
+        if (arm_hcr_el2_eff(env) & HCR_CD) { /* cache disabled */
             hiattr = loattr = 1; /* non-cacheable */
         } else {
             if (hiattr != 1) { /* Write-through or write-back */
@@ -12112,7 +12117,7 @@ bool get_phys_addr(CPUARMState *env, target_ulong address,
             }
 
             /* Combine the S1 and S2 cache attributes. */
-            if (env->cp15.hcr_el2 & HCR_DC) {
+            if (arm_hcr_el2_eff(env) & HCR_DC) {
                 /*
                  * HCR.DC forces the first stage attributes to
                  * Normal Non-Shareable,