target/riscv: Adjust vsetvl according to XLEN
Signed-off-by: LIU Zhiwei <zhiwei_liu@c-sky.com>
Reviewed-by: Richard Henderson <richard.henderson@linaro.org>
Reviewed-by: Alistair Francis <alistair.francis@wdc.com>
Message-id: 20220120122050.41546-17-zhiwei_liu@c-sky.com
Signed-off-by: Alistair Francis <alistair.francis@wdc.com>
This commit is contained in:
parent
d96a271a8d
commit
31961cfe50
@ -491,6 +491,11 @@ static inline RISCVMXL cpu_recompute_xl(CPURISCVState *env)
|
||||
}
|
||||
#endif
|
||||
|
||||
/*
 * Return the current effective XLEN (register width) in bits.
 *
 * env->xl holds the active MXL encoding; presumably 1 = RV32,
 * 2 = RV64, 3 = RV128 as in the RISC-V privileged spec, so shifting
 * 16 left by it yields 32, 64 or 128 — TODO confirm against RISCVMXL.
 */
static inline int riscv_cpu_xlen(CPURISCVState *env)
{
    int bits = 16 << env->xl;
    return bits;
}
|
||||
|
||||
/*
|
||||
* Encode LMUL to lmul as follows:
|
||||
* LMUL vlmul lmul
|
||||
|
@ -36,8 +36,11 @@ target_ulong HELPER(vsetvl)(CPURISCVState *env, target_ulong s1,
|
||||
uint64_t lmul = FIELD_EX64(s2, VTYPE, VLMUL);
|
||||
uint16_t sew = 8 << FIELD_EX64(s2, VTYPE, VSEW);
|
||||
uint8_t ediv = FIELD_EX64(s2, VTYPE, VEDIV);
|
||||
bool vill = FIELD_EX64(s2, VTYPE, VILL);
|
||||
target_ulong reserved = FIELD_EX64(s2, VTYPE, RESERVED);
|
||||
int xlen = riscv_cpu_xlen(env);
|
||||
bool vill = (s2 >> (xlen - 1)) & 0x1;
|
||||
target_ulong reserved = s2 &
|
||||
MAKE_64BIT_MASK(R_VTYPE_RESERVED_SHIFT,
|
||||
xlen - 1 - R_VTYPE_RESERVED_SHIFT);
|
||||
|
||||
if (lmul & 4) {
|
||||
/* Fractional LMUL. */
|
||||
|
Loading…
x
Reference in New Issue
Block a user