From 31961cfe505e11cc4ec4cfde52c851957e1bf605 Mon Sep 17 00:00:00 2001
From: LIU Zhiwei
Date: Thu, 20 Jan 2022 20:20:43 +0800
Subject: [PATCH] target/riscv: Adjust vsetvl according to XLEN

Signed-off-by: LIU Zhiwei
Reviewed-by: Richard Henderson
Reviewed-by: Alistair Francis
Message-id: 20220120122050.41546-17-zhiwei_liu@c-sky.com
Signed-off-by: Alistair Francis
---
 target/riscv/cpu.h           | 5 +++++
 target/riscv/vector_helper.c | 7 +++++--
 2 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/target/riscv/cpu.h b/target/riscv/cpu.h
index 6c740b92c1..fe58ccaeae 100644
--- a/target/riscv/cpu.h
+++ b/target/riscv/cpu.h
@@ -491,6 +491,11 @@ static inline RISCVMXL cpu_recompute_xl(CPURISCVState *env)
 }
 #endif
 
+static inline int riscv_cpu_xlen(CPURISCVState *env)
+{
+    return 16 << env->xl;
+}
+
 /*
  * Encode LMUL to lmul as follows:
  *   LMUL vlmul lmul
diff --git a/target/riscv/vector_helper.c b/target/riscv/vector_helper.c
index a9484c22ea..8b7c9ec890 100644
--- a/target/riscv/vector_helper.c
+++ b/target/riscv/vector_helper.c
@@ -36,8 +36,11 @@ target_ulong HELPER(vsetvl)(CPURISCVState *env, target_ulong s1,
     uint64_t lmul = FIELD_EX64(s2, VTYPE, VLMUL);
     uint16_t sew = 8 << FIELD_EX64(s2, VTYPE, VSEW);
     uint8_t ediv = FIELD_EX64(s2, VTYPE, VEDIV);
-    bool vill = FIELD_EX64(s2, VTYPE, VILL);
-    target_ulong reserved = FIELD_EX64(s2, VTYPE, RESERVED);
+    int xlen = riscv_cpu_xlen(env);
+    bool vill = (s2 >> (xlen - 1)) & 0x1;
+    target_ulong reserved = s2 &
+                            MAKE_64BIT_MASK(R_VTYPE_RESERVED_SHIFT,
+                                            xlen - 1 - R_VTYPE_RESERVED_SHIFT);
 
     if (lmul & 4) {
         /* Fractional LMUL. */
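
Editor's note (not part of the patch): the change locates vill and the reserved
field of vtype relative to the effective XLEN (bit XLEN-1 for vill, everything
between the low configuration fields and vill for reserved) instead of reading
fixed 64-bit field positions. The standalone sketch below illustrates that
behaviour. It is an assumption-laden illustration, not QEMU code: the
MAKE_64BIT_MASK macro is re-implemented locally with its usual
"length bits starting at shift" meaning, and RESERVED_SHIFT is a hypothetical
stand-in for the generated R_VTYPE_RESERVED_SHIFT, taken as 8 here purely for
demonstration.

#include <inttypes.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Local re-implementation for this sketch only. */
#define MAKE_64BIT_MASK(shift, length) \
    (((~0ULL) >> (64 - (length))) << (shift))

/* Hypothetical stand-in for R_VTYPE_RESERVED_SHIFT. */
#define RESERVED_SHIFT 8

/* Mirrors riscv_cpu_xlen(): xl of 1, 2, 3 yields 32, 64, 128. */
static int xlen_from_xl(int xl)
{
    return 16 << xl;
}

static void decode_vtype(uint64_t s2, int xl)
{
    int xlen = xlen_from_xl(xl);

    /* vill is the most significant bit of vtype, i.e. bit XLEN-1. */
    bool vill = (s2 >> (xlen - 1)) & 0x1;

    /* Any set bit between the config fields and vill is a reserved bit. */
    uint64_t reserved = s2 &
                        MAKE_64BIT_MASK(RESERVED_SHIFT,
                                        xlen - 1 - RESERVED_SHIFT);

    printf("xlen=%d vill=%d reserved=0x%" PRIx64 "\n", xlen, vill, reserved);
}

int main(void)
{
    /*
     * The same vtype image read under RV32 (xl=1) and RV64 (xl=2):
     * bit 31 is vill on RV32 but only a reserved bit on RV64.
     */
    uint64_t s2 = 1ULL << 31;
    decode_vtype(s2, 1);   /* RV32: vill=1, reserved=0 */
    decode_vtype(s2, 2);   /* RV64: vill=0, reserved=0x80000000 */
    return 0;
}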