env->cp15.cptr_el[3] |= R_CPTR_EL3_ESM_MASK;
env->cp15.scr_el3 |= SCR_ENTP2;
env->vfp.smcr_el[3] = 0xf;
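+ /* With SME2, also enable ZT0 out of reset via SMCR_EL3.EZT0. */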
+ if (cpu_isar_feature(aa64_sme2, cpu)) {
+ env->vfp.smcr_el[3] |= R_SMCR_EZT0_MASK;
+ }
}
if (cpu_isar_feature(aa64_hcx, cpu)) {
env->cp15.scr_el3 |= SCR_HXEN;
/* Fields for SMCR_ELx. */
FIELD(SMCR, LEN, 0, 4)
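+/* EZT0 (FEAT_SME2): when clear, accesses to ZT0 are trapped. */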
+FIELD(SMCR, EZT0, 30, 1)
FIELD(SMCR, FA64, 31, 1)
/* Write a new value to v7m.exception, thus transitioning into or out
FIELD(TBFLAG_A64, NV2_MEM_BE, 36, 1)
FIELD(TBFLAG_A64, AH, 37, 1) /* FPCR.AH */
FIELD(TBFLAG_A64, NEP, 38, 1) /* FPCR.NEP */
+FIELD(TBFLAG_A64, ZT0EXC_EL, 39, 2) /* ZT0 exception EL, 0 if enabled */
/*
* Helpers for using the above. Note that only the A64 accessors use
static void smcr_write(CPUARMState *env, const ARMCPRegInfo *ri, uint64_t value)
{
int cur_el = arm_current_el(env);
int old_len = sve_vqm1_for_el(env, cur_el);
+ uint64_t valid_mask = R_SMCR_LEN_MASK | R_SMCR_FA64_MASK;
int new_len;
QEMU_BUILD_BUG_ON(ARM_MAX_VQ > R_SMCR_LEN_MASK + 1);
- value &= R_SMCR_LEN_MASK | R_SMCR_FA64_MASK;
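+ /* FEAT_SME2 adds the EZT0 enable bit to SMCR_ELx. */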
+ if (cpu_isar_feature(aa64_sme2, env_archcpu(env))) {
+ valid_mask |= R_SMCR_EZT0_MASK;
+ }
+ value &= valid_mask;
raw_write(env, ri, value);
/*
return rebuild_hflags_common_32(env, fp_el, mmu_idx, flags);
}
+/*
+ * Return the exception level to which exceptions should be taken for ZT0.
+ * Cf. the ARM pseudocode function CheckSMEZT0Enabled, after the ZA check.
+ */
+static int zt0_exception_el(CPUARMState *env, int el)
+{
+#ifndef CONFIG_USER_ONLY
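+ /* Check SMCR_EL1.EZT0, unless the EL2&0 host regime is in use. */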
+ if (el <= 1
+ && !el_is_in_host(env, el)
+ && !FIELD_EX64(env->vfp.smcr_el[1], SMCR, EZT0)) {
+ return 1;
+ }
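+ /* Check SMCR_EL2.EZT0 when EL2 is enabled. */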
+ if (el <= 2
+ && arm_is_el2_enabled(env)
+ && !FIELD_EX64(env->vfp.smcr_el[2], SMCR, EZT0)) {
+ return 2;
+ }
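+ /* Finally, check SMCR_EL3.EZT0. */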
+ if (arm_feature(env, ARM_FEATURE_EL3)
+ && !FIELD_EX64(env->vfp.smcr_el[3], SMCR, EZT0)) {
+ return 3;
+ }
+#endif
+ return 0;
+}
+
static CPUARMTBFlags rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
ARMMMUIdx mmu_idx)
{
DP_TBFLAG_A64(flags, PSTATE_SM, 1);
DP_TBFLAG_A64(flags, SME_TRAP_NONSTREAMING, !sme_fa64(env, el));
}
- DP_TBFLAG_A64(flags, PSTATE_ZA, FIELD_EX64(env->svcr, SVCR, ZA));
+
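+ /* ZT0 is only accessible while PSTATE.ZA is set (cf. CheckSMEZT0Enabled). */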
+ if (FIELD_EX64(env->svcr, SVCR, ZA)) {
+ DP_TBFLAG_A64(flags, PSTATE_ZA, 1);
+ if (cpu_isar_feature(aa64_sme2, env_archcpu(env))) {
+ int zt0_el = zt0_exception_el(env, el);
+ DP_TBFLAG_A64(flags, ZT0EXC_EL, zt0_el);
+ }
+ }
}
sctlr = regime_sctlr(env, stage1);
dc->trap_eret = EX_TBFLAG_A64(tb_flags, TRAP_ERET);
dc->sve_excp_el = EX_TBFLAG_A64(tb_flags, SVEEXC_EL);
dc->sme_excp_el = EX_TBFLAG_A64(tb_flags, SMEEXC_EL);
+ dc->zt0_excp_el = EX_TBFLAG_A64(tb_flags, ZT0EXC_EL);
dc->vl = (EX_TBFLAG_A64(tb_flags, VL) + 1) * 16;
dc->svl = (EX_TBFLAG_A64(tb_flags, SVL) + 1) * 16;
dc->pauth_active = EX_TBFLAG_A64(tb_flags, PAUTH_ACTIVE);
int fp_excp_el; /* FP exception EL or 0 if enabled */
int sve_excp_el; /* SVE exception EL or 0 if enabled */
int sme_excp_el; /* SME exception EL or 0 if enabled */
+ int zt0_excp_el; /* ZT0 exception EL or 0 if enabled */
int vl; /* current vector length in bytes */
int svl; /* current streaming vector length in bytes */
bool vfp_enabled; /* FP enabled via FPSCR.EN */