}
}
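The definition of struct resx itself sits outside this excerpt. Judging from the .res0/.res1 accesses below and the u64 res0/res1 pairs it replaces, it is presumably a minimal two-field aggregate along these lines (a sketch, not the patch's literal definition):

	/* Sketch: pair of RES0/RES1 masks carried together by value. */
	struct resx {
		u64	res0;	/* bits that are reserved-as-zero */
		u64	res1;	/* bits that are reserved-as-one */
	};

Returning the pair by value is what lets the helpers below drop their u64 *res0 / u64 *res1 out-parameters.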
-static u64 __compute_fixed_bits(struct kvm *kvm,
- const struct reg_bits_to_feat_map *map,
- int map_size,
- u64 *fixed_bits,
- unsigned long require,
- unsigned long exclude)
+static struct resx __compute_fixed_bits(struct kvm *kvm,
+ const struct reg_bits_to_feat_map *map,
+ int map_size,
+ u64 *fixed_bits,
+ unsigned long require,
+ unsigned long exclude)
{
- u64 val = 0;
+ struct resx resx = {};
for (int i = 0; i < map_size; i++) {
bool match;
match = idreg_feat_match(kvm, &map[i]);
if (!match || (map[i].flags & FIXED_VALUE))
- val |= reg_feat_map_bits(&map[i]);
+ resx.res0 |= reg_feat_map_bits(&map[i]);
}
- return val;
+ return resx;
}
-static u64 compute_res0_bits(struct kvm *kvm,
- const struct reg_bits_to_feat_map *map,
- int map_size,
- unsigned long require,
- unsigned long exclude)
+static struct resx compute_resx_bits(struct kvm *kvm,
+ const struct reg_bits_to_feat_map *map,
+ int map_size,
+ unsigned long require,
+ unsigned long exclude)
{
return __compute_fixed_bits(kvm, map, map_size, NULL,
require, exclude | FIXED_VALUE);
}
-static u64 compute_reg_res0_bits(struct kvm *kvm,
- const struct reg_feat_map_desc *r,
- unsigned long require, unsigned long exclude)
+static struct resx compute_reg_resx_bits(struct kvm *kvm,
+ const struct reg_feat_map_desc *r,
+ unsigned long require,
+ unsigned long exclude)
{
- u64 res0;
+ struct resx resx, tmp;
- res0 = compute_res0_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
+ resx = compute_resx_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
require, exclude);
- res0 |= compute_res0_bits(kvm, &r->feat_map, 1, require, exclude);
- res0 |= ~reg_feat_map_bits(&r->feat_map);
+ tmp = compute_resx_bits(kvm, &r->feat_map, 1, require, exclude);
+
+ resx.res0 |= tmp.res0;
+ resx.res0 |= ~reg_feat_map_bits(&r->feat_map);
+ resx.res1 |= tmp.res1;
- return res0;
+ return resx;
}
static u64 compute_fgu_bits(struct kvm *kvm, const struct reg_feat_map_desc *r)
{
+ struct resx resx;
+
/*
* If computing FGUs, we collect the unsupported feature bits as
- * RES0 bits, but don't take the actual RES0 bits or register
+ * RESx bits, but don't take the actual RESx bits or register
* existence into account -- we're not computing bits for the
* register itself.
*/
- return compute_res0_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
+ resx = compute_resx_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
0, NEVER_FGU);
+
+ return resx.res0 | resx.res1;
}
-static u64 compute_reg_fixed_bits(struct kvm *kvm,
- const struct reg_feat_map_desc *r,
- u64 *fixed_bits, unsigned long require,
- unsigned long exclude)
+static struct resx compute_reg_fixed_bits(struct kvm *kvm,
+ const struct reg_feat_map_desc *r,
+ u64 *fixed_bits,
+ unsigned long require,
+ unsigned long exclude)
{
return __compute_fixed_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
fixed_bits, require | FIXED_VALUE, exclude);
kvm->arch.fgu[fgt] = val;
}
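Only the tail of the FGU computation survives in the trimmed context above (the kvm->arch.fgu[fgt] = val; assignment). For orientation, the caller of compute_fgu_bits() presumably resembles the sketch below; compute_fgu, enum fgt_group_id and HFGRTR_GROUP are assumed names for illustration, not something this patch introduces:

	/* Hypothetical caller sketch -- names are illustrative only. */
	void compute_fgu(struct kvm *kvm, enum fgt_group_id fgt)
	{
		u64 val = 0;

		switch (fgt) {
		case HFGRTR_GROUP:	/* assumed enum value */
			val |= compute_fgu_bits(kvm, &hfgrtr_desc);
			val |= compute_fgu_bits(kvm, &hfgwtr_desc);
			break;
		/* ... one case per FGT group, elided ... */
		default:
			break;
		}

		kvm->arch.fgu[fgt] = val;
	}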
-void get_reg_fixed_bits(struct kvm *kvm, enum vcpu_sysreg reg, u64 *res0, u64 *res1)
+struct resx get_reg_fixed_bits(struct kvm *kvm, enum vcpu_sysreg reg)
{
u64 fixed = 0, mask;
+ struct resx resx;
switch (reg) {
case HFGRTR_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hfgrtr_desc, 0, 0);
- *res1 = HFGRTR_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hfgrtr_desc, 0, 0);
+ resx.res1 |= HFGRTR_EL2_RES1;
break;
case HFGWTR_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hfgwtr_desc, 0, 0);
- *res1 = HFGWTR_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hfgwtr_desc, 0, 0);
+ resx.res1 |= HFGWTR_EL2_RES1;
break;
case HFGITR_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hfgitr_desc, 0, 0);
- *res1 = HFGITR_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hfgitr_desc, 0, 0);
+ resx.res1 |= HFGITR_EL2_RES1;
break;
case HDFGRTR_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hdfgrtr_desc, 0, 0);
- *res1 = HDFGRTR_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hdfgrtr_desc, 0, 0);
+ resx.res1 |= HDFGRTR_EL2_RES1;
break;
case HDFGWTR_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hdfgwtr_desc, 0, 0);
- *res1 = HDFGWTR_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hdfgwtr_desc, 0, 0);
+ resx.res1 |= HDFGWTR_EL2_RES1;
break;
case HAFGRTR_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hafgrtr_desc, 0, 0);
- *res1 = HAFGRTR_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hafgrtr_desc, 0, 0);
+ resx.res1 |= HAFGRTR_EL2_RES1;
break;
case HFGRTR2_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hfgrtr2_desc, 0, 0);
- *res1 = HFGRTR2_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hfgrtr2_desc, 0, 0);
+ resx.res1 |= HFGRTR2_EL2_RES1;
break;
case HFGWTR2_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hfgwtr2_desc, 0, 0);
- *res1 = HFGWTR2_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hfgwtr2_desc, 0, 0);
+ resx.res1 |= HFGWTR2_EL2_RES1;
break;
case HFGITR2_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hfgitr2_desc, 0, 0);
- *res1 = HFGITR2_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hfgitr2_desc, 0, 0);
+ resx.res1 |= HFGITR2_EL2_RES1;
break;
case HDFGRTR2_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hdfgrtr2_desc, 0, 0);
- *res1 = HDFGRTR2_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hdfgrtr2_desc, 0, 0);
+ resx.res1 |= HDFGRTR2_EL2_RES1;
break;
case HDFGWTR2_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hdfgwtr2_desc, 0, 0);
- *res1 = HDFGWTR2_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hdfgwtr2_desc, 0, 0);
+ resx.res1 |= HDFGWTR2_EL2_RES1;
break;
case HCRX_EL2:
- *res0 = compute_reg_res0_bits(kvm, &hcrx_desc, 0, 0);
- *res1 = __HCRX_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &hcrx_desc, 0, 0);
+ resx.res1 |= __HCRX_EL2_RES1;
break;
case HCR_EL2:
- mask = compute_reg_fixed_bits(kvm, &hcr_desc, &fixed, 0, 0);
- *res0 = compute_reg_res0_bits(kvm, &hcr_desc, 0, 0);
- *res0 |= (mask & ~fixed);
- *res1 = HCR_EL2_RES1 | (mask & fixed);
+ mask = compute_reg_fixed_bits(kvm, &hcr_desc, &fixed, 0, 0).res0;
+ resx = compute_reg_resx_bits(kvm, &hcr_desc, 0, 0);
+ resx.res0 |= (mask & ~fixed);
+ resx.res1 |= HCR_EL2_RES1 | (mask & fixed);
break;
case SCTLR2_EL1:
case SCTLR2_EL2:
- *res0 = compute_reg_res0_bits(kvm, &sctlr2_desc, 0, 0);
- *res1 = SCTLR2_EL1_RES1;
+ resx = compute_reg_resx_bits(kvm, &sctlr2_desc, 0, 0);
+ resx.res1 |= SCTLR2_EL1_RES1;
break;
case TCR2_EL2:
- *res0 = compute_reg_res0_bits(kvm, &tcr2_el2_desc, 0, 0);
- *res1 = TCR2_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &tcr2_el2_desc, 0, 0);
+ resx.res1 |= TCR2_EL2_RES1;
break;
case SCTLR_EL1:
- *res0 = compute_reg_res0_bits(kvm, &sctlr_el1_desc, 0, 0);
- *res1 = SCTLR_EL1_RES1;
+ resx = compute_reg_resx_bits(kvm, &sctlr_el1_desc, 0, 0);
+ resx.res1 |= SCTLR_EL1_RES1;
break;
case MDCR_EL2:
- *res0 = compute_reg_res0_bits(kvm, &mdcr_el2_desc, 0, 0);
- *res1 = MDCR_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &mdcr_el2_desc, 0, 0);
+ resx.res1 |= MDCR_EL2_RES1;
break;
case VTCR_EL2:
- *res0 = compute_reg_res0_bits(kvm, &vtcr_el2_desc, 0, 0);
- *res1 = VTCR_EL2_RES1;
+ resx = compute_reg_resx_bits(kvm, &vtcr_el2_desc, 0, 0);
+ resx.res1 |= VTCR_EL2_RES1;
break;
default:
WARN_ON_ONCE(1);
- *res0 = *res1 = 0;
+ resx = (typeof(resx)){};
break;
}
+
+ return resx;
}
static __always_inline struct fgt_masks *__fgt_reg_to_masks(enum vcpu_sysreg reg)
return v;
}
-static __always_inline void set_sysreg_masks(struct kvm *kvm, int sr, u64 res0, u64 res1)
+static __always_inline void set_sysreg_masks(struct kvm *kvm, int sr, struct resx resx)
{
- int i = sr - __SANITISED_REG_START__;
-
BUILD_BUG_ON(!__builtin_constant_p(sr));
BUILD_BUG_ON(sr < __SANITISED_REG_START__);
BUILD_BUG_ON(sr >= NR_SYS_REGS);
- kvm->arch.sysreg_masks->mask[i].res0 = res0;
- kvm->arch.sysreg_masks->mask[i].res1 = res1;
+ kvm_set_sysreg_resx(kvm, sr, resx);
}
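The kvm_set_sysreg_resx() helper that replaces the open-coded mask writes is referenced but not defined in this excerpt. Reconstructed from the lines removed above, a plausible definition would be:

	/* Plausible helper, reconstructed from the removed mask writes. */
	static inline void kvm_set_sysreg_resx(struct kvm *kvm, int sr,
					       struct resx resx)
	{
		int i = sr - __SANITISED_REG_START__;

		kvm->arch.sysreg_masks->mask[i].res0 = resx.res0;
		kvm->arch.sysreg_masks->mask[i].res1 = resx.res1;
	}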
int kvm_init_nv_sysregs(struct kvm_vcpu *vcpu)
{
struct kvm *kvm = vcpu->kvm;
- u64 res0, res1;
+ struct resx resx;
lockdep_assert_held(&kvm->arch.config_lock);
return -ENOMEM;
/* VTTBR_EL2 */
- res0 = res1 = 0;
+ resx = (typeof(resx)){};
if (!kvm_has_feat_enum(kvm, ID_AA64MMFR1_EL1, VMIDBits, 16))
- res0 |= GENMASK(63, 56);
+ resx.res0 |= GENMASK(63, 56);
if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, CnP, IMP))
- res0 |= VTTBR_CNP_BIT;
- set_sysreg_masks(kvm, VTTBR_EL2, res0, res1);
+ resx.res0 |= VTTBR_CNP_BIT;
+ set_sysreg_masks(kvm, VTTBR_EL2, resx);
/* VTCR_EL2 */
- get_reg_fixed_bits(kvm, VTCR_EL2, &res0, &res1);
- set_sysreg_masks(kvm, VTCR_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, VTCR_EL2);
+ set_sysreg_masks(kvm, VTCR_EL2, resx);
/* VMPIDR_EL2 */
- res0 = GENMASK(63, 40) | GENMASK(30, 24);
- res1 = BIT(31);
- set_sysreg_masks(kvm, VMPIDR_EL2, res0, res1);
+ resx.res0 = GENMASK(63, 40) | GENMASK(30, 24);
+ resx.res1 = BIT(31);
+ set_sysreg_masks(kvm, VMPIDR_EL2, resx);
/* HCR_EL2 */
- get_reg_fixed_bits(kvm, HCR_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HCR_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, HCR_EL2);
+ set_sysreg_masks(kvm, HCR_EL2, resx);
/* HCRX_EL2 */
- get_reg_fixed_bits(kvm, HCRX_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HCRX_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, HCRX_EL2);
+ set_sysreg_masks(kvm, HCRX_EL2, resx);
/* HFG[RW]TR_EL2 */
- get_reg_fixed_bits(kvm, HFGRTR_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HFGRTR_EL2, res0, res1);
- get_reg_fixed_bits(kvm, HFGWTR_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HFGWTR_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, HFGRTR_EL2);
+ set_sysreg_masks(kvm, HFGRTR_EL2, resx);
+ resx = get_reg_fixed_bits(kvm, HFGWTR_EL2);
+ set_sysreg_masks(kvm, HFGWTR_EL2, resx);
/* HDFG[RW]TR_EL2 */
- get_reg_fixed_bits(kvm, HDFGRTR_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HDFGRTR_EL2, res0, res1);
- get_reg_fixed_bits(kvm, HDFGWTR_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HDFGWTR_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, HDFGRTR_EL2);
+ set_sysreg_masks(kvm, HDFGRTR_EL2, resx);
+ resx = get_reg_fixed_bits(kvm, HDFGWTR_EL2);
+ set_sysreg_masks(kvm, HDFGWTR_EL2, resx);
/* HFGITR_EL2 */
- get_reg_fixed_bits(kvm, HFGITR_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HFGITR_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, HFGITR_EL2);
+ set_sysreg_masks(kvm, HFGITR_EL2, resx);
/* HAFGRTR_EL2 - not a lot to see here */
- get_reg_fixed_bits(kvm, HAFGRTR_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HAFGRTR_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, HAFGRTR_EL2);
+ set_sysreg_masks(kvm, HAFGRTR_EL2, resx);
/* HFG[RW]TR2_EL2 */
- get_reg_fixed_bits(kvm, HFGRTR2_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HFGRTR2_EL2, res0, res1);
- get_reg_fixed_bits(kvm, HFGWTR2_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HFGWTR2_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, HFGRTR2_EL2);
+ set_sysreg_masks(kvm, HFGRTR2_EL2, resx);
+ resx = get_reg_fixed_bits(kvm, HFGWTR2_EL2);
+ set_sysreg_masks(kvm, HFGWTR2_EL2, resx);
/* HDFG[RW]TR2_EL2 */
- get_reg_fixed_bits(kvm, HDFGRTR2_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HDFGRTR2_EL2, res0, res1);
- get_reg_fixed_bits(kvm, HDFGWTR2_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HDFGWTR2_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, HDFGRTR2_EL2);
+ set_sysreg_masks(kvm, HDFGRTR2_EL2, resx);
+ resx = get_reg_fixed_bits(kvm, HDFGWTR2_EL2);
+ set_sysreg_masks(kvm, HDFGWTR2_EL2, resx);
/* HFGITR2_EL2 */
- get_reg_fixed_bits(kvm, HFGITR2_EL2, &res0, &res1);
- set_sysreg_masks(kvm, HFGITR2_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, HFGITR2_EL2);
+ set_sysreg_masks(kvm, HFGITR2_EL2, resx);
/* TCR2_EL2 */
- get_reg_fixed_bits(kvm, TCR2_EL2, &res0, &res1);
- set_sysreg_masks(kvm, TCR2_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, TCR2_EL2);
+ set_sysreg_masks(kvm, TCR2_EL2, resx);
/* SCTLR_EL1 */
- get_reg_fixed_bits(kvm, SCTLR_EL1, &res0, &res1);
- set_sysreg_masks(kvm, SCTLR_EL1, res0, res1);
+ resx = get_reg_fixed_bits(kvm, SCTLR_EL1);
+ set_sysreg_masks(kvm, SCTLR_EL1, resx);
/* SCTLR2_ELx */
- get_reg_fixed_bits(kvm, SCTLR2_EL1, &res0, &res1);
- set_sysreg_masks(kvm, SCTLR2_EL1, res0, res1);
- get_reg_fixed_bits(kvm, SCTLR2_EL2, &res0, &res1);
- set_sysreg_masks(kvm, SCTLR2_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, SCTLR2_EL1);
+ set_sysreg_masks(kvm, SCTLR2_EL1, resx);
+ resx = get_reg_fixed_bits(kvm, SCTLR2_EL2);
+ set_sysreg_masks(kvm, SCTLR2_EL2, resx);
/* MDCR_EL2 */
- get_reg_fixed_bits(kvm, MDCR_EL2, &res0, &res1);
- set_sysreg_masks(kvm, MDCR_EL2, res0, res1);
+ resx = get_reg_fixed_bits(kvm, MDCR_EL2);
+ set_sysreg_masks(kvm, MDCR_EL2, resx);
/* CNTHCTL_EL2 */
- res0 = GENMASK(63, 20);
- res1 = 0;
+ resx.res0 = GENMASK(63, 20);
+ resx.res1 = 0;
if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RME, IMP))
- res0 |= CNTHCTL_CNTPMASK | CNTHCTL_CNTVMASK;
+ resx.res0 |= CNTHCTL_CNTPMASK | CNTHCTL_CNTVMASK;
if (!kvm_has_feat(kvm, ID_AA64MMFR0_EL1, ECV, CNTPOFF)) {
- res0 |= CNTHCTL_ECV;
+ resx.res0 |= CNTHCTL_ECV;
if (!kvm_has_feat(kvm, ID_AA64MMFR0_EL1, ECV, IMP))
- res0 |= (CNTHCTL_EL1TVT | CNTHCTL_EL1TVCT |
- CNTHCTL_EL1NVPCT | CNTHCTL_EL1NVVCT);
+ resx.res0 |= (CNTHCTL_EL1TVT | CNTHCTL_EL1TVCT |
+ CNTHCTL_EL1NVPCT | CNTHCTL_EL1NVVCT);
}
if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, VH, IMP))
- res0 |= GENMASK(11, 8);
- set_sysreg_masks(kvm, CNTHCTL_EL2, res0, res1);
+ resx.res0 |= GENMASK(11, 8);
+ set_sysreg_masks(kvm, CNTHCTL_EL2, resx);
/* ICH_HCR_EL2 */
- res0 = ICH_HCR_EL2_RES0;
- res1 = ICH_HCR_EL2_RES1;
+ resx.res0 = ICH_HCR_EL2_RES0;
+ resx.res1 = ICH_HCR_EL2_RES1;
if (!(kvm_vgic_global_state.ich_vtr_el2 & ICH_VTR_EL2_TDS))
- res0 |= ICH_HCR_EL2_TDIR;
+ resx.res0 |= ICH_HCR_EL2_TDIR;
/* No GICv4 is presented to the guest */
- res0 |= ICH_HCR_EL2_DVIM | ICH_HCR_EL2_vSGIEOICount;
- set_sysreg_masks(kvm, ICH_HCR_EL2, res0, res1);
+ resx.res0 |= ICH_HCR_EL2_DVIM | ICH_HCR_EL2_vSGIEOICount;
+ set_sysreg_masks(kvm, ICH_HCR_EL2, resx);
/* VNCR_EL2 */
- set_sysreg_masks(kvm, VNCR_EL2, VNCR_EL2_RES0, VNCR_EL2_RES1);
+ resx.res0 = VNCR_EL2_RES0;
+ resx.res1 = VNCR_EL2_RES1;
+ set_sysreg_masks(kvm, VNCR_EL2, resx);
out:
for (enum vcpu_sysreg sr = __SANITISED_REG_START__; sr < NR_SYS_REGS; sr++)