From: Marc Zyngier
Date: Mon, 2 Feb 2026 18:43:12 +0000 (+0000)
Subject: KVM: arm64: Introduce standalone FGU computing primitive
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=a3c92001812358c35c55c844c1e0d9de8caf13fe;p=thirdparty%2Flinux.git

KVM: arm64: Introduce standalone FGU computing primitive

Computing the FGU bits is made oddly complicated, as we use the RES0
helper instead of using a specific abstraction.

Introduce such an abstraction, which is going to make things
significantly simpler in the future.

Reviewed-by: Fuad Tabba
Tested-by: Fuad Tabba
Link: https://patch.msgid.link/20260202184329.2724080-4-maz@kernel.org
Signed-off-by: Marc Zyngier
---

diff --git a/arch/arm64/kvm/config.c b/arch/arm64/kvm/config.c
index 0bcdb39885734..2122599f7cbbd 100644
--- a/arch/arm64/kvm/config.c
+++ b/arch/arm64/kvm/config.c
@@ -1335,26 +1335,30 @@ static u64 compute_res0_bits(struct kvm *kvm,
 static u64 compute_reg_res0_bits(struct kvm *kvm,
 				 const struct reg_feat_map_desc *r,
 				 unsigned long require, unsigned long exclude)
-
 {
 	u64 res0;
 
 	res0 = compute_res0_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
 				 require, exclude);
 
-	/*
-	 * If computing FGUs, don't take RES0 or register existence
-	 * into account -- we're not computing bits for the register
-	 * itself.
-	 */
-	if (!(exclude & NEVER_FGU)) {
-		res0 |= compute_res0_bits(kvm, &r->feat_map, 1, require, exclude);
-		res0 |= ~reg_feat_map_bits(&r->feat_map);
-	}
+	res0 |= compute_res0_bits(kvm, &r->feat_map, 1, require, exclude);
+	res0 |= ~reg_feat_map_bits(&r->feat_map);
 
 	return res0;
 }
 
+static u64 compute_fgu_bits(struct kvm *kvm, const struct reg_feat_map_desc *r)
+{
+	/*
+	 * If computing FGUs, we collect the unsupported feature bits as
+	 * RES0 bits, but don't take the actual RES0 bits or register
+	 * existence into account -- we're not computing bits for the
+	 * register itself.
+	 */
+	return compute_res0_bits(kvm, r->bit_feat_map, r->bit_feat_map_sz,
+				 0, NEVER_FGU);
+}
+
 static u64 compute_reg_fixed_bits(struct kvm *kvm,
 				  const struct reg_feat_map_desc *r,
 				  u64 *fixed_bits, unsigned long require,
@@ -1370,40 +1374,29 @@ void compute_fgu(struct kvm *kvm, enum fgt_group_id fgt)
 
 	switch (fgt) {
 	case HFGRTR_GROUP:
-		val |= compute_reg_res0_bits(kvm, &hfgrtr_desc,
-					     0, NEVER_FGU);
-		val |= compute_reg_res0_bits(kvm, &hfgwtr_desc,
-					     0, NEVER_FGU);
+		val |= compute_fgu_bits(kvm, &hfgrtr_desc);
+		val |= compute_fgu_bits(kvm, &hfgwtr_desc);
 		break;
 	case HFGITR_GROUP:
-		val |= compute_reg_res0_bits(kvm, &hfgitr_desc,
-					     0, NEVER_FGU);
+		val |= compute_fgu_bits(kvm, &hfgitr_desc);
 		break;
 	case HDFGRTR_GROUP:
-		val |= compute_reg_res0_bits(kvm, &hdfgrtr_desc,
-					     0, NEVER_FGU);
-		val |= compute_reg_res0_bits(kvm, &hdfgwtr_desc,
-					     0, NEVER_FGU);
+		val |= compute_fgu_bits(kvm, &hdfgrtr_desc);
+		val |= compute_fgu_bits(kvm, &hdfgwtr_desc);
 		break;
 	case HAFGRTR_GROUP:
-		val |= compute_reg_res0_bits(kvm, &hafgrtr_desc,
-					     0, NEVER_FGU);
+		val |= compute_fgu_bits(kvm, &hafgrtr_desc);
 		break;
 	case HFGRTR2_GROUP:
-		val |= compute_reg_res0_bits(kvm, &hfgrtr2_desc,
-					     0, NEVER_FGU);
-		val |= compute_reg_res0_bits(kvm, &hfgwtr2_desc,
-					     0, NEVER_FGU);
+		val |= compute_fgu_bits(kvm, &hfgrtr2_desc);
+		val |= compute_fgu_bits(kvm, &hfgwtr2_desc);
 		break;
 	case HFGITR2_GROUP:
-		val |= compute_reg_res0_bits(kvm, &hfgitr2_desc,
-					     0, NEVER_FGU);
+		val |= compute_fgu_bits(kvm, &hfgitr2_desc);
 		break;
 	case HDFGRTR2_GROUP:
-		val |= compute_reg_res0_bits(kvm, &hdfgrtr2_desc,
-					     0, NEVER_FGU);
-		val |= compute_reg_res0_bits(kvm, &hdfgwtr2_desc,
-					     0, NEVER_FGU);
+		val |= compute_fgu_bits(kvm, &hdfgrtr2_desc);
+		val |= compute_fgu_bits(kvm, &hdfgwtr2_desc);
 		break;
 	default:
 		BUG();
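As a rough sketch of the simplification this primitive is meant to enable, a future
fine-grained trap group could be wired into compute_fgu() with one call per backing
register; the HFGXTR3_GROUP and hfgxtr3_desc names below are hypothetical and not
part of this patch:

	case HFGXTR3_GROUP:	/* hypothetical future group, for illustration only */
		val |= compute_fgu_bits(kvm, &hfgxtr3_desc);
		break;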