static int
cpuid4_cache_lookup_regs(int index, struct _cpuid4_info_regs *id4)
{
- union _cpuid4_leaf_eax eax;
- union _cpuid4_leaf_ebx ebx;
- union _cpuid4_leaf_ecx ecx;
- unsigned edx;
-
- if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD) {
- if (boot_cpu_has(X86_FEATURE_TOPOEXT))
- cpuid_count(0x8000001d, index, &eax.full,
- &ebx.full, &ecx.full, &edx);
- else
+ u8 cpu_vendor = boot_cpu_data.x86_vendor;
+ union _cpuid4_leaf_eax eax;
+ union _cpuid4_leaf_ebx ebx;
+ union _cpuid4_leaf_ecx ecx;
+ u32 edx;
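+ /* edx is only needed as an output slot for cpuid_count(); its value is unused */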
+
+ if (cpu_vendor == X86_VENDOR_AMD || cpu_vendor == X86_VENDOR_HYGON) {
+ if (boot_cpu_has(X86_FEATURE_TOPOEXT) || cpu_vendor == X86_VENDOR_HYGON) {
+ /* AMD with TOPOEXT, or Hygon: leaf 0x8000001d mirrors Intel's leaf 4 layout */
+ cpuid_count(0x8000001d, index, &eax.full, &ebx.full, &ecx.full, &edx);
+ } else {
+ /* Older AMD without TOPOEXT: synthesize the leaf from legacy cache CPUID data */
amd_cpuid4(index, &eax, &ebx, &ecx);
- amd_init_l3_cache(id4, index);
- } else if (boot_cpu_data.x86_vendor == X86_VENDOR_HYGON) {
- cpuid_count(0x8000001d, index, &eax.full,
- &ebx.full, &ecx.full, &edx);
+ }
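+ /* Attach AMD northbridge L3 info (used e.g. for L3 cache index disable) */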
amd_init_l3_cache(id4, index);
} else {
+ /* Intel: deterministic cache parameters, CPUID leaf 4 */
cpuid_count(4, index, &eax.full, &ebx.full, &ecx.full, &edx);
}
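+ /* CTYPE_NULL means there is no cache at this index */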
if (eax.split.type == CTYPE_NULL)
- return -EIO; /* better error ? */
+ return -EIO;
id4->eax = eax;
id4->ebx = ebx;