x86: Support usable check for all CPU features
author H.J. Lu <hjl.tools@gmail.com>
Mon, 29 Jun 2020 23:36:08 +0000 (16:36 -0700)
committer H.J. Lu <hjl.tools@gmail.com>
Mon, 13 Jul 2020 13:05:16 +0000 (06:05 -0700)
Support usable check for all CPU features with the following changes:

1. Change struct cpu_features to

struct cpuid_features
{
  struct cpuid_registers cpuid;
  struct cpuid_registers usable;
};

struct cpu_features
{
  struct cpu_features_basic basic;
  struct cpuid_features features[COMMON_CPUID_INDEX_MAX];
  unsigned int preferred[PREFERRED_FEATURE_INDEX_MAX];
...
};

so that there is a usable bit for each cpuid bit.
2. After the cpuid bits have been initialized, copy the known bits to the
usable bits.  EAX/EBX from INDEX_1 and EAX from INDEX_7 aren't used for
CPU feature detection.
3. Clear the usable bits which require OS support.
4. If the feature is supported by the OS, copy its cpuid bit to its usable
bit (a standalone sketch of steps 2-4 follows this list).
5. Replace HAS_CPU_FEATURE and CPU_FEATURES_CPU_P with CPU_FEATURE_USABLE
and CPU_FEATURE_USABLE_P to check if a feature is usable.
6. Add DEPR_FPU_CS_DS for INDEX_7_EBX_13.
7. Unset MPX feature since it has been deprecated.
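
A minimal standalone sketch of steps 2-4 (the struct shapes mirror item 1,
but the bit_cpu_* macros and the hard-coded OS answer below are
illustrative assumptions for this example, not glibc definitions):

#include <stdio.h>

/* Simplified stand-ins for the structures in item 1; the real
   definitions live in sysdeps/x86/cpu-features.h and keep one
   struct cpuid_registers per COMMON_CPUID_INDEX_* leaf.  */
struct cpuid_registers
{
  unsigned int eax, ebx, ecx, edx;
};

struct cpuid_features
{
  struct cpuid_registers cpuid;   /* raw bits reported by CPUID */
  struct cpuid_registers usable;  /* bits glibc may actually rely on */
};

/* Feature bits used by the sketch (CPUID.1:EDX.SSE2 is bit 26,
   CPUID.1:ECX.AVX is bit 28).  */
#define bit_cpu_SSE2 (1u << 26)
#define bit_cpu_AVX  (1u << 28)

int
main (void)
{
  /* Pretend CPUID reported both SSE2 and AVX.  */
  struct cpuid_features leaf1 =
    { .cpuid = { .ecx = bit_cpu_AVX, .edx = bit_cpu_SSE2 } };

  /* Step 2: copy the known cpuid bits to the usable bits.  */
  leaf1.usable = leaf1.cpuid;

  /* Step 3: clear usable bits which require OS support; AVX needs the
     kernel to enable YMM state via XCR0.  */
  leaf1.usable.ecx &= ~bit_cpu_AVX;

  /* Step 4: copy the cpuid bit back only if the OS check passed
     (simulated as "no" here to show the effect).  */
  int os_enables_avx_state = 0;
  if (os_enables_avx_state)
    leaf1.usable.ecx |= leaf1.cpuid.ecx & bit_cpu_AVX;

  printf ("SSE2: cpu=%d usable=%d\n",
          (leaf1.cpuid.edx & bit_cpu_SSE2) != 0,
          (leaf1.usable.edx & bit_cpu_SSE2) != 0);
  printf ("AVX:  cpu=%d usable=%d\n",
          (leaf1.cpuid.ecx & bit_cpu_AVX) != 0,
          (leaf1.usable.ecx & bit_cpu_AVX) != 0);
  return 0;
}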

The results are

1. If the feature is known and doesn't require OS support, its usable bit
is copied from the cpuid bit.
2. Otherwise, its usable bit is copied from the cpuid bit only if the
feature is known to be supported by the OS.
3. CPU_FEATURE_USABLE/CPU_FEATURE_USABLE_P are used to check if the
feature can be used.
4. HAS_CPU_FEATURE/CPU_FEATURE_CPU_P are used to check if the CPU supports
the feature (a small standalone sketch of the contrast follows).
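
The contrast matters mainly for features which need kernel support (AVX,
AVX512, AMX): the CPU bit can be set while the usable bit is not.  A tiny
standalone sketch of that contrast (the lowercase helpers and the feature
struct are placeholders for this example only, not the glibc macros):

#include <stdio.h>

/* Placeholder model of one feature's two bits: "cpu" is what CPUID
   reported, "usable" additionally requires OS support.  */
struct feature_bits { int cpu; int usable; };

static int has_cpu_feature (struct feature_bits f) { return f.cpu; }
static int cpu_feature_usable (struct feature_bits f) { return f.usable; }

int
main (void)
{
  /* Example: a CPU with AVX2 on a kernel that never enabled the YMM
     state, so the feature must not be used.  */
  struct feature_bits avx2 = { .cpu = 1, .usable = 0 };

  if (cpu_feature_usable (avx2))
    puts ("select the AVX2 implementation");
  else if (has_cpu_feature (avx2))
    puts ("CPU reports AVX2, but it is not usable; fall back");
  else
    puts ("no AVX2 at all");
  return 0;
}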

63 files changed:
sysdeps/i386/fpu/fclrexcpt.c
sysdeps/i386/fpu/fedisblxcpt.c
sysdeps/i386/fpu/feenablxcpt.c
sysdeps/i386/fpu/fegetenv.c
sysdeps/i386/fpu/fegetmode.c
sysdeps/i386/fpu/feholdexcpt.c
sysdeps/i386/fpu/fesetenv.c
sysdeps/i386/fpu/fesetmode.c
sysdeps/i386/fpu/fesetround.c
sysdeps/i386/fpu/feupdateenv.c
sysdeps/i386/fpu/fgetexcptflg.c
sysdeps/i386/fpu/fsetexcptflg.c
sysdeps/i386/fpu/ftestexcept.c
sysdeps/i386/i686/fpu/multiarch/s_cosf.c
sysdeps/i386/i686/fpu/multiarch/s_sincosf.c
sysdeps/i386/i686/fpu/multiarch/s_sinf.c
sysdeps/i386/i686/multiarch/ifunc-impl-list.c
sysdeps/i386/i686/multiarch/ifunc-memmove.h
sysdeps/i386/i686/multiarch/ifunc-memset.h
sysdeps/i386/i686/multiarch/ifunc-sse2-bsf.h
sysdeps/i386/i686/multiarch/ifunc-sse2-ssse3.h
sysdeps/i386/i686/multiarch/ifunc-sse2.h
sysdeps/i386/i686/multiarch/ifunc-sse4_2.h
sysdeps/i386/i686/multiarch/ifunc-ssse3-sse4_2.h
sysdeps/i386/i686/multiarch/s_fma.c
sysdeps/i386/i686/multiarch/s_fmaf.c
sysdeps/i386/i686/multiarch/wcscpy.c
sysdeps/i386/setfpucw.c
sysdeps/unix/sysv/linux/x86/elision-conf.c
sysdeps/x86/cacheinfo.c
sysdeps/x86/cpu-features.c
sysdeps/x86/cpu-features.h
sysdeps/x86/cpu-tunables.c
sysdeps/x86/dl-cet.c
sysdeps/x86/tst-get-cpu-features.c
sysdeps/x86_64/Makefile
sysdeps/x86_64/dl-machine.h
sysdeps/x86_64/fpu/math-tests-arch.h
sysdeps/x86_64/fpu/multiarch/ifunc-avx-fma4.h
sysdeps/x86_64/fpu/multiarch/ifunc-fma.h
sysdeps/x86_64/fpu/multiarch/ifunc-fma4.h
sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-avx2.h
sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-avx512.h
sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-sse4_1.h
sysdeps/x86_64/fpu/multiarch/ifunc-sse4_1.h
sysdeps/x86_64/fpu/multiarch/s_fma.c
sysdeps/x86_64/fpu/multiarch/s_fmaf.c
sysdeps/x86_64/multiarch/ifunc-avx2.h
sysdeps/x86_64/multiarch/ifunc-impl-list.c
sysdeps/x86_64/multiarch/ifunc-memcmp.h
sysdeps/x86_64/multiarch/ifunc-memmove.h
sysdeps/x86_64/multiarch/ifunc-memset.h
sysdeps/x86_64/multiarch/ifunc-sse4_2.h
sysdeps/x86_64/multiarch/ifunc-strcasecmp.h
sysdeps/x86_64/multiarch/ifunc-strcpy.h
sysdeps/x86_64/multiarch/ifunc-wmemset.h
sysdeps/x86_64/multiarch/sched_cpucount.c
sysdeps/x86_64/multiarch/strchr.c
sysdeps/x86_64/multiarch/strcmp.c
sysdeps/x86_64/multiarch/strncmp.c
sysdeps/x86_64/multiarch/test-multiarch.c
sysdeps/x86_64/multiarch/wcscpy.c
sysdeps/x86_64/multiarch/wcsnlen.c

index 7bf7dd0a8ac3faac4cf55c2acb40ffbdc9aec7da..7dc357f2d62040166e52bf3d21623ef4e4a1c956 100644 (file)
@@ -41,7 +41,7 @@ __feclearexcept (int excepts)
   __asm__ ("fldenv %0" : : "m" (*&temp));
 
   /* If the CPU supports SSE, we clear the MXCSR as well.  */
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     {
       unsigned int xnew_exc;
 
index 0e518f7f3dbcb4d237c05198e8630ffd0142f94c..5399bc1f251a6db154694baa8b0f9f59f0e33a97 100644 (file)
@@ -38,7 +38,7 @@ fedisableexcept (int excepts)
   __asm__ ("fldcw %0" : : "m" (*&new_exc));
 
   /* If the CPU supports SSE we set the MXCSR as well.  */
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     {
       unsigned int xnew_exc;
 
index b1f70815b1a8b70727d907a593e58a0a454fc5ec..b9d7e65668fc922245121302d4c6d79f26cb8708 100644 (file)
@@ -38,7 +38,7 @@ feenableexcept (int excepts)
   __asm__ ("fldcw %0" : : "m" (*&new_exc));
 
   /* If the CPU supports SSE we set the MXCSR as well.  */
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     {
       unsigned int xnew_exc;
 
index cb6ef35ac4c8419180443feea9271471ea219abd..637bc854545f15e94561b203b00ba7beab5c3691 100644 (file)
@@ -31,7 +31,7 @@ __fegetenv (fenv_t *envp)
      would block all exceptions.  */
   __asm__ ("fldenv %0" : : "m" (*envp));
 
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     __asm__ ("stmxcsr %0" : "=m" (envp->__eip));
 
   /* Success.  */
index e14768976c2a37d132633441109a9396f3628eef..e5154eab02f47fbbc75ad9c1dd93cde1a68c4d06 100644 (file)
@@ -26,7 +26,7 @@ int
 fegetmode (femode_t *modep)
 {
   _FPU_GETCW (modep->__control_word);
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     __asm__ ("stmxcsr %0" : "=m" (modep->__mxcsr));
   return 0;
 }
index ad25339b4e38df617e0f64ef82224675cdeee8b1..8d2d0ee27537e462288878f693de3fa22e58420e 100644 (file)
@@ -30,7 +30,7 @@ __feholdexcept (fenv_t *envp)
   __asm__ volatile ("fnstenv %0; fnclex" : "=m" (*envp));
 
   /* If the CPU supports SSE we set the MXCSR as well.  */
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     {
       unsigned int xwork;
 
index 5ec7bd6126a921337028a37a6aedfba23533209a..cd9afeae28531ca34668878693af841f82f99db3 100644 (file)
@@ -79,7 +79,7 @@ __fesetenv (const fenv_t *envp)
 
   __asm__ ("fldenv %0" : : "m" (temp));
 
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     {
       unsigned int mxcsr;
       __asm__ ("stmxcsr %0" : "=m" (mxcsr));
index 4563da09012043b62e82f39dfb225a0fe37ce8d9..e3b30657b10fea4688b0a4f678e8b73f9b86b21e 100644 (file)
@@ -35,7 +35,7 @@ fesetmode (const femode_t *modep)
   else
     cw = modep->__control_word;
   _FPU_SETCW (cw);
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     {
       unsigned int mxcsr;
       __asm__ ("stmxcsr %0" : "=m" (mxcsr));
index 18320a646b09b6ac7d12a7ea78315b4ca06be91d..5c3fd34cd4ee6a87c1bacc955aabb4a3166fccf5 100644 (file)
@@ -37,7 +37,7 @@ __fesetround (int round)
   __asm__ ("fldcw %0" : : "m" (*&cw));
 
   /* If the CPU supports SSE we set the MXCSR as well.  */
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     {
       unsigned int xcw;
 
index 7387831decbcaf1e7876f7ca1942cb47b0025d7c..ef7132e4f078f41da1c1111b20b1c547bd56a249 100644 (file)
@@ -32,7 +32,7 @@ __feupdateenv (const fenv_t *envp)
   __asm__ ("fnstsw %0" : "=m" (*&temp));
 
   /* If the CPU supports SSE we test the MXCSR as well.  */
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     __asm__ ("stmxcsr %0" : "=m" (*&xtemp));
 
   temp = (temp | xtemp) & FE_ALL_EXCEPT;
index 82b2aa53ded7a09c1ab224d2e8316ac706bb24be..2c32c83636db30deaad6968bf93c243af7346697 100644 (file)
@@ -34,7 +34,7 @@ __fegetexceptflag (fexcept_t *flagp, int excepts)
   *flagp = temp & excepts & FE_ALL_EXCEPT;
 
   /* If the CPU supports SSE, we clear the MXCSR as well.  */
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     {
       unsigned int sse_exc;
 
index dc257b80771eb3e0d059a91bcf1826bad4c60cad..02a1bd526d001c295e7e65c042af3144d4634a59 100644 (file)
@@ -41,7 +41,7 @@ __fesetexceptflag (const fexcept_t *flagp, int excepts)
   __asm__ ("fldenv %0" : : "m" (*&temp));
 
   /* If the CPU supports SSE, we set the MXCSR as well.  */
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     {
       unsigned int xnew_exc;
 
index 9c22689ca5e7d63d92d963ff8d3b844edbb80989..a00c44e6db6c7d971e17cb6a2ecaf4ed93f6b235 100644 (file)
@@ -32,7 +32,7 @@ fetestexcept (int excepts)
   __asm__ ("fnstsw %0" : "=a" (temp));
 
   /* If the CPU supports SSE we test the MXCSR as well.  */
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     __asm__ ("stmxcsr %0" : "=m" (*&xtemp));
 
   return (temp | xtemp) & excepts & FE_ALL_EXCEPT;
index 8da7d4bd66439d61a085d32aea4b91bc0c798d9d..9cd14a103b3007c58237bb1324aec4e23f0dd304 100644 (file)
@@ -23,7 +23,7 @@
 extern float __cosf_sse2 (float);
 extern float __cosf_ia32 (float);
 
-libm_ifunc (__cosf, HAS_CPU_FEATURE (SSE2) ? __cosf_sse2 : __cosf_ia32);
+libm_ifunc (__cosf, CPU_FEATURE_USABLE (SSE2) ? __cosf_sse2 : __cosf_ia32);
 libm_alias_float (__cos, cos);
 
 #define COSF __cosf_ia32
index 06d094dced94d267a324639155b6365e85995493..9b479142d048c36958327fe8fba18a901c2e865e 100644 (file)
@@ -24,7 +24,7 @@ extern void __sincosf_sse2 (float, float *, float *);
 extern void __sincosf_ia32 (float, float *, float *);
 
 libm_ifunc (__sincosf,
-           HAS_CPU_FEATURE (SSE2) ? __sincosf_sse2 : __sincosf_ia32);
+           CPU_FEATURE_USABLE (SSE2) ? __sincosf_sse2 : __sincosf_ia32);
 libm_alias_float (__sincos, sincos);
 
 #define SINCOSF __sincosf_ia32
index abd355ebaca14326fbd7e7da2a9717553a8b2ea2..84977e63e89206687a97a80f43fa848ba532d0d4 100644 (file)
@@ -23,7 +23,7 @@
 extern float __sinf_sse2 (float);
 extern float __sinf_ia32 (float);
 
-libm_ifunc (__sinf, HAS_CPU_FEATURE (SSE2) ? __sinf_sse2 : __sinf_ia32);
+libm_ifunc (__sinf, CPU_FEATURE_USABLE (SSE2) ? __sinf_sse2 : __sinf_ia32);
 libm_alias_float (__sin, sin);
 #define SINF __sinf_ia32
 #include <sysdeps/ieee754/flt-32/s_sinf.c>
index 23774fbe8a7fe872c5f37469677c21c9308cc6ec..89afdc03262f6be382f0596cab4c704101a76280 100644 (file)
@@ -38,35 +38,35 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
 
   /* Support sysdeps/i386/i686/multiarch/bcopy.S.  */
   IFUNC_IMPL (i, name, bcopy,
-             IFUNC_IMPL_ADD (array, i, bcopy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, bcopy, CPU_FEATURE_USABLE (SSSE3),
                              __bcopy_ssse3_rep)
-             IFUNC_IMPL_ADD (array, i, bcopy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, bcopy, CPU_FEATURE_USABLE (SSSE3),
                              __bcopy_ssse3)
-             IFUNC_IMPL_ADD (array, i, bcopy, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, bcopy, CPU_FEATURE_USABLE (SSE2),
                              __bcopy_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, bcopy, 1, __bcopy_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/bzero.S.  */
   IFUNC_IMPL (i, name, bzero,
-             IFUNC_IMPL_ADD (array, i, bzero, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, bzero, CPU_FEATURE_USABLE (SSE2),
                              __bzero_sse2_rep)
-             IFUNC_IMPL_ADD (array, i, bzero, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, bzero, CPU_FEATURE_USABLE (SSE2),
                              __bzero_sse2)
              IFUNC_IMPL_ADD (array, i, bzero, 1, __bzero_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/memchr.S.  */
   IFUNC_IMPL (i, name, memchr,
-             IFUNC_IMPL_ADD (array, i, memchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, memchr, CPU_FEATURE_USABLE (SSE2),
                              __memchr_sse2_bsf)
-             IFUNC_IMPL_ADD (array, i, memchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, memchr, CPU_FEATURE_USABLE (SSE2),
                              __memchr_sse2)
              IFUNC_IMPL_ADD (array, i, memchr, 1, __memchr_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/memcmp.S.  */
   IFUNC_IMPL (i, name, memcmp,
-             IFUNC_IMPL_ADD (array, i, memcmp, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, memcmp, CPU_FEATURE_USABLE (SSE4_2),
                              __memcmp_sse4_2)
-             IFUNC_IMPL_ADD (array, i, memcmp, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, memcmp, CPU_FEATURE_USABLE (SSSE3),
                              __memcmp_ssse3)
              IFUNC_IMPL_ADD (array, i, memcmp, 1, __memcmp_ia32))
 
@@ -74,13 +74,13 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/i386/i686/multiarch/memmove_chk.S.  */
   IFUNC_IMPL (i, name, __memmove_chk,
              IFUNC_IMPL_ADD (array, i, __memmove_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __memmove_chk_ssse3_rep)
              IFUNC_IMPL_ADD (array, i, __memmove_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __memmove_chk_ssse3)
              IFUNC_IMPL_ADD (array, i, __memmove_chk,
-                             HAS_CPU_FEATURE (SSE2),
+                             CPU_FEATURE_USABLE (SSE2),
                              __memmove_chk_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, __memmove_chk, 1,
                              __memmove_chk_ia32))
@@ -88,19 +88,19 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
 
   /* Support sysdeps/i386/i686/multiarch/memmove.S.  */
   IFUNC_IMPL (i, name, memmove,
-             IFUNC_IMPL_ADD (array, i, memmove, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, memmove, CPU_FEATURE_USABLE (SSSE3),
                              __memmove_ssse3_rep)
-             IFUNC_IMPL_ADD (array, i, memmove, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, memmove, CPU_FEATURE_USABLE (SSSE3),
                              __memmove_ssse3)
-             IFUNC_IMPL_ADD (array, i, memmove, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, memmove, CPU_FEATURE_USABLE (SSE2),
                              __memmove_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, memmove, 1, __memmove_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/memrchr.S.  */
   IFUNC_IMPL (i, name, memrchr,
-             IFUNC_IMPL_ADD (array, i, memrchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, memrchr, CPU_FEATURE_USABLE (SSE2),
                              __memrchr_sse2_bsf)
-             IFUNC_IMPL_ADD (array, i, memrchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, memrchr, CPU_FEATURE_USABLE (SSE2),
                              __memrchr_sse2)
              IFUNC_IMPL_ADD (array, i, memrchr, 1, __memrchr_ia32))
 
@@ -108,10 +108,10 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/i386/i686/multiarch/memset_chk.S.  */
   IFUNC_IMPL (i, name, __memset_chk,
              IFUNC_IMPL_ADD (array, i, __memset_chk,
-                             HAS_CPU_FEATURE (SSE2),
+                             CPU_FEATURE_USABLE (SSE2),
                              __memset_chk_sse2_rep)
              IFUNC_IMPL_ADD (array, i, __memset_chk,
-                             HAS_CPU_FEATURE (SSE2),
+                             CPU_FEATURE_USABLE (SSE2),
                              __memset_chk_sse2)
              IFUNC_IMPL_ADD (array, i, __memset_chk, 1,
                              __memset_chk_ia32))
@@ -119,102 +119,102 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
 
   /* Support sysdeps/i386/i686/multiarch/memset.S.  */
   IFUNC_IMPL (i, name, memset,
-             IFUNC_IMPL_ADD (array, i, memset, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, memset, CPU_FEATURE_USABLE (SSE2),
                              __memset_sse2_rep)
-             IFUNC_IMPL_ADD (array, i, memset, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, memset, CPU_FEATURE_USABLE (SSE2),
                              __memset_sse2)
              IFUNC_IMPL_ADD (array, i, memset, 1, __memset_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/rawmemchr.S.  */
   IFUNC_IMPL (i, name, rawmemchr,
-             IFUNC_IMPL_ADD (array, i, rawmemchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, rawmemchr, CPU_FEATURE_USABLE (SSE2),
                              __rawmemchr_sse2_bsf)
-             IFUNC_IMPL_ADD (array, i, rawmemchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, rawmemchr, CPU_FEATURE_USABLE (SSE2),
                              __rawmemchr_sse2)
              IFUNC_IMPL_ADD (array, i, rawmemchr, 1, __rawmemchr_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/stpncpy.S.  */
   IFUNC_IMPL (i, name, stpncpy,
-             IFUNC_IMPL_ADD (array, i, stpncpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, stpncpy, CPU_FEATURE_USABLE (SSSE3),
                              __stpncpy_ssse3)
-             IFUNC_IMPL_ADD (array, i, stpncpy, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, stpncpy, CPU_FEATURE_USABLE (SSE2),
                              __stpncpy_sse2)
              IFUNC_IMPL_ADD (array, i, stpncpy, 1, __stpncpy_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/stpcpy.S.  */
   IFUNC_IMPL (i, name, stpcpy,
-             IFUNC_IMPL_ADD (array, i, stpcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, stpcpy, CPU_FEATURE_USABLE (SSSE3),
                              __stpcpy_ssse3)
-             IFUNC_IMPL_ADD (array, i, stpcpy, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, stpcpy, CPU_FEATURE_USABLE (SSE2),
                              __stpcpy_sse2)
              IFUNC_IMPL_ADD (array, i, stpcpy, 1, __stpcpy_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strcasecmp.S.  */
   IFUNC_IMPL (i, name, strcasecmp,
              IFUNC_IMPL_ADD (array, i, strcasecmp,
-                             HAS_CPU_FEATURE (SSE4_2),
+                             CPU_FEATURE_USABLE (SSE4_2),
                              __strcasecmp_sse4_2)
              IFUNC_IMPL_ADD (array, i, strcasecmp,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __strcasecmp_ssse3)
              IFUNC_IMPL_ADD (array, i, strcasecmp, 1, __strcasecmp_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strcasecmp_l.S.  */
   IFUNC_IMPL (i, name, strcasecmp_l,
              IFUNC_IMPL_ADD (array, i, strcasecmp_l,
-                             HAS_CPU_FEATURE (SSE4_2),
+                             CPU_FEATURE_USABLE (SSE4_2),
                              __strcasecmp_l_sse4_2)
              IFUNC_IMPL_ADD (array, i, strcasecmp_l,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __strcasecmp_l_ssse3)
              IFUNC_IMPL_ADD (array, i, strcasecmp_l, 1,
                              __strcasecmp_l_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strcat.S.  */
   IFUNC_IMPL (i, name, strcat,
-             IFUNC_IMPL_ADD (array, i, strcat, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strcat, CPU_FEATURE_USABLE (SSSE3),
                              __strcat_ssse3)
-             IFUNC_IMPL_ADD (array, i, strcat, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strcat, CPU_FEATURE_USABLE (SSE2),
                              __strcat_sse2)
              IFUNC_IMPL_ADD (array, i, strcat, 1, __strcat_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strchr.S.  */
   IFUNC_IMPL (i, name, strchr,
-             IFUNC_IMPL_ADD (array, i, strchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strchr, CPU_FEATURE_USABLE (SSE2),
                              __strchr_sse2_bsf)
-             IFUNC_IMPL_ADD (array, i, strchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strchr, CPU_FEATURE_USABLE (SSE2),
                              __strchr_sse2)
              IFUNC_IMPL_ADD (array, i, strchr, 1, __strchr_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strcmp.S.  */
   IFUNC_IMPL (i, name, strcmp,
-             IFUNC_IMPL_ADD (array, i, strcmp, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, strcmp, CPU_FEATURE_USABLE (SSE4_2),
                              __strcmp_sse4_2)
-             IFUNC_IMPL_ADD (array, i, strcmp, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strcmp, CPU_FEATURE_USABLE (SSSE3),
                              __strcmp_ssse3)
              IFUNC_IMPL_ADD (array, i, strcmp, 1, __strcmp_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strcpy.S.  */
   IFUNC_IMPL (i, name, strcpy,
-             IFUNC_IMPL_ADD (array, i, strcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strcpy, CPU_FEATURE_USABLE (SSSE3),
                              __strcpy_ssse3)
-             IFUNC_IMPL_ADD (array, i, strcpy, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strcpy, CPU_FEATURE_USABLE (SSE2),
                              __strcpy_sse2)
              IFUNC_IMPL_ADD (array, i, strcpy, 1, __strcpy_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strcspn.S.  */
   IFUNC_IMPL (i, name, strcspn,
-             IFUNC_IMPL_ADD (array, i, strcspn, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, strcspn, CPU_FEATURE_USABLE (SSE4_2),
                              __strcspn_sse42)
              IFUNC_IMPL_ADD (array, i, strcspn, 1, __strcspn_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strncase.S.  */
   IFUNC_IMPL (i, name, strncasecmp,
              IFUNC_IMPL_ADD (array, i, strncasecmp,
-                             HAS_CPU_FEATURE (SSE4_2),
+                             CPU_FEATURE_USABLE (SSE4_2),
                              __strncasecmp_sse4_2)
              IFUNC_IMPL_ADD (array, i, strncasecmp,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __strncasecmp_ssse3)
              IFUNC_IMPL_ADD (array, i, strncasecmp, 1,
                              __strncasecmp_ia32))
@@ -222,91 +222,91 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/i386/i686/multiarch/strncase_l.S.  */
   IFUNC_IMPL (i, name, strncasecmp_l,
              IFUNC_IMPL_ADD (array, i, strncasecmp_l,
-                             HAS_CPU_FEATURE (SSE4_2),
+                             CPU_FEATURE_USABLE (SSE4_2),
                              __strncasecmp_l_sse4_2)
              IFUNC_IMPL_ADD (array, i, strncasecmp_l,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __strncasecmp_l_ssse3)
              IFUNC_IMPL_ADD (array, i, strncasecmp_l, 1,
                              __strncasecmp_l_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strncat.S.  */
   IFUNC_IMPL (i, name, strncat,
-             IFUNC_IMPL_ADD (array, i, strncat, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strncat, CPU_FEATURE_USABLE (SSSE3),
                              __strncat_ssse3)
-             IFUNC_IMPL_ADD (array, i, strncat, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strncat, CPU_FEATURE_USABLE (SSE2),
                              __strncat_sse2)
              IFUNC_IMPL_ADD (array, i, strncat, 1, __strncat_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strncpy.S.  */
   IFUNC_IMPL (i, name, strncpy,
-             IFUNC_IMPL_ADD (array, i, strncpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strncpy, CPU_FEATURE_USABLE (SSSE3),
                              __strncpy_ssse3)
-             IFUNC_IMPL_ADD (array, i, strncpy, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strncpy, CPU_FEATURE_USABLE (SSE2),
                              __strncpy_sse2)
              IFUNC_IMPL_ADD (array, i, strncpy, 1, __strncpy_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strnlen.S.  */
   IFUNC_IMPL (i, name, strnlen,
-             IFUNC_IMPL_ADD (array, i, strnlen, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strnlen, CPU_FEATURE_USABLE (SSE2),
                              __strnlen_sse2)
              IFUNC_IMPL_ADD (array, i, strnlen, 1, __strnlen_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strpbrk.S.  */
   IFUNC_IMPL (i, name, strpbrk,
-             IFUNC_IMPL_ADD (array, i, strpbrk, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, strpbrk, CPU_FEATURE_USABLE (SSE4_2),
                              __strpbrk_sse42)
              IFUNC_IMPL_ADD (array, i, strpbrk, 1, __strpbrk_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strrchr.S.  */
   IFUNC_IMPL (i, name, strrchr,
-             IFUNC_IMPL_ADD (array, i, strrchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strrchr, CPU_FEATURE_USABLE (SSE2),
                              __strrchr_sse2_bsf)
-             IFUNC_IMPL_ADD (array, i, strrchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strrchr, CPU_FEATURE_USABLE (SSE2),
                              __strrchr_sse2)
              IFUNC_IMPL_ADD (array, i, strrchr, 1, __strrchr_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strspn.S.  */
   IFUNC_IMPL (i, name, strspn,
-             IFUNC_IMPL_ADD (array, i, strspn, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, strspn, CPU_FEATURE_USABLE (SSE4_2),
                              __strspn_sse42)
              IFUNC_IMPL_ADD (array, i, strspn, 1, __strspn_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/wcschr.S.  */
   IFUNC_IMPL (i, name, wcschr,
-             IFUNC_IMPL_ADD (array, i, wcschr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, wcschr, CPU_FEATURE_USABLE (SSE2),
                              __wcschr_sse2)
              IFUNC_IMPL_ADD (array, i, wcschr, 1, __wcschr_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/wcscmp.S.  */
   IFUNC_IMPL (i, name, wcscmp,
-             IFUNC_IMPL_ADD (array, i, wcscmp, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, wcscmp, CPU_FEATURE_USABLE (SSE2),
                              __wcscmp_sse2)
              IFUNC_IMPL_ADD (array, i, wcscmp, 1, __wcscmp_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/wcscpy.S.  */
   IFUNC_IMPL (i, name, wcscpy,
-             IFUNC_IMPL_ADD (array, i, wcscpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, wcscpy, CPU_FEATURE_USABLE (SSSE3),
                              __wcscpy_ssse3)
              IFUNC_IMPL_ADD (array, i, wcscpy, 1, __wcscpy_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/wcslen.S.  */
   IFUNC_IMPL (i, name, wcslen,
-             IFUNC_IMPL_ADD (array, i, wcslen, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, wcslen, CPU_FEATURE_USABLE (SSE2),
                              __wcslen_sse2)
              IFUNC_IMPL_ADD (array, i, wcslen, 1, __wcslen_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/wcsrchr.S.  */
   IFUNC_IMPL (i, name, wcsrchr,
-             IFUNC_IMPL_ADD (array, i, wcsrchr, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, wcsrchr, CPU_FEATURE_USABLE (SSE2),
                              __wcsrchr_sse2)
              IFUNC_IMPL_ADD (array, i, wcsrchr, 1, __wcsrchr_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/wmemcmp.S.  */
   IFUNC_IMPL (i, name, wmemcmp,
-             IFUNC_IMPL_ADD (array, i, wmemcmp, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, wmemcmp, CPU_FEATURE_USABLE (SSE4_2),
                              __wmemcmp_sse4_2)
-             IFUNC_IMPL_ADD (array, i, wmemcmp, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, wmemcmp, CPU_FEATURE_USABLE (SSSE3),
                              __wmemcmp_ssse3)
              IFUNC_IMPL_ADD (array, i, wmemcmp, 1, __wmemcmp_ia32))
 
@@ -314,64 +314,64 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/i386/i686/multiarch/memcpy_chk.S.  */
   IFUNC_IMPL (i, name, __memcpy_chk,
              IFUNC_IMPL_ADD (array, i, __memcpy_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __memcpy_chk_ssse3_rep)
              IFUNC_IMPL_ADD (array, i, __memcpy_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __memcpy_chk_ssse3)
              IFUNC_IMPL_ADD (array, i, __memcpy_chk,
-                             HAS_CPU_FEATURE (SSE2),
+                             CPU_FEATURE_USABLE (SSE2),
                              __memcpy_chk_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, __memcpy_chk, 1,
                              __memcpy_chk_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/memcpy.S.  */
   IFUNC_IMPL (i, name, memcpy,
-             IFUNC_IMPL_ADD (array, i, memcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, memcpy, CPU_FEATURE_USABLE (SSSE3),
                              __memcpy_ssse3_rep)
-             IFUNC_IMPL_ADD (array, i, memcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, memcpy, CPU_FEATURE_USABLE (SSSE3),
                              __memcpy_ssse3)
-             IFUNC_IMPL_ADD (array, i, memcpy, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, memcpy, CPU_FEATURE_USABLE (SSE2),
                              __memcpy_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, memcpy, 1, __memcpy_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/mempcpy_chk.S.  */
   IFUNC_IMPL (i, name, __mempcpy_chk,
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __mempcpy_chk_ssse3_rep)
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __mempcpy_chk_ssse3)
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
-                             HAS_CPU_FEATURE (SSE2),
+                             CPU_FEATURE_USABLE (SSE2),
                              __mempcpy_chk_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk, 1,
                              __mempcpy_chk_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/mempcpy.S.  */
   IFUNC_IMPL (i, name, mempcpy,
-             IFUNC_IMPL_ADD (array, i, mempcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, mempcpy, CPU_FEATURE_USABLE (SSSE3),
                              __mempcpy_ssse3_rep)
-             IFUNC_IMPL_ADD (array, i, mempcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, mempcpy, CPU_FEATURE_USABLE (SSSE3),
                              __mempcpy_ssse3)
-             IFUNC_IMPL_ADD (array, i, mempcpy, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, mempcpy, CPU_FEATURE_USABLE (SSE2),
                              __mempcpy_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, mempcpy, 1, __mempcpy_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strlen.S.  */
   IFUNC_IMPL (i, name, strlen,
-             IFUNC_IMPL_ADD (array, i, strlen, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strlen, CPU_FEATURE_USABLE (SSE2),
                              __strlen_sse2_bsf)
-             IFUNC_IMPL_ADD (array, i, strlen, HAS_CPU_FEATURE (SSE2),
+             IFUNC_IMPL_ADD (array, i, strlen, CPU_FEATURE_USABLE (SSE2),
                              __strlen_sse2)
              IFUNC_IMPL_ADD (array, i, strlen, 1, __strlen_ia32))
 
   /* Support sysdeps/i386/i686/multiarch/strncmp.S.  */
   IFUNC_IMPL (i, name, strncmp,
-             IFUNC_IMPL_ADD (array, i, strncmp, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, strncmp, CPU_FEATURE_USABLE (SSE4_2),
                              __strncmp_sse4_2)
-             IFUNC_IMPL_ADD (array, i, strncmp, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strncmp, CPU_FEATURE_USABLE (SSSE3),
                              __strncmp_ssse3)
              IFUNC_IMPL_ADD (array, i, strncmp, 1, __strncmp_ia32))
 #endif
index a590048d1dc16a20a1311355f1aadac4b6d91ec4..c05cb6dd4f7ae0f50b8d7e3b62b4626d70161749 100644 (file)
@@ -33,7 +33,7 @@ IFUNC_SELECTOR (void)
   if (CPU_FEATURES_ARCH_P (cpu_features, Fast_Unaligned_Load))
     return OPTIMIZE (sse2_unaligned);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
     {
       if (CPU_FEATURES_ARCH_P (cpu_features, Fast_Rep_String))
        return OPTIMIZE (ssse3_rep);
index 14199c30fd3db5b7fa410dfe01f38e757884d035..bead331a9d23025eccf5ddd1598b2b3219934d6a 100644 (file)
@@ -28,7 +28,7 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE2))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE2))
     {
       if (CPU_FEATURES_ARCH_P (cpu_features, Fast_Rep_String))
        return OPTIMIZE (sse2_rep);
index 8b6fa6447d77db4ca3865fcd6af4be1c79317eb1..0d302a3dcd3a206de519b446fb7c19c4fbec63be 100644 (file)
@@ -28,7 +28,7 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE2))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE2))
     {
       if (CPU_FEATURES_ARCH_P (cpu_features, Slow_BSF))
        return OPTIMIZE (sse2);
index 77b615e40d57e136b905ad075ea7ac3a52851aa6..c10ca4a9df137ae77926314b455cb63d5dddb146 100644 (file)
@@ -29,11 +29,11 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE2)
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE2)
       && CPU_FEATURES_ARCH_P (cpu_features, Fast_Rep_String))
     return OPTIMIZE (sse2);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
     return OPTIMIZE (ssse3);
 
   return OPTIMIZE (ia32);
index c0dd85e2bb9828eee701bf5ab3565f559637136b..58794a28064816a5164df7e6a406ec4fb66833cf 100644 (file)
@@ -27,7 +27,7 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE2))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE2))
     return OPTIMIZE (sse2);
 
   return OPTIMIZE (ia32);
index b9b06d59961d2dcd8d3fb2613835433aa46c0b59..014be1d5f7c93baacf404c144c4576dbc2f36418 100644 (file)
@@ -27,7 +27,7 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_2))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_2))
     return OPTIMIZE (sse42);
 
   return OPTIMIZE (ia32);
index b4074f3f8f16f7c6ae949c4fb96893f58539d59c..39bfea986d3eecb013b756387ed9fd244b43bb6f 100644 (file)
@@ -29,10 +29,10 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_2))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_2))
     return OPTIMIZE (sse4_2);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
     return OPTIMIZE (ssse3);
 
   return OPTIMIZE (ia32);
index 90f649f52a831edfddd709fecdcec6c0e446440f..0729853e21f69bc1239875cfe87f4050f73902cd 100644 (file)
@@ -27,7 +27,7 @@ extern double __fma_ia32 (double x, double y, double z) attribute_hidden;
 extern double __fma_fma (double x, double y, double z) attribute_hidden;
 
 libm_ifunc (__fma,
-           HAS_ARCH_FEATURE (FMA_Usable) ? __fma_fma : __fma_ia32);
+           CPU_FEATURE_USABLE (FMA) ? __fma_fma : __fma_ia32);
 libm_alias_double (__fma, fma)
 
 #define __fma __fma_ia32
index 27757eca9d23eebc882763b05e0f99a00af2d3be..20f965c3422895404ac68ebf6debb89ed91f29ff 100644 (file)
@@ -27,7 +27,7 @@ extern float __fmaf_ia32 (float x, float y, float z) attribute_hidden;
 extern float __fmaf_fma (float x, float y, float z) attribute_hidden;
 
 libm_ifunc (__fmaf,
-           HAS_ARCH_FEATURE (FMA_Usable) ? __fmaf_fma : __fmaf_ia32);
+           CPU_FEATURE_USABLE (FMA) ? __fmaf_fma : __fmaf_ia32);
 libm_alias_float (__fma, fma)
 
 #define __fmaf __fmaf_ia32
index 51347d70f5b6715c0241feb0df70542a5a30d059..f0038bc4a2b91285534c1582a34d99a5d17d4eb3 100644 (file)
@@ -34,7 +34,7 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
     return OPTIMIZE (ssse3);
 
   return OPTIMIZE (ia32);
index 68f5b2e86c417de30e0fe85d35eedc89d08b1edf..c640a72cc2a9539f049129205cf849e973f008a2 100644 (file)
@@ -39,7 +39,7 @@ __setfpucw (fpu_control_t set)
   __asm__ ("fldcw %0" : : "m" (*&cw));
 
   /* If the CPU supports SSE, we set the MXCSR as well.  */
-  if (HAS_CPU_FEATURE (SSE))
+  if (CPU_FEATURE_USABLE (SSE))
     {
       unsigned int xnew_exc;
 
index b38b4250e807d3c44453833be537140016908db2..ecdb0378e3152942f3c26dfb57d7ec726004bc87 100644 (file)
@@ -63,7 +63,7 @@ do_set_elision_enable (int32_t elision_enable)
      if __libc_enable_secure isn't enabled since elision_enable will be set
      according to the default, which is disabled.  */
   if (elision_enable == 1)
-    __pthread_force_elision = HAS_CPU_FEATURE (RTM) ? 1 : 0;
+    __pthread_force_elision = CPU_FEATURE_USABLE (RTM) ? 1 : 0;
 }
 
 /* The pthread->elision_enable tunable is 0 or 1 indicating that elision
index 5366a37ea0369efac6bb2429fa95fe148c133efc..217c21c34ff2b5273a5d969a2a627d98ec524527 100644 (file)
@@ -583,7 +583,7 @@ get_common_cache_info (long int *shared_ptr, unsigned int *threads_ptr,
 
   /* A value of 0 for the HTT bit indicates there is only a single
      logical processor.  */
-  if (HAS_CPU_FEATURE (HTT))
+  if (CPU_FEATURE_USABLE (HTT))
     {
       /* Figure out the number of logical threads that share the
          highest cache level.  */
@@ -732,7 +732,7 @@ intel_bug_no_cache_info:
           /* Assume that all logical threads share the highest cache
              level.  */
           threads
-            = ((cpu_features->cpuid[COMMON_CPUID_INDEX_1].ebx
+            = ((cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.ebx
                 >> 16) & 0xff);
         }
 
@@ -867,14 +867,14 @@ init_cacheinfo (void)
   unsigned int minimum_rep_movsb_threshold;
   /* NB: The default REP MOVSB threshold is 2048 * (VEC_SIZE / 16).  */
   unsigned int rep_movsb_threshold;
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable)
-      && !CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_AVX512))
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX512F)
+      && !CPU_FEATURE_PREFERRED_P (cpu_features, Prefer_No_AVX512))
     {
       rep_movsb_threshold = 2048 * (64 / 16);
       minimum_rep_movsb_threshold = 64 * 8;
     }
-  else if (CPU_FEATURES_ARCH_P (cpu_features,
-                               AVX_Fast_Unaligned_Load))
+  else if (CPU_FEATURE_PREFERRED_P (cpu_features,
+                                   AVX_Fast_Unaligned_Load))
     {
       rep_movsb_threshold = 2048 * (32 / 16);
       minimum_rep_movsb_threshold = 32 * 8;
index c7673a2eb92b4d6f15a2dc5c77074f5dc2faa5ec..4c24ba7c3143f3ab472432e4e179243cec3d3d04 100644 (file)
@@ -42,73 +42,109 @@ extern void TUNABLE_CALLBACK (set_x86_shstk) (tunable_val_t *)
 #endif
 
 static void
-get_extended_indices (struct cpu_features *cpu_features)
+update_usable (struct cpu_features *cpu_features)
 {
-  unsigned int eax, ebx, ecx, edx;
-  __cpuid (0x80000000, eax, ebx, ecx, edx);
-  if (eax >= 0x80000001)
-    __cpuid (0x80000001,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000001].eax,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000001].ebx,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000001].ecx,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000001].edx);
-  if (eax >= 0x80000007)
-    __cpuid (0x80000007,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000007].eax,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000007].ebx,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000007].ecx,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000007].edx);
-  if (eax >= 0x80000008)
-    __cpuid (0x80000008,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000008].eax,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000008].ebx,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000008].ecx,
-            cpu_features->cpuid[COMMON_CPUID_INDEX_80000008].edx);
-}
-
-static void
-get_common_indices (struct cpu_features *cpu_features,
-                   unsigned int *family, unsigned int *model,
-                   unsigned int *extended_model, unsigned int *stepping)
-{
-  if (family)
-    {
-      unsigned int eax;
-      __cpuid (1, eax, cpu_features->cpuid[COMMON_CPUID_INDEX_1].ebx,
-              cpu_features->cpuid[COMMON_CPUID_INDEX_1].ecx,
-              cpu_features->cpuid[COMMON_CPUID_INDEX_1].edx);
-      cpu_features->cpuid[COMMON_CPUID_INDEX_1].eax = eax;
-      *family = (eax >> 8) & 0x0f;
-      *model = (eax >> 4) & 0x0f;
-      *extended_model = (eax >> 12) & 0xf0;
-      *stepping = eax & 0x0f;
-      if (*family == 0x0f)
-       {
-         *family += (eax >> 20) & 0xff;
-         *model += *extended_model;
-       }
-    }
-
-  if (cpu_features->basic.max_cpuid >= 7)
-    {
-      __cpuid_count (7, 0,
-                    cpu_features->cpuid[COMMON_CPUID_INDEX_7].eax,
-                    cpu_features->cpuid[COMMON_CPUID_INDEX_7].ebx,
-                    cpu_features->cpuid[COMMON_CPUID_INDEX_7].ecx,
-                    cpu_features->cpuid[COMMON_CPUID_INDEX_7].edx);
-      __cpuid_count (7, 1,
-                    cpu_features->cpuid[COMMON_CPUID_INDEX_7_ECX_1].eax,
-                    cpu_features->cpuid[COMMON_CPUID_INDEX_7_ECX_1].ebx,
-                    cpu_features->cpuid[COMMON_CPUID_INDEX_7_ECX_1].ecx,
-                    cpu_features->cpuid[COMMON_CPUID_INDEX_7_ECX_1].edx);
-    }
-
-  if (cpu_features->basic.max_cpuid >= 0xd)
-    __cpuid_count (0xd, 1,
-                  cpu_features->cpuid[COMMON_CPUID_INDEX_D_ECX_1].eax,
-                  cpu_features->cpuid[COMMON_CPUID_INDEX_D_ECX_1].ebx,
-                  cpu_features->cpuid[COMMON_CPUID_INDEX_D_ECX_1].ecx,
-                  cpu_features->cpuid[COMMON_CPUID_INDEX_D_ECX_1].edx);
+  /* Before COMMON_CPUID_INDEX_80000001, copy the cpuid array elements to
+     the usable array.  */
+  unsigned int i;
+  for (i = 0; i < COMMON_CPUID_INDEX_80000001; i++)
+    cpu_features->features[i].usable = cpu_features->features[i].cpuid;
+
+  /* Before COMMON_CPUID_INDEX_80000001, clear the unknown usable bits
+     and the always zero bits.  */
+  CPU_FEATURE_UNSET (cpu_features, INDEX_1_ECX_16);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_1_ECX_31);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_1_EDX_10);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_1_EDX_20);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_1_EDX_30);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EBX_6);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EBX_22);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_13);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_15);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_16);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_23);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_24);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_26);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_0);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_1);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_5);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_6);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_7);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_9);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_11);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_12);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_13);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_17);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_19);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_21);
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_23);
+
+  /* EAX/EBX from COMMON_CPUID_INDEX_1 and EAX from COMMON_CPUID_INDEX_7
+     aren't used for CPU feature detection.  */
+  cpu_features->features[COMMON_CPUID_INDEX_1].usable.eax = 0;
+  cpu_features->features[COMMON_CPUID_INDEX_1].usable.ebx = 0;
+  cpu_features->features[COMMON_CPUID_INDEX_7].usable.eax = 0;
+
+  /* Starting from COMMON_CPUID_INDEX_80000001, copy the cpuid bits to
+     usable bits.  */
+  CPU_FEATURE_SET_USABLE (cpu_features, LAHF64_SAHF64);
+  CPU_FEATURE_SET_USABLE (cpu_features, SVM);
+  CPU_FEATURE_SET_USABLE (cpu_features, LZCNT);
+  CPU_FEATURE_SET_USABLE (cpu_features, SSE4A);
+  CPU_FEATURE_SET_USABLE (cpu_features, PREFETCHW);
+  CPU_FEATURE_SET_USABLE (cpu_features, XOP);
+  CPU_FEATURE_SET_USABLE (cpu_features, LWP);
+  CPU_FEATURE_SET_USABLE (cpu_features, FMA4);
+  CPU_FEATURE_SET_USABLE (cpu_features, TBM);
+  CPU_FEATURE_SET_USABLE (cpu_features, SYSCALL_SYSRET);
+  CPU_FEATURE_SET_USABLE (cpu_features, NX);
+  CPU_FEATURE_SET_USABLE (cpu_features, PAGE1GB);
+  CPU_FEATURE_SET_USABLE (cpu_features, RDTSCP);
+  CPU_FEATURE_SET_USABLE (cpu_features, LM);
+  CPU_FEATURE_SET_USABLE (cpu_features, XSAVEOPT);
+  CPU_FEATURE_SET_USABLE (cpu_features, XSAVEC);
+  CPU_FEATURE_SET_USABLE (cpu_features, XGETBV_ECX_1);
+  CPU_FEATURE_SET_USABLE (cpu_features, XSAVES);
+  CPU_FEATURE_SET_USABLE (cpu_features, XFD);
+  CPU_FEATURE_SET_USABLE (cpu_features, INVARIANT_TSC);
+  CPU_FEATURE_SET_USABLE (cpu_features, WBNOINVD);
+  CPU_FEATURE_SET_USABLE (cpu_features, AVX512_BF16);
+
+  /* MPX has been deprecated.  */
+  CPU_FEATURE_UNSET (cpu_features, MPX);
+
+  /* Clear the usable bits which require OS support.  */
+  CPU_FEATURE_UNSET (cpu_features, FMA);
+  CPU_FEATURE_UNSET (cpu_features, AVX);
+  CPU_FEATURE_UNSET (cpu_features, F16C);
+  CPU_FEATURE_UNSET (cpu_features, AVX2);
+  CPU_FEATURE_UNSET (cpu_features, AVX512F);
+  CPU_FEATURE_UNSET (cpu_features, AVX512DQ);
+  CPU_FEATURE_UNSET (cpu_features, AVX512_IFMA);
+  CPU_FEATURE_UNSET (cpu_features, AVX512PF);
+  CPU_FEATURE_UNSET (cpu_features, AVX512ER);
+  CPU_FEATURE_UNSET (cpu_features, AVX512CD);
+  CPU_FEATURE_UNSET (cpu_features, AVX512BW);
+  CPU_FEATURE_UNSET (cpu_features, AVX512VL);
+  CPU_FEATURE_UNSET (cpu_features, AVX512_VBMI);
+  CPU_FEATURE_UNSET (cpu_features, PKU);
+  CPU_FEATURE_UNSET (cpu_features, AVX512_VBMI2);
+  CPU_FEATURE_UNSET (cpu_features, VAES);
+  CPU_FEATURE_UNSET (cpu_features, VPCLMULQDQ);
+  CPU_FEATURE_UNSET (cpu_features, AVX512_VNNI);
+  CPU_FEATURE_UNSET (cpu_features, AVX512_BITALG);
+  CPU_FEATURE_UNSET (cpu_features, AVX512_VPOPCNTDQ);
+  CPU_FEATURE_UNSET (cpu_features, AVX512_4VNNIW);
+  CPU_FEATURE_UNSET (cpu_features, AVX512_4FMAPS);
+  CPU_FEATURE_UNSET (cpu_features, AVX512_VP2INTERSECT);
+  CPU_FEATURE_UNSET (cpu_features, AMX_BF16);
+  CPU_FEATURE_UNSET (cpu_features, AMX_TILE);
+  CPU_FEATURE_UNSET (cpu_features, AMX_INT8);
+  CPU_FEATURE_UNSET (cpu_features, XOP);
+  CPU_FEATURE_UNSET (cpu_features, FMA4);
+  CPU_FEATURE_UNSET (cpu_features, XSAVEC);
+  CPU_FEATURE_UNSET (cpu_features, XFD);
+  CPU_FEATURE_UNSET (cpu_features, AVX512_BF16);
 
   /* Can we call xgetbv?  */
   if (CPU_FEATURES_CPU_P (cpu_features, OSXSAVE))
@@ -123,40 +159,28 @@ get_common_indices (struct cpu_features *cpu_features,
          /* Determine if AVX is usable.  */
          if (CPU_FEATURES_CPU_P (cpu_features, AVX))
            {
-             cpu_features->usable[index_arch_AVX_Usable]
-               |= bit_arch_AVX_Usable;
+             CPU_FEATURE_SET (cpu_features, AVX);
              /* The following features depend on AVX being usable.  */
              /* Determine if AVX2 is usable.  */
              if (CPU_FEATURES_CPU_P (cpu_features, AVX2))
-             {
-               cpu_features->usable[index_arch_AVX2_Usable]
-                 |= bit_arch_AVX2_Usable;
-
-               /* Unaligned load with 256-bit AVX registers are faster on
-                  Intel/AMD processors with AVX2.  */
-               cpu_features->preferred[index_arch_AVX_Fast_Unaligned_Load]
-                 |= bit_arch_AVX_Fast_Unaligned_Load;
-             }
+               {
+                 CPU_FEATURE_SET (cpu_features, AVX2);
+
+                 /* Unaligned load with 256-bit AVX registers are faster
+                    on Intel/AMD processors with AVX2.  */
+                 cpu_features->preferred[index_arch_AVX_Fast_Unaligned_Load]
+                   |= bit_arch_AVX_Fast_Unaligned_Load;
+               }
              /* Determine if FMA is usable.  */
-             if (CPU_FEATURES_CPU_P (cpu_features, FMA))
-               cpu_features->usable[index_arch_FMA_Usable]
-                 |= bit_arch_FMA_Usable;
+             CPU_FEATURE_SET_USABLE (cpu_features, FMA);
              /* Determine if VAES is usable.  */
-             if (CPU_FEATURES_CPU_P (cpu_features, VAES))
-               cpu_features->usable[index_arch_VAES_Usable]
-                 |= bit_arch_VAES_Usable;
+             CPU_FEATURE_SET_USABLE (cpu_features, VAES);
              /* Determine if VPCLMULQDQ is usable.  */
-             if (CPU_FEATURES_CPU_P (cpu_features, VPCLMULQDQ))
-               cpu_features->usable[index_arch_VPCLMULQDQ_Usable]
-                 |= bit_arch_VPCLMULQDQ_Usable;
+             CPU_FEATURE_SET_USABLE (cpu_features, VPCLMULQDQ);
              /* Determine if XOP is usable.  */
-             if (CPU_FEATURES_CPU_P (cpu_features, XOP))
-               cpu_features->usable[index_arch_XOP_Usable]
-                 |= bit_arch_XOP_Usable;
+             CPU_FEATURE_SET_USABLE (cpu_features, XOP);
              /* Determine if F16C is usable.  */
-             if (CPU_FEATURES_CPU_P (cpu_features, F16C))
-               cpu_features->usable[index_arch_F16C_Usable]
-                 |= bit_arch_F16C_Usable;
+             CPU_FEATURE_SET_USABLE (cpu_features, F16C);
            }
 
          /* Check if OPMASK state, upper 256-bit of ZMM0-ZMM15 and
@@ -168,73 +192,41 @@ get_common_indices (struct cpu_features *cpu_features,
              /* Determine if AVX512F is usable.  */
              if (CPU_FEATURES_CPU_P (cpu_features, AVX512F))
                {
-                 cpu_features->usable[index_arch_AVX512F_Usable]
-                   |= bit_arch_AVX512F_Usable;
+                 CPU_FEATURE_SET (cpu_features, AVX512F);
                  /* Determine if AVX512CD is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512CD))
-                   cpu_features->usable[index_arch_AVX512CD_Usable]
-                     |= bit_arch_AVX512CD_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512CD);
                  /* Determine if AVX512ER is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512ER))
-                   cpu_features->usable[index_arch_AVX512ER_Usable]
-                     |= bit_arch_AVX512ER_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512ER);
                  /* Determine if AVX512PF is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512PF))
-                   cpu_features->usable[index_arch_AVX512PF_Usable]
-                     |= bit_arch_AVX512PF_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512PF);
                  /* Determine if AVX512VL is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512VL))
-                   cpu_features->usable[index_arch_AVX512VL_Usable]
-                     |= bit_arch_AVX512VL_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512VL);
                  /* Determine if AVX512DQ is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512DQ))
-                   cpu_features->usable[index_arch_AVX512DQ_Usable]
-                     |= bit_arch_AVX512DQ_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512DQ);
                  /* Determine if AVX512BW is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512BW))
-                   cpu_features->usable[index_arch_AVX512BW_Usable]
-                     |= bit_arch_AVX512BW_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512BW);
                  /* Determine if AVX512_4FMAPS is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512_4FMAPS))
-                   cpu_features->usable[index_arch_AVX512_4FMAPS_Usable]
-                     |= bit_arch_AVX512_4FMAPS_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512_4FMAPS);
                  /* Determine if AVX512_4VNNIW is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512_4VNNIW))
-                   cpu_features->usable[index_arch_AVX512_4VNNIW_Usable]
-                     |= bit_arch_AVX512_4VNNIW_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512_4VNNIW);
                  /* Determine if AVX512_BITALG is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512_BITALG))
-                   cpu_features->usable[index_arch_AVX512_BITALG_Usable]
-                     |= bit_arch_AVX512_BITALG_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512_BITALG);
                  /* Determine if AVX512_IFMA is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512_IFMA))
-                   cpu_features->usable[index_arch_AVX512_IFMA_Usable]
-                     |= bit_arch_AVX512_IFMA_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512_IFMA);
                  /* Determine if AVX512_VBMI is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512_VBMI))
-                   cpu_features->usable[index_arch_AVX512_VBMI_Usable]
-                     |= bit_arch_AVX512_VBMI_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512_VBMI);
                  /* Determine if AVX512_VBMI2 is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512_VBMI2))
-                   cpu_features->usable[index_arch_AVX512_VBMI2_Usable]
-                     |= bit_arch_AVX512_VBMI2_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512_VBMI2);
                  /* Determine if is AVX512_VNNI usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512_VNNI))
-                   cpu_features->usable[index_arch_AVX512_VNNI_Usable]
-                     |= bit_arch_AVX512_VNNI_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512_VNNI);
                  /* Determine if AVX512_VPOPCNTDQ is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512_VPOPCNTDQ))
-                   cpu_features->usable[index_arch_AVX512_VPOPCNTDQ_Usable]
-                     |= bit_arch_AVX512_VPOPCNTDQ_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features,
+                                         AVX512_VPOPCNTDQ);
                  /* Determine if AVX512_VP2INTERSECT is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features,
-                                         AVX512_VP2INTERSECT))
-                   cpu_features->usable[index_arch_AVX512_VP2INTERSECT_Usable]
-                     |= bit_arch_AVX512_VP2INTERSECT_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features,
+                                         AVX512_VP2INTERSECT);
                  /* Determine if AVX512_BF16 is usable.  */
-                 if (CPU_FEATURES_CPU_P (cpu_features, AVX512_BF16))
-                   cpu_features->usable[index_arch_AVX512_BF16_Usable]
-                     |= bit_arch_AVX512_BF16_Usable;
+                 CPU_FEATURE_SET_USABLE (cpu_features, AVX512_BF16);
                }
            }
        }
@@ -244,19 +236,17 @@ get_common_indices (struct cpu_features *cpu_features,
          == (bit_XTILECFG_state | bit_XTILEDATA_state))
        {
          /* Determine if AMX_BF16 is usable.  */
-         if (CPU_FEATURES_CPU_P (cpu_features, AMX_BF16))
-           cpu_features->usable[index_arch_AMX_BF16_Usable]
-             |= bit_arch_AMX_BF16_Usable;
+         CPU_FEATURE_SET_USABLE (cpu_features, AMX_BF16);
          /* Determine if AMX_TILE is usable.  */
-         if (CPU_FEATURES_CPU_P (cpu_features, AMX_TILE))
-           cpu_features->usable[index_arch_AMX_TILE_Usable]
-             |= bit_arch_AMX_TILE_Usable;
+         CPU_FEATURE_SET_USABLE (cpu_features, AMX_TILE);
          /* Determine if AMX_INT8 is usable.  */
-         if (CPU_FEATURES_CPU_P (cpu_features, AMX_INT8))
-           cpu_features->usable[index_arch_AMX_INT8_Usable]
-             |= bit_arch_AMX_INT8_Usable;
+         CPU_FEATURE_SET_USABLE (cpu_features, AMX_INT8);
        }
 
+
+      /* XFD is usable only when OSXSAVE is enabled.  */
+      CPU_FEATURE_SET_USABLE (cpu_features, XFD);
+
       /* For _dl_runtime_resolve, set xsave_state_size to xsave area
         size + integer register save size and align it to 64 bytes.  */
       if (cpu_features->basic.max_cpuid >= 0xd)
@@ -318,8 +308,7 @@ get_common_indices (struct cpu_features *cpu_features,
                    {
                      cpu_features->xsave_state_size
                        = ALIGN_UP (size + STATE_SAVE_OFFSET, 64);
-                     cpu_features->usable[index_arch_XSAVEC_Usable]
-                       |= bit_arch_XSAVEC_Usable;
+                     CPU_FEATURE_SET (cpu_features, XSAVEC);
                    }
                }
            }
@@ -328,8 +317,79 @@ get_common_indices (struct cpu_features *cpu_features,
 
   /* Determine if PKU is usable.  */
   if (CPU_FEATURES_CPU_P (cpu_features, OSPKE))
-    cpu_features->usable[index_arch_PKU_Usable]
-      |= bit_arch_PKU_Usable;
+    CPU_FEATURE_SET (cpu_features, PKU);
+}
+
+static void
+get_extended_indices (struct cpu_features *cpu_features)
+{
+  unsigned int eax, ebx, ecx, edx;
+  __cpuid (0x80000000, eax, ebx, ecx, edx);
+  if (eax >= 0x80000001)
+    __cpuid (0x80000001,
+            cpu_features->features[COMMON_CPUID_INDEX_80000001].cpuid.eax,
+            cpu_features->features[COMMON_CPUID_INDEX_80000001].cpuid.ebx,
+            cpu_features->features[COMMON_CPUID_INDEX_80000001].cpuid.ecx,
+            cpu_features->features[COMMON_CPUID_INDEX_80000001].cpuid.edx);
+  if (eax >= 0x80000007)
+    __cpuid (0x80000007,
+            cpu_features->features[COMMON_CPUID_INDEX_80000007].cpuid.eax,
+            cpu_features->features[COMMON_CPUID_INDEX_80000007].cpuid.ebx,
+            cpu_features->features[COMMON_CPUID_INDEX_80000007].cpuid.ecx,
+            cpu_features->features[COMMON_CPUID_INDEX_80000007].cpuid.edx);
+  if (eax >= 0x80000008)
+    __cpuid (0x80000008,
+            cpu_features->features[COMMON_CPUID_INDEX_80000008].cpuid.eax,
+            cpu_features->features[COMMON_CPUID_INDEX_80000008].cpuid.ebx,
+            cpu_features->features[COMMON_CPUID_INDEX_80000008].cpuid.ecx,
+            cpu_features->features[COMMON_CPUID_INDEX_80000008].cpuid.edx);
+}
+
+static void
+get_common_indices (struct cpu_features *cpu_features,
+                   unsigned int *family, unsigned int *model,
+                   unsigned int *extended_model, unsigned int *stepping)
+{
+  if (family)
+    {
+      unsigned int eax;
+      __cpuid (1, eax,
+              cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.ebx,
+              cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.ecx,
+              cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.edx);
+      cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.eax = eax;
+      *family = (eax >> 8) & 0x0f;
+      *model = (eax >> 4) & 0x0f;
+      *extended_model = (eax >> 12) & 0xf0;
+      *stepping = eax & 0x0f;
+      if (*family == 0x0f)
+       {
+         *family += (eax >> 20) & 0xff;
+         *model += *extended_model;
+       }
+    }
+
+  if (cpu_features->basic.max_cpuid >= 7)
+    {
+      __cpuid_count (7, 0,
+                    cpu_features->features[COMMON_CPUID_INDEX_7].cpuid.eax,
+                    cpu_features->features[COMMON_CPUID_INDEX_7].cpuid.ebx,
+                    cpu_features->features[COMMON_CPUID_INDEX_7].cpuid.ecx,
+                    cpu_features->features[COMMON_CPUID_INDEX_7].cpuid.edx);
+      __cpuid_count (7, 1,
+                    cpu_features->features[COMMON_CPUID_INDEX_7_ECX_1].cpuid.eax,
+                    cpu_features->features[COMMON_CPUID_INDEX_7_ECX_1].cpuid.ebx,
+                    cpu_features->features[COMMON_CPUID_INDEX_7_ECX_1].cpuid.ecx,
+                    cpu_features->features[COMMON_CPUID_INDEX_7_ECX_1].cpuid.edx);
+    }
+
+  if (cpu_features->basic.max_cpuid >= 0xd)
+    __cpuid_count (0xd, 1,
+                  cpu_features->features[COMMON_CPUID_INDEX_D_ECX_1].cpuid.eax,
+                  cpu_features->features[COMMON_CPUID_INDEX_D_ECX_1].cpuid.ebx,
+                  cpu_features->features[COMMON_CPUID_INDEX_D_ECX_1].cpuid.ecx,
+                  cpu_features->features[COMMON_CPUID_INDEX_D_ECX_1].cpuid.edx);
+
 }
 
 _Static_assert (((index_arch_Fast_Unaligned_Load
@@ -353,8 +413,6 @@ init_cpu_features (struct cpu_features *cpu_features)
   unsigned int stepping = 0;
   enum cpu_features_kind kind;
 
-  cpu_features->usable_p = cpu_features->usable;
-
 #if !HAS_CPUID
   if (__get_cpuid_max (0, 0) == 0)
     {
@@ -377,6 +435,8 @@ init_cpu_features (struct cpu_features *cpu_features)
 
       get_extended_indices (cpu_features);
 
+      update_usable (cpu_features);
+
       if (family == 0x06)
        {
          model += extended_model;
@@ -473,7 +533,7 @@ init_cpu_features (struct cpu_features *cpu_features)
                 with stepping >= 4) to avoid TSX on kernels that weren't
                updated with the latest microcode package (which disables
                the broken feature by default).  */
-             cpu_features->cpuid[index_cpu_RTM].reg_RTM &= ~bit_cpu_RTM;
+             CPU_FEATURE_UNSET (cpu_features, RTM);
              break;
            }
        }
@@ -502,15 +562,15 @@ init_cpu_features (struct cpu_features *cpu_features)
 
       get_extended_indices (cpu_features);
 
-      ecx = cpu_features->cpuid[COMMON_CPUID_INDEX_1].ecx;
+      update_usable (cpu_features);
 
-      if (HAS_ARCH_FEATURE (AVX_Usable))
+      ecx = cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.ecx;
+
+      if (CPU_FEATURE_USABLE_P (cpu_features, AVX))
        {
          /* Since the FMA4 bit is in COMMON_CPUID_INDEX_80000001 and
             FMA4 requires AVX, determine if FMA4 is usable here.  */
-         if (CPU_FEATURES_CPU_P (cpu_features, FMA4))
-           cpu_features->usable[index_arch_FMA4_Usable]
-             |= bit_arch_FMA4_Usable;
+         CPU_FEATURE_SET_USABLE (cpu_features, FMA4);
        }
 
       if (family == 0x15)
@@ -541,13 +601,15 @@ init_cpu_features (struct cpu_features *cpu_features)
 
       get_extended_indices (cpu_features);
 
+      update_usable (cpu_features);
+
       model += extended_model;
       if (family == 0x6)
         {
           if (model == 0xf || model == 0x19)
             {
-              cpu_features->usable[index_arch_AVX_Usable]
-                &= ~(bit_arch_AVX_Usable | bit_arch_AVX2_Usable);
+             CPU_FEATURE_UNSET (cpu_features, AVX);
+             CPU_FEATURE_UNSET (cpu_features, AVX2);
 
               cpu_features->preferred[index_arch_Slow_SSE4_2]
                 |= bit_arch_Slow_SSE4_2;
@@ -560,8 +622,8 @@ init_cpu_features (struct cpu_features *cpu_features)
         {
          if (model == 0x1b)
            {
-             cpu_features->usable[index_arch_AVX_Usable]
-               &= ~(bit_arch_AVX_Usable | bit_arch_AVX2_Usable);
+             CPU_FEATURE_UNSET (cpu_features, AVX);
+             CPU_FEATURE_UNSET (cpu_features, AVX2);
 
              cpu_features->preferred[index_arch_Slow_SSE4_2]
                |= bit_arch_Slow_SSE4_2;
@@ -571,8 +633,8 @@ init_cpu_features (struct cpu_features *cpu_features)
            }
          else if (model == 0x3b)
            {
-             cpu_features->usable[index_arch_AVX_Usable]
-               &= ~(bit_arch_AVX_Usable | bit_arch_AVX2_Usable);
+             CPU_FEATURE_UNSET (cpu_features, AVX);
+             CPU_FEATURE_UNSET (cpu_features, AVX2);
 
              cpu_features->preferred[index_arch_AVX_Fast_Unaligned_Load]
                &= ~bit_arch_AVX_Fast_Unaligned_Load;
@@ -583,6 +645,7 @@ init_cpu_features (struct cpu_features *cpu_features)
     {
       kind = arch_kind_other;
       get_common_indices (cpu_features, NULL, NULL, NULL, NULL);
+      update_usable (cpu_features);
     }
 
   /* Support i586 if CX8 is available.  */
@@ -629,31 +692,30 @@ no_cpuid:
     {
       const char *platform = NULL;
 
-      if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable)
-         && CPU_FEATURES_CPU_P (cpu_features, AVX512CD))
+      if (CPU_FEATURE_USABLE_P (cpu_features, AVX512CD))
        {
-         if (CPU_FEATURES_CPU_P (cpu_features, AVX512ER))
+         if (CPU_FEATURE_USABLE_P (cpu_features, AVX512ER))
            {
-             if (CPU_FEATURES_CPU_P (cpu_features, AVX512PF))
+             if (CPU_FEATURE_USABLE_P (cpu_features, AVX512PF))
                platform = "xeon_phi";
            }
          else
            {
-             if (CPU_FEATURES_CPU_P (cpu_features, AVX512BW)
-                 && CPU_FEATURES_CPU_P (cpu_features, AVX512DQ)
-                 && CPU_FEATURES_CPU_P (cpu_features, AVX512VL))
+             if (CPU_FEATURE_USABLE_P (cpu_features, AVX512BW)
+                 && CPU_FEATURE_USABLE_P (cpu_features, AVX512DQ)
+                 && CPU_FEATURE_USABLE_P (cpu_features, AVX512VL))
                GLRO(dl_hwcap) |= HWCAP_X86_AVX512_1;
            }
        }
 
       if (platform == NULL
-         && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
-         && CPU_FEATURES_ARCH_P (cpu_features, FMA_Usable)
-         && CPU_FEATURES_CPU_P (cpu_features, BMI1)
-         && CPU_FEATURES_CPU_P (cpu_features, BMI2)
-         && CPU_FEATURES_CPU_P (cpu_features, LZCNT)
-         && CPU_FEATURES_CPU_P (cpu_features, MOVBE)
-         && CPU_FEATURES_CPU_P (cpu_features, POPCNT))
+         && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
+         && CPU_FEATURE_USABLE_P (cpu_features, FMA)
+         && CPU_FEATURE_USABLE_P (cpu_features, BMI1)
+         && CPU_FEATURE_USABLE_P (cpu_features, BMI2)
+         && CPU_FEATURE_USABLE_P (cpu_features, LZCNT)
+         && CPU_FEATURE_USABLE_P (cpu_features, MOVBE)
+         && CPU_FEATURE_USABLE_P (cpu_features, POPCNT))
        platform = "haswell";
 
       if (platform != NULL)
@@ -661,7 +723,7 @@ no_cpuid:
     }
 #else
   GLRO(dl_hwcap) = 0;
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE2))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE2))
     GLRO(dl_hwcap) |= HWCAP_X86_SSE2;
 
   if (CPU_FEATURES_ARCH_P (cpu_features, I686))
@@ -696,9 +758,9 @@ no_cpuid:
             GLIBC_TUNABLES=glibc.cpu.hwcaps=-IBT,-SHSTK
           */
          unsigned int cet_feature = 0;
-         if (!HAS_CPU_FEATURE (IBT))
+         if (!CPU_FEATURE_USABLE (IBT))
            cet_feature |= GNU_PROPERTY_X86_FEATURE_1_IBT;
-         if (!HAS_CPU_FEATURE (SHSTK))
+         if (!CPU_FEATURE_USABLE (SHSTK))
            cet_feature |= GNU_PROPERTY_X86_FEATURE_1_SHSTK;
 
          if (cet_feature)
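
As a quick illustration of the gating pattern in the cpu-features.c hunks above, the sketch below reduces it to plain integers: a CPUID bit is promoted to its usable bit only once the OS-managed XCR0 state it depends on is seen to be enabled, as with the AMX bits after the XTILECFG/XTILEDATA state check.  This is illustrative only, not part of the patch; the xcr0 value is assumed, and the XTILECFG/XTILEDATA positions (XCR0 bits 17 and 18) are taken as given for the example.

#include <stdio.h>

#define bit_XTILECFG_state   (1u << 17)   /* assumed XCR0 bit position */
#define bit_XTILEDATA_state  (1u << 18)   /* assumed XCR0 bit position */
#define bit_cpu_AMX_TILE     (1u << 24)   /* as defined in cpu-features.h */

int
main (void)
{
  unsigned int cpuid_edx = bit_cpu_AMX_TILE;  /* CPU reports AMX_TILE.  */
  unsigned int usable_edx = 0;                /* Usable bits start clear.  */
  /* Hypothetical XCR0 value with both tile states enabled.  */
  unsigned int xcr0 = bit_XTILECFG_state | bit_XTILEDATA_state;

  if ((xcr0 & (bit_XTILECFG_state | bit_XTILEDATA_state))
      == (bit_XTILECFG_state | bit_XTILEDATA_state))
    /* Same idea as CPU_FEATURE_SET_USABLE: copy the CPUID bit over.  */
    usable_edx |= cpuid_edx & bit_cpu_AMX_TILE;

  printf ("AMX_TILE usable: %d\n", (usable_edx & bit_cpu_AMX_TILE) != 0);
  return 0;
}
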
index 03831310578f21e35115b52b69dff847e3e1f0b7..a0b9b9177c827b75f1405714851286cebd90e5e0 100644 (file)
 #ifndef cpu_features_h
 #define cpu_features_h
 
-enum
-{
-  /* The integer bit array index for the first set of usable feature
-     bits.  */
-  USABLE_FEATURE_INDEX_1 = 0,
-  /* The current maximum size of the feature integer bit array.  */
-  USABLE_FEATURE_INDEX_MAX
-};
-
 enum
 {
   /* The integer bit array index for the first set of preferred feature
@@ -57,6 +48,12 @@ struct cpuid_registers
   unsigned int edx;
 };
 
+struct cpuid_features
+{
+  struct cpuid_registers cpuid;
+  struct cpuid_registers usable;
+};
+
 enum cpu_features_kind
 {
   arch_kind_unknown = 0,
@@ -78,9 +75,7 @@ struct cpu_features_basic
 struct cpu_features
 {
   struct cpu_features_basic basic;
-  unsigned int *usable_p;
-  struct cpuid_registers cpuid[COMMON_CPUID_INDEX_MAX];
-  unsigned int usable[USABLE_FEATURE_INDEX_MAX];
+  struct cpuid_features features[COMMON_CPUID_INDEX_MAX];
   unsigned int preferred[PREFERRED_FEATURE_INDEX_MAX];
   /* The state size for XSAVEC or XSAVE.  The type must be unsigned long
      int so that we use
@@ -91,7 +86,7 @@ struct cpu_features
   unsigned long int xsave_state_size;
   /* The full state size for XSAVE when XSAVEC is disabled by
 
-     GLIBC_TUNABLES=glibc.cpu.hwcaps=-XSAVEC_Usable
+     GLIBC_TUNABLES=glibc.cpu.hwcaps=-XSAVEC
    */
   unsigned int xsave_state_full_size;
   /* Data cache size for use in memory and string routines, typically
@@ -114,117 +109,40 @@ extern const struct cpu_features *__get_cpu_features (void)
      __attribute__ ((const));
 
 /* Only used directly in cpu-features.c.  */
-# define CPU_FEATURES_CPU_P(ptr, name) \
-  ((ptr->cpuid[index_cpu_##name].reg_##name & (bit_cpu_##name)) != 0)
-# define CPU_FEATURES_ARCH_P(ptr, name) \
-  ((ptr->feature_##name[index_arch_##name] & (bit_arch_##name)) != 0)
+#define CPU_FEATURE_CHECK_P(ptr, name, check) \
+  ((ptr->features[index_cpu_##name].check.reg_##name \
+    & bit_cpu_##name) != 0)
+#define CPU_FEATURE_SET(ptr, name) \
+  ptr->features[index_cpu_##name].usable.reg_##name |= bit_cpu_##name;
+#define CPU_FEATURE_UNSET(ptr, name) \
+  ptr->features[index_cpu_##name].usable.reg_##name &= ~bit_cpu_##name;
+#define CPU_FEATURE_SET_USABLE(ptr, name) \
+  ptr->features[index_cpu_##name].usable.reg_##name \
+     |= ptr->features[index_cpu_##name].cpuid.reg_##name & bit_cpu_##name;
+#define CPU_FEATURE_PREFERRED_P(ptr, name) \
+  ((ptr->preferred[index_arch_##name] & bit_arch_##name) != 0)
+#define CPU_FEATURE_CPU_P(ptr, name) \
+  CPU_FEATURE_CHECK_P (ptr, name, cpuid)
+#define CPU_FEATURE_USABLE_P(ptr, name) \
+  CPU_FEATURE_CHECK_P (ptr, name, usable)
 
 /* HAS_CPU_FEATURE evaluates to true if CPU supports the feature.  */
 #define HAS_CPU_FEATURE(name) \
-  CPU_FEATURES_CPU_P (__get_cpu_features (), name)
-/* HAS_ARCH_FEATURE evaluates to true if we may use the feature at
-   runtime.  */
-# define HAS_ARCH_FEATURE(name) \
-  CPU_FEATURES_ARCH_P (__get_cpu_features (), name)
+  CPU_FEATURE_CPU_P (__get_cpu_features (), name)
 /* CPU_FEATURE_USABLE evaluates to true if the feature is usable.  */
 #define CPU_FEATURE_USABLE(name) \
-  HAS_ARCH_FEATURE (name##_Usable)
-
-/* Architecture features.  */
-
-/* USABLE_FEATURE_INDEX_1.  */
-#define bit_arch_AVX_Usable                    (1u << 0)
-#define bit_arch_AVX2_Usable                   (1u << 1)
-#define bit_arch_AVX512F_Usable                        (1u << 2)
-#define bit_arch_AVX512CD_Usable               (1u << 3)
-#define bit_arch_AVX512ER_Usable               (1u << 4)
-#define bit_arch_AVX512PF_Usable               (1u << 5)
-#define bit_arch_AVX512VL_Usable               (1u << 6)
-#define bit_arch_AVX512DQ_Usable               (1u << 7)
-#define bit_arch_AVX512BW_Usable               (1u << 8)
-#define bit_arch_AVX512_4FMAPS_Usable          (1u << 9)
-#define bit_arch_AVX512_4VNNIW_Usable          (1u << 10)
-#define bit_arch_AVX512_BITALG_Usable          (1u << 11)
-#define bit_arch_AVX512_IFMA_Usable            (1u << 12)
-#define bit_arch_AVX512_VBMI_Usable            (1u << 13)
-#define bit_arch_AVX512_VBMI2_Usable           (1u << 14)
-#define bit_arch_AVX512_VNNI_Usable            (1u << 15)
-#define bit_arch_AVX512_VPOPCNTDQ_Usable       (1u << 16)
-#define bit_arch_FMA_Usable                    (1u << 17)
-#define bit_arch_FMA4_Usable                   (1u << 18)
-#define bit_arch_VAES_Usable                   (1u << 19)
-#define bit_arch_VPCLMULQDQ_Usable             (1u << 20)
-#define bit_arch_XOP_Usable                    (1u << 21)
-#define bit_arch_XSAVEC_Usable                 (1u << 22)
-#define bit_arch_F16C_Usable                   (1u << 23)
-#define bit_arch_AVX512_VP2INTERSECT_Usable    (1u << 24)
-#define bit_arch_AVX512_BF16_Usable            (1u << 25)
-#define bit_arch_PKU_Usable                    (1u << 26)
-#define bit_arch_AMX_BF16_Usable               (1u << 27)
-#define bit_arch_AMX_TILE_Usable               (1u << 28)
-#define bit_arch_AMX_INT8_Usable               (1u << 29)
-
-#define index_arch_AVX_Usable                  USABLE_FEATURE_INDEX_1
-#define index_arch_AVX2_Usable                 USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512F_Usable              USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512CD_Usable             USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512ER_Usable             USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512PF_Usable             USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512VL_Usable             USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512BW_Usable             USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512DQ_Usable             USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512_4FMAPS_Usable                USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512_4VNNIW_Usable                USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512_BITALG_Usable                USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512_IFMA_Usable          USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512_VBMI_Usable          USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512_VBMI2_Usable         USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512_VNNI_Usable          USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512_VPOPCNTDQ_Usable     USABLE_FEATURE_INDEX_1
-#define index_arch_FMA_Usable                  USABLE_FEATURE_INDEX_1
-#define index_arch_FMA4_Usable                 USABLE_FEATURE_INDEX_1
-#define index_arch_VAES_Usable                 USABLE_FEATURE_INDEX_1
-#define index_arch_VPCLMULQDQ_Usable           USABLE_FEATURE_INDEX_1
-#define index_arch_XOP_Usable                  USABLE_FEATURE_INDEX_1
-#define index_arch_XSAVEC_Usable               USABLE_FEATURE_INDEX_1
-#define index_arch_F16C_Usable                 USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512_VP2INTERSECT_Usable  USABLE_FEATURE_INDEX_1
-#define index_arch_AVX512_BF16_Usable          USABLE_FEATURE_INDEX_1
-#define index_arch_PKU_Usable                  USABLE_FEATURE_INDEX_1
-#define index_arch_AMX_BF16_Usable             USABLE_FEATURE_INDEX_1
-#define index_arch_AMX_TILE_Usable             USABLE_FEATURE_INDEX_1
-#define index_arch_AMX_INT8_Usable             USABLE_FEATURE_INDEX_1
-
-#define feature_AVX_Usable                     usable
-#define feature_AVX2_Usable                    usable
-#define feature_AVX512F_Usable                 usable
-#define feature_AVX512CD_Usable                        usable
-#define feature_AVX512ER_Usable                        usable
-#define feature_AVX512PF_Usable                        usable
-#define feature_AVX512VL_Usable                        usable
-#define feature_AVX512BW_Usable                        usable
-#define feature_AVX512DQ_Usable                        usable
-#define feature_AVX512_4FMAPS_Usable           usable
-#define feature_AVX512_4VNNIW_Usable           usable
-#define feature_AVX512_BITALG_Usable           usable
-#define feature_AVX512_IFMA_Usable             usable
-#define feature_AVX512_VBMI_Usable             usable
-#define feature_AVX512_VBMI2_Usable            usable
-#define feature_AVX512_VNNI_Usable             usable
-#define feature_AVX512_VPOPCNTDQ_Usable                usable
-#define feature_FMA_Usable                     usable
-#define feature_FMA4_Usable                    usable
-#define feature_VAES_Usable                    usable
-#define feature_VPCLMULQDQ_Usable              usable
-#define feature_XOP_Usable                     usable
-#define feature_XSAVEC_Usable                  usable
-#define feature_F16C_Usable                    usable
-#define feature_AVX512_VP2INTERSECT_Usable     usable
-#define feature_AVX512_BF16_Usable             usable
-#define feature_PKU_Usable                     usable
-#define feature_AMX_BF16_Usable                        usable
-#define feature_AMX_TILE_Usable                        usable
-#define feature_AMX_INT8_Usable                        usable
+  CPU_FEATURE_USABLE_P (__get_cpu_features (), name)
+/* CPU_FEATURE_PREFERRED evaluates to true if we prefer the feature at
+   runtime.  */
+#define CPU_FEATURE_PREFERRED(name) \
+  CPU_FEATURE_PREFERRED_P (__get_cpu_features (), name)
+
+#define CPU_FEATURES_CPU_P(ptr, name) \
+  CPU_FEATURE_CPU_P (ptr, name)
+#define CPU_FEATURES_ARCH_P(ptr, name) \
+  CPU_FEATURE_PREFERRED_P (ptr, name)
+#define HAS_ARCH_FEATURE(name) \
+  CPU_FEATURE_PREFERRED (name)
 
 /* CPU features.  */
 
@@ -247,6 +165,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define bit_cpu_CMPXCHG16B     (1u << 13)
 #define bit_cpu_XTPRUPDCTRL    (1u << 14)
 #define bit_cpu_PDCM           (1u << 15)
+#define bit_cpu_INDEX_1_ECX_16 (1u << 16)
 #define bit_cpu_PCID           (1u << 17)
 #define bit_cpu_DCA            (1u << 18)
 #define bit_cpu_SSE4_1         (1u << 19)
@@ -261,6 +180,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define bit_cpu_AVX            (1u << 28)
 #define bit_cpu_F16C           (1u << 29)
 #define bit_cpu_RDRAND         (1u << 30)
+#define bit_cpu_INDEX_1_ECX_31 (1u << 31)
 
 /* EDX.  */
 #define bit_cpu_FPU            (1u << 0)
@@ -273,6 +193,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define bit_cpu_MCE            (1u << 7)
 #define bit_cpu_CX8            (1u << 8)
 #define bit_cpu_APIC           (1u << 9)
+#define bit_cpu_INDEX_1_EDX_10 (1u << 10)
 #define bit_cpu_SEP            (1u << 11)
 #define bit_cpu_MTRR           (1u << 12)
 #define bit_cpu_PGE            (1u << 13)
@@ -282,6 +203,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define bit_cpu_PSE_36         (1u << 17)
 #define bit_cpu_PSN            (1u << 18)
 #define bit_cpu_CLFSH          (1u << 19)
+#define bit_cpu_INDEX_1_EDX_20 (1u << 20)
 #define bit_cpu_DS             (1u << 21)
 #define bit_cpu_ACPI           (1u << 22)
 #define bit_cpu_MMX            (1u << 23)
@@ -291,6 +213,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define bit_cpu_SS             (1u << 27)
 #define bit_cpu_HTT            (1u << 28)
 #define bit_cpu_TM             (1u << 29)
+#define bit_cpu_INDEX_1_EDX_30 (1u << 30)
 #define bit_cpu_PBE            (1u << 31)
 
 /* COMMON_CPUID_INDEX_7.  */
@@ -302,12 +225,14 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define bit_cpu_BMI1           (1u << 3)
 #define bit_cpu_HLE            (1u << 4)
 #define bit_cpu_AVX2           (1u << 5)
+#define bit_cpu_INDEX_7_EBX_6  (1u << 6)
 #define bit_cpu_SMEP           (1u << 7)
 #define bit_cpu_BMI2           (1u << 8)
 #define bit_cpu_ERMS           (1u << 9)
 #define bit_cpu_INVPCID                (1u << 10)
 #define bit_cpu_RTM            (1u << 11)
 #define bit_cpu_PQM            (1u << 12)
+#define bit_cpu_DEPR_FPU_CS_DS (1u << 13)
 #define bit_cpu_MPX            (1u << 14)
 #define bit_cpu_PQE            (1u << 15)
 #define bit_cpu_AVX512F                (1u << 16)
@@ -316,6 +241,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define bit_cpu_ADX            (1u << 19)
 #define bit_cpu_SMAP           (1u << 20)
 #define bit_cpu_AVX512_IFMA    (1u << 21)
+#define bit_cpu_INDEX_7_EBX_22 (1u << 22)
 #define bit_cpu_CLFLUSHOPT     (1u << 23)
 #define bit_cpu_CLWB           (1u << 24)
 #define bit_cpu_TRACE          (1u << 25)
@@ -340,9 +266,17 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define bit_cpu_VPCLMULQDQ     (1u << 10)
 #define bit_cpu_AVX512_VNNI    (1u << 11)
 #define bit_cpu_AVX512_BITALG  (1u << 12)
+#define bit_cpu_INDEX_7_ECX_13 (1u << 13)
 #define bit_cpu_AVX512_VPOPCNTDQ (1u << 14)
+#define bit_cpu_INDEX_7_ECX_15 (1u << 15)
+#define bit_cpu_INDEX_7_ECX_16 (1u << 16)
+/* Note: Bits 17-21: The value of MAWAU used by the BNDLDX and BNDSTX
+   instructions in 64-bit mode.  */
 #define bit_cpu_RDPID          (1u << 22)
+#define bit_cpu_INDEX_7_ECX_23 (1u << 23)
+#define bit_cpu_INDEX_7_ECX_24 (1u << 24)
 #define bit_cpu_CLDEMOTE       (1u << 25)
+#define bit_cpu_INDEX_7_ECX_26 (1u << 26)
 #define bit_cpu_MOVDIRI                (1u << 27)
 #define bit_cpu_MOVDIR64B      (1u << 28)
 #define bit_cpu_ENQCMD         (1u << 29)
@@ -350,17 +284,30 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define bit_cpu_PKS            (1u << 31)
 
 /* EDX.  */
+#define bit_cpu_INDEX_7_EDX_0  (1u << 0)
+#define bit_cpu_INDEX_7_EDX_1  (1u << 1)
 #define bit_cpu_AVX512_4VNNIW  (1u << 2)
 #define bit_cpu_AVX512_4FMAPS  (1u << 3)
 #define bit_cpu_FSRM           (1u << 4)
+#define bit_cpu_INDEX_7_EDX_5  (1u << 5)
+#define bit_cpu_INDEX_7_EDX_6  (1u << 6)
+#define bit_cpu_INDEX_7_EDX_7  (1u << 7)
 #define bit_cpu_AVX512_VP2INTERSECT (1u << 8)
+#define bit_cpu_INDEX_7_EDX_9  (1u << 9)
 #define bit_cpu_MD_CLEAR       (1u << 10)
+#define bit_cpu_INDEX_7_EDX_11 (1u << 11)
+#define bit_cpu_INDEX_7_EDX_12 (1u << 12)
+#define bit_cpu_INDEX_7_EDX_13 (1u << 13)
 #define bit_cpu_SERIALIZE      (1u << 14)
 #define bit_cpu_HYBRID         (1u << 15)
 #define bit_cpu_TSXLDTRK       (1u << 16)
+#define bit_cpu_INDEX_7_EDX_17 (1u << 17)
 #define bit_cpu_PCONFIG                (1u << 18)
+#define bit_cpu_INDEX_7_EDX_19 (1u << 19)
 #define bit_cpu_IBT            (1u << 20)
+#define bit_cpu_INDEX_7_EDX_21 (1u << 21)
 #define bit_cpu_AMX_BF16       (1u << 22)
+#define bit_cpu_INDEX_7_EDX_23 (1u << 23)
 #define bit_cpu_AMX_TILE       (1u << 24)
 #define bit_cpu_AMX_INT8       (1u << 25)
 #define bit_cpu_IBRS_IBPB      (1u << 26)
@@ -433,6 +380,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define index_cpu_CMPXCHG16B   COMMON_CPUID_INDEX_1
 #define index_cpu_XTPRUPDCTRL  COMMON_CPUID_INDEX_1
 #define index_cpu_PDCM         COMMON_CPUID_INDEX_1
+#define index_cpu_INDEX_1_ECX_16 COMMON_CPUID_INDEX_1
 #define index_cpu_PCID         COMMON_CPUID_INDEX_1
 #define index_cpu_DCA          COMMON_CPUID_INDEX_1
 #define index_cpu_SSE4_1       COMMON_CPUID_INDEX_1
@@ -447,6 +395,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define index_cpu_AVX          COMMON_CPUID_INDEX_1
 #define index_cpu_F16C         COMMON_CPUID_INDEX_1
 #define index_cpu_RDRAND       COMMON_CPUID_INDEX_1
+#define index_cpu_INDEX_1_ECX_31 COMMON_CPUID_INDEX_1
 
 /* ECX.  */
 #define index_cpu_FPU          COMMON_CPUID_INDEX_1
@@ -459,6 +408,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define index_cpu_MCE          COMMON_CPUID_INDEX_1
 #define index_cpu_CX8          COMMON_CPUID_INDEX_1
 #define index_cpu_APIC         COMMON_CPUID_INDEX_1
+#define index_cpu_INDEX_1_EDX_10 COMMON_CPUID_INDEX_1
 #define index_cpu_SEP          COMMON_CPUID_INDEX_1
 #define index_cpu_MTRR         COMMON_CPUID_INDEX_1
 #define index_cpu_PGE          COMMON_CPUID_INDEX_1
@@ -468,6 +418,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define index_cpu_PSE_36       COMMON_CPUID_INDEX_1
 #define index_cpu_PSN          COMMON_CPUID_INDEX_1
 #define index_cpu_CLFSH                COMMON_CPUID_INDEX_1
+#define index_cpu_INDEX_1_EDX_20 COMMON_CPUID_INDEX_1
 #define index_cpu_DS           COMMON_CPUID_INDEX_1
 #define index_cpu_ACPI         COMMON_CPUID_INDEX_1
 #define index_cpu_MMX          COMMON_CPUID_INDEX_1
@@ -477,6 +428,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define index_cpu_SS           COMMON_CPUID_INDEX_1
 #define index_cpu_HTT          COMMON_CPUID_INDEX_1
 #define index_cpu_TM           COMMON_CPUID_INDEX_1
+#define index_cpu_INDEX_1_EDX_30 COMMON_CPUID_INDEX_1
 #define index_cpu_PBE          COMMON_CPUID_INDEX_1
 
 /* COMMON_CPUID_INDEX_7.  */
@@ -488,12 +440,14 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define index_cpu_BMI1         COMMON_CPUID_INDEX_7
 #define index_cpu_HLE          COMMON_CPUID_INDEX_7
 #define index_cpu_AVX2         COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EBX_6        COMMON_CPUID_INDEX_7
 #define index_cpu_SMEP         COMMON_CPUID_INDEX_7
 #define index_cpu_BMI2         COMMON_CPUID_INDEX_7
 #define index_cpu_ERMS         COMMON_CPUID_INDEX_7
 #define index_cpu_INVPCID      COMMON_CPUID_INDEX_7
 #define index_cpu_RTM          COMMON_CPUID_INDEX_7
 #define index_cpu_PQM          COMMON_CPUID_INDEX_7
+#define index_cpu_DEPR_FPU_CS_DS COMMON_CPUID_INDEX_7
 #define index_cpu_MPX          COMMON_CPUID_INDEX_7
 #define index_cpu_PQE          COMMON_CPUID_INDEX_7
 #define index_cpu_AVX512F      COMMON_CPUID_INDEX_7
@@ -502,6 +456,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define index_cpu_ADX          COMMON_CPUID_INDEX_7
 #define index_cpu_SMAP         COMMON_CPUID_INDEX_7
 #define index_cpu_AVX512_IFMA  COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EBX_22 COMMON_CPUID_INDEX_7
 #define index_cpu_CLFLUSHOPT   COMMON_CPUID_INDEX_7
 #define index_cpu_CLWB         COMMON_CPUID_INDEX_7
 #define index_cpu_TRACE                COMMON_CPUID_INDEX_7
@@ -526,9 +481,15 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define index_cpu_VPCLMULQDQ   COMMON_CPUID_INDEX_7
 #define index_cpu_AVX512_VNNI  COMMON_CPUID_INDEX_7
 #define index_cpu_AVX512_BITALG COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_ECX_13 COMMON_CPUID_INDEX_7
 #define index_cpu_AVX512_VPOPCNTDQ COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_ECX_15 COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_ECX_16 COMMON_CPUID_INDEX_7
 #define index_cpu_RDPID                COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_ECX_23 COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_ECX_24 COMMON_CPUID_INDEX_7
 #define index_cpu_CLDEMOTE     COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_ECX_26 COMMON_CPUID_INDEX_7
 #define index_cpu_MOVDIRI      COMMON_CPUID_INDEX_7
 #define index_cpu_MOVDIR64B    COMMON_CPUID_INDEX_7
 #define index_cpu_ENQCMD       COMMON_CPUID_INDEX_7
@@ -536,17 +497,30 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define index_cpu_PKS          COMMON_CPUID_INDEX_7
 
 /* EDX.  */
+#define index_cpu_INDEX_7_EDX_0        COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_1        COMMON_CPUID_INDEX_7
 #define index_cpu_AVX512_4VNNIW COMMON_CPUID_INDEX_7
 #define index_cpu_AVX512_4FMAPS        COMMON_CPUID_INDEX_7
 #define index_cpu_FSRM         COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_5        COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_6        COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_7        COMMON_CPUID_INDEX_7
 #define index_cpu_AVX512_VP2INTERSECT COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_9        COMMON_CPUID_INDEX_7
 #define index_cpu_MD_CLEAR     COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_11 COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_12 COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_13 COMMON_CPUID_INDEX_7
 #define index_cpu_SERIALIZE    COMMON_CPUID_INDEX_7
 #define index_cpu_HYBRID       COMMON_CPUID_INDEX_7
 #define index_cpu_TSXLDTRK     COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_17 COMMON_CPUID_INDEX_7
 #define index_cpu_PCONFIG      COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_19 COMMON_CPUID_INDEX_7
 #define index_cpu_IBT          COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_21 COMMON_CPUID_INDEX_7
 #define index_cpu_AMX_BF16     COMMON_CPUID_INDEX_7
+#define index_cpu_INDEX_7_EDX_23 COMMON_CPUID_INDEX_7
 #define index_cpu_AMX_TILE     COMMON_CPUID_INDEX_7
 #define index_cpu_AMX_INT8     COMMON_CPUID_INDEX_7
 #define index_cpu_IBRS_IBPB    COMMON_CPUID_INDEX_7
@@ -619,6 +593,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define reg_CMPXCHG16B         ecx
 #define reg_XTPRUPDCTRL                ecx
 #define reg_PDCM               ecx
+#define reg_INDEX_1_ECX_16     ecx
 #define reg_PCID               ecx
 #define reg_DCA                        ecx
 #define reg_SSE4_1             ecx
@@ -633,6 +608,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define reg_AVX                        ecx
 #define reg_F16C               ecx
 #define reg_RDRAND             ecx
+#define reg_INDEX_1_ECX_31     ecx
 
 /* EDX.  */
 #define reg_FPU                        edx
@@ -645,6 +621,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define reg_MCE                        edx
 #define reg_CX8                        edx
 #define reg_APIC               edx
+#define reg_INDEX_1_EDX_10     edx
 #define reg_SEP                        edx
 #define reg_MTRR               edx
 #define reg_PGE                        edx
@@ -654,6 +631,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define reg_PSE_36             edx
 #define reg_PSN                        edx
 #define reg_CLFSH              edx
+#define reg_INDEX_1_EDX_20     edx
 #define reg_DS                 edx
 #define reg_ACPI               edx
 #define reg_MMX                        edx
@@ -663,6 +641,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define reg_SS                 edx
 #define reg_HTT                        edx
 #define reg_TM                 edx
+#define reg_INDEX_1_EDX_30     edx
 #define reg_PBE                        edx
 
 /* COMMON_CPUID_INDEX_7.  */
@@ -675,11 +654,13 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define reg_HLE                        ebx
 #define reg_BMI2               ebx
 #define reg_AVX2               ebx
+#define reg_INDEX_7_EBX_6      ebx
 #define reg_SMEP               ebx
 #define reg_ERMS               ebx
 #define reg_INVPCID            ebx
 #define reg_RTM                        ebx
 #define reg_PQM                        ebx
+#define reg_DEPR_FPU_CS_DS     ebx
 #define reg_MPX                        ebx
 #define reg_PQE                        ebx
 #define reg_AVX512F            ebx
@@ -688,6 +669,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define reg_ADX                        ebx
 #define reg_SMAP               ebx
 #define reg_AVX512_IFMA                ebx
+#define reg_INDEX_7_EBX_22     ebx
 #define reg_CLFLUSHOPT         ebx
 #define reg_CLWB               ebx
 #define reg_TRACE              ebx
@@ -712,9 +694,15 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define reg_VPCLMULQDQ         ecx
 #define reg_AVX512_VNNI                ecx
 #define reg_AVX512_BITALG      ecx
+#define reg_INDEX_7_ECX_13     ecx
 #define reg_AVX512_VPOPCNTDQ   ecx
+#define reg_INDEX_7_ECX_15     ecx
+#define reg_INDEX_7_ECX_16     ecx
 #define reg_RDPID              ecx
+#define reg_INDEX_7_ECX_23     ecx
+#define reg_INDEX_7_ECX_24     ecx
 #define reg_CLDEMOTE           ecx
+#define reg_INDEX_7_ECX_26     ecx
 #define reg_MOVDIRI            ecx
 #define reg_MOVDIR64B          ecx
 #define reg_ENQCMD             ecx
@@ -722,17 +710,30 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define reg_PKS                        ecx
 
 /* EDX.  */
+#define reg_INDEX_7_EDX_0      edx
+#define reg_INDEX_7_EDX_1      edx
 #define reg_AVX512_4VNNIW      edx
 #define reg_AVX512_4FMAPS      edx
 #define reg_FSRM               edx
+#define reg_INDEX_7_EDX_5      edx
+#define reg_INDEX_7_EDX_6      edx
+#define reg_INDEX_7_EDX_7      edx
 #define reg_AVX512_VP2INTERSECT        edx
+#define reg_INDEX_7_EDX_9      edx
 #define reg_MD_CLEAR           edx
+#define reg_INDEX_7_EDX_11     edx
+#define reg_INDEX_7_EDX_12     edx
+#define reg_INDEX_7_EDX_13     edx
 #define reg_SERIALIZE          edx
 #define reg_HYBRID             edx
 #define reg_TSXLDTRK           edx
+#define reg_INDEX_7_EDX_17     edx
 #define reg_PCONFIG            edx
+#define reg_INDEX_7_EDX_19     edx
 #define reg_IBT                        edx
+#define reg_INDEX_7_EDX_21     edx
 #define reg_AMX_BF16           edx
+#define reg_INDEX_7_EDX_23     edx
 #define reg_AMX_TILE           edx
 #define reg_AMX_INT8           edx
 #define reg_IBRS_IBPB          edx
@@ -821,23 +822,6 @@ extern const struct cpu_features *__get_cpu_features (void)
 #define index_arch_MathVec_Prefer_No_AVX512    PREFERRED_FEATURE_INDEX_1
 #define index_arch_Prefer_FSRM                 PREFERRED_FEATURE_INDEX_1
 
-#define feature_Fast_Rep_String                        preferred
-#define feature_Fast_Copy_Backward             preferred
-#define feature_Slow_BSF                       preferred
-#define feature_Fast_Unaligned_Load            preferred
-#define feature_Prefer_PMINUB_for_stringop     preferred
-#define feature_Fast_Unaligned_Copy            preferred
-#define feature_I586                           preferred
-#define feature_I686                           preferred
-#define feature_Slow_SSE4_2                    preferred
-#define feature_AVX_Fast_Unaligned_Load                preferred
-#define feature_Prefer_MAP_32BIT_EXEC          preferred
-#define feature_Prefer_No_VZEROUPPER           preferred
-#define feature_Prefer_ERMS                    preferred
-#define feature_Prefer_No_AVX512               preferred
-#define feature_MathVec_Prefer_No_AVX512       preferred
-#define feature_Prefer_FSRM                    preferred
-
 /* XCR0 Feature flags.  */
 #define bit_XMM_state          (1u << 1)
 #define bit_YMM_state          (1u << 2)
@@ -851,8 +835,6 @@ extern const struct cpu_features *__get_cpu_features (void)
 /* Unused for x86.  */
 #  define INIT_ARCH()
 #  define __get_cpu_features() (&GLRO(dl_x86_cpu_features))
-#  define x86_get_cpuid_registers(i) \
-       (&(GLRO(dl_x86_cpu_features).cpuid[i]))
 # endif
 
 #ifdef __x86_64__
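
A minimal sketch of how the three checks defined in this header are intended to differ.  It assumes glibc's internal <cpu-features.h>; the __memcpy_* names are placeholders for illustration, not symbols added by this patch.

extern void *__memcpy_avx2, *__memcpy_sse2, *__memcpy_generic;

static void *
pick_memcpy (void)
{
  /* CPU_FEATURE_USABLE: CPUID bit set and OS support confirmed.  */
  if (CPU_FEATURE_USABLE (AVX2)
      /* CPU_FEATURE_PREFERRED: a tunable-controlled preference bit.  */
      && !CPU_FEATURE_PREFERRED (Prefer_ERMS))
    return __memcpy_avx2;
  /* HAS_CPU_FEATURE: the raw CPUID bit, regardless of OS support.  */
  if (HAS_CPU_FEATURE (SSE2))
    return __memcpy_sse2;
  return __memcpy_generic;
}
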
index 666ec571f2ccce8db32fc8dee02a65ecc8d8face..588bbf9448bd6a44497818651ca3cfeaa84c8701 100644 (file)
@@ -43,66 +43,45 @@ extern __typeof (memcmp) DEFAULT_MEMCMP;
   _Static_assert (sizeof (#name) - 1 == len, #name " != " #len);       \
   if (!DEFAULT_MEMCMP (f, #name, len))                                 \
     {                                                                  \
-      cpu_features->cpuid[index_cpu_##name].reg_##name                 \
-       &= ~bit_cpu_##name;                                             \
+      CPU_FEATURE_UNSET (cpu_features, name)                           \
       break;                                                           \
     }
 
-/* Disable an ARCH feature NAME.  We don't enable an ARCH feature which
-   isn't available.  */
-# define CHECK_GLIBC_IFUNC_ARCH_OFF(f, cpu_features, name, len)                \
+/* Disable a preferred feature NAME.  We don't enable a preferred feature
+   which isn't available.  */
+# define CHECK_GLIBC_IFUNC_PREFERRED_OFF(f, cpu_features, name, len)   \
   _Static_assert (sizeof (#name) - 1 == len, #name " != " #len);       \
   if (!DEFAULT_MEMCMP (f, #name, len))                                 \
     {                                                                  \
-      cpu_features->feature_##name[index_arch_##name]                  \
+      cpu_features->preferred[index_arch_##name]                       \
        &= ~bit_arch_##name;                                            \
       break;                                                           \
     }
 
-/* Enable/disable an ARCH feature NAME.  */
-# define CHECK_GLIBC_IFUNC_ARCH_BOTH(f, cpu_features, name, disable,   \
-                                   len)                                \
+/* Enable/disable a preferred feature NAME.  */
+# define CHECK_GLIBC_IFUNC_PREFERRED_BOTH(f, cpu_features, name,       \
+                                         disable, len)                 \
   _Static_assert (sizeof (#name) - 1 == len, #name " != " #len);       \
   if (!DEFAULT_MEMCMP (f, #name, len))                                 \
     {                                                                  \
       if (disable)                                                     \
-       cpu_features->feature_##name[index_arch_##name]                 \
-         &= ~bit_arch_##name;                                          \
+       cpu_features->preferred[index_arch_##name] &= ~bit_arch_##name; \
       else                                                             \
-       cpu_features->feature_##name[index_arch_##name]                 \
-         |= bit_arch_##name;                                           \
+       cpu_features->preferred[index_arch_##name] |= bit_arch_##name;  \
       break;                                                           \
     }
 
-/* Enable/disable an ARCH feature NAME.  Enable an ARCH feature only
-   if the ARCH feature NEED is also enabled.  */
-# define CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH(f, cpu_features, name,  \
+/* Enable/disable a preferred feature NAME.  Enable a preferred feature
+   only if the feature NEED is usable.  */
+# define CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH(f, cpu_features, name,  \
                                               need, disable, len)      \
   _Static_assert (sizeof (#name) - 1 == len, #name " != " #len);       \
   if (!DEFAULT_MEMCMP (f, #name, len))                                 \
     {                                                                  \
       if (disable)                                                     \
-       cpu_features->feature_##name[index_arch_##name]                 \
-         &= ~bit_arch_##name;                                          \
-      else if (CPU_FEATURES_ARCH_P (cpu_features, need))               \
-       cpu_features->feature_##name[index_arch_##name]                 \
-         |= bit_arch_##name;                                           \
-      break;                                                           \
-    }
-
-/* Enable/disable an ARCH feature NAME.  Enable an ARCH feature only
-   if the CPU feature NEED is also enabled.  */
-# define CHECK_GLIBC_IFUNC_ARCH_NEED_CPU_BOTH(f, cpu_features, name,   \
-                                             need, disable, len)       \
-  _Static_assert (sizeof (#name) - 1 == len, #name " != " #len);       \
-  if (!DEFAULT_MEMCMP (f, #name, len))                                 \
-    {                                                                  \
-      if (disable)                                                     \
-       cpu_features->feature_##name[index_arch_##name]                 \
-         &= ~bit_arch_##name;                                          \
-      else if (CPU_FEATURES_CPU_P (cpu_features, need))                        \
-       cpu_features->feature_##name[index_arch_##name]                 \
-         |= bit_arch_##name;                                           \
+       cpu_features->preferred[index_arch_##name] &= ~bit_arch_##name; \
+      else if (CPU_FEATURE_USABLE_P (cpu_features, need))              \
+       cpu_features->preferred[index_arch_##name] |= bit_arch_##name;  \
       break;                                                           \
     }
 
@@ -178,8 +157,8 @@ TUNABLE_CALLBACK (set_hwcaps) (tunable_val_t *valp)
              CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, ERMS, 4);
              CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, FMA4, 4);
              CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, SSE2, 4);
-             CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, I586, 4);
-             CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, I686, 4);
+             CHECK_GLIBC_IFUNC_PREFERRED_OFF (n, cpu_features, I586, 4);
+             CHECK_GLIBC_IFUNC_PREFERRED_OFF (n, cpu_features, I686, 4);
            }
          break;
        case 5:
@@ -197,6 +176,13 @@ TUNABLE_CALLBACK (set_hwcaps) (tunable_val_t *valp)
              CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, POPCNT, 6);
              CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, SSE4_1, 6);
              CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, SSE4_2, 6);
+             if (!DEFAULT_MEMCMP (n, "XSAVEC", 6))
+               {
+                 /* Update xsave_state_size to XSAVE state size.  */
+                 cpu_features->xsave_state_size
+                   = cpu_features->xsave_state_full_size;
+                 CPU_FEATURE_UNSET (cpu_features, XSAVEC);
+               }
            }
          break;
        case 7:
@@ -216,115 +202,85 @@ TUNABLE_CALLBACK (set_hwcaps) (tunable_val_t *valp)
              CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, AVX512PF, 8);
              CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, AVX512VL, 8);
            }
-         CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features, Slow_BSF,
-                                      disable, 8);
-         break;
-       case 10:
-         if (disable)
-           {
-             CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, AVX_Usable,
-                                         10);
-             CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, FMA_Usable,
-                                         10);
-           }
+         CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features, Slow_BSF,
+                                           disable, 8);
          break;
        case 11:
-         if (disable)
            {
-             CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, AVX2_Usable,
-                                         11);
-             CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, FMA4_Usable,
-                                         11);
-           }
-         CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features, Prefer_ERMS,
-                                      disable, 11);
-         CHECK_GLIBC_IFUNC_ARCH_NEED_CPU_BOTH (n, cpu_features,
-                                               Slow_SSE4_2, SSE4_2,
+             CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
+                                               Prefer_ERMS,
                                                disable, 11);
-         CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features, Prefer_FSRM,
-                                      disable, 11);
-         break;
-       case 13:
-         if (disable)
-           {
-             /* Update xsave_state_size to XSAVE state size.  */
-             cpu_features->xsave_state_size
-               = cpu_features->xsave_state_full_size;
-             CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features,
-                                         XSAVEC_Usable, 13);
-           }
-         break;
-       case 14:
-         if (disable)
-           {
-             CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features,
-                                         AVX512F_Usable, 14);
+             CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
+                                               Prefer_FSRM,
+                                               disable, 11);
+             CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH (n, cpu_features,
+                                                    Slow_SSE4_2,
+                                                    SSE4_2,
+                                                    disable, 11);
            }
          break;
        case 15:
-         if (disable)
            {
-             CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features,
-                                         AVX512DQ_Usable, 15);
+             CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
+                                               Fast_Rep_String,
+                                               disable, 15);
            }
-         CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features, Fast_Rep_String,
-                                      disable, 15);
          break;
        case 16:
            {
-             CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH
-               (n, cpu_features, Prefer_No_AVX512, AVX512F_Usable,
+             CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH
+               (n, cpu_features, Prefer_No_AVX512, AVX512F,
                 disable, 16);
            }
          break;
        case 18:
            {
-             CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features,
-                                          Fast_Copy_Backward, disable,
-                                          18);
+             CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
+                                               Fast_Copy_Backward,
+                                               disable, 18);
            }
          break;
        case 19:
            {
-             CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features,
-                                          Fast_Unaligned_Load, disable,
-                                          19);
-             CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features,
-                                          Fast_Unaligned_Copy, disable,
-                                          19);
+             CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
+                                               Fast_Unaligned_Load,
+                                               disable, 19);
+             CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
+                                               Fast_Unaligned_Copy,
+                                               disable, 19);
            }
          break;
        case 20:
            {
-             CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH
-               (n, cpu_features, Prefer_No_VZEROUPPER, AVX_Usable,
-                disable, 20);
+             CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH
+               (n, cpu_features, Prefer_No_VZEROUPPER, AVX, disable,
+                20);
            }
          break;
        case 21:
            {
-             CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features,
-                                          Prefer_MAP_32BIT_EXEC, disable,
-                                          21);
+             CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
+                                               Prefer_MAP_32BIT_EXEC,
+                                               disable, 21);
            }
          break;
        case 23:
            {
-             CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH
-               (n, cpu_features, AVX_Fast_Unaligned_Load, AVX_Usable,
+             CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH
+               (n, cpu_features, AVX_Fast_Unaligned_Load, AVX,
                 disable, 23);
            }
          break;
        case 24:
            {
-             CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH
-               (n, cpu_features, MathVec_Prefer_No_AVX512,
-                AVX512F_Usable, disable, 24);
+             CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH
+               (n, cpu_features, MathVec_Prefer_No_AVX512, AVX512F,
+                disable, 24);
            }
          break;
        case 26:
            {
-             CHECK_GLIBC_IFUNC_ARCH_NEED_CPU_BOTH
+             CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH
                (n, cpu_features, Prefer_PMINUB_for_stringop, SSE2,
                 disable, 26);
            }
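
For reference, a hand-expanded sketch of what the revised CHECK_GLIBC_IFUNC_CPU_OFF above produces for the ERMS entry, with CPU_FEATURE_UNSET expanded one step further using the header definitions (index_cpu_ERMS is COMMON_CPUID_INDEX_7, reg_ERMS is ebx).  Treat it as roughly what the preprocessor emits, not an exact transcript.

/* CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, ERMS, 4) expands roughly to: */
_Static_assert (sizeof ("ERMS") - 1 == 4, "ERMS" " != " "4");
if (!DEFAULT_MEMCMP (n, "ERMS", 4))
  {
    /* CPU_FEATURE_UNSET (cpu_features, ERMS): clear the usable bit.  */
    cpu_features->features[COMMON_CPUID_INDEX_7].usable.ebx
      &= ~bit_cpu_ERMS;
    break;
  }
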
index 5524b66038767395be70ce893a71fa20f0d2a9c6..03572f7af6ff18d12ef48b263e7f4fb28d0e1cbd 100644 (file)
@@ -74,10 +74,10 @@ dl_cet_check (struct link_map *m, const char *program)
 
             GLIBC_TUNABLES=glibc.cpu.hwcaps=-IBT,-SHSTK
           */
-         enable_ibt &= (HAS_CPU_FEATURE (IBT)
+         enable_ibt &= (CPU_FEATURE_USABLE (IBT)
                         && (enable_ibt_type == cet_always_on
                             || (m->l_cet & lc_ibt) != 0));
-         enable_shstk &= (HAS_CPU_FEATURE (SHSTK)
+         enable_shstk &= (CPU_FEATURE_USABLE (SHSTK)
                           && (enable_shstk_type == cet_always_on
                               || (m->l_cet & lc_shstk) != 0));
        }
index 2cff2e86ba590ef8b831c228359d0e76b0bb2bd3..080c58e70bc420737c800ff222928fbe7539caa6 100644 (file)
@@ -139,6 +139,7 @@ do_test (void)
   CHECK_CPU_FEATURE (INVPCID);
   CHECK_CPU_FEATURE (RTM);
   CHECK_CPU_FEATURE (PQM);
+  CHECK_CPU_FEATURE (DEPR_FPU_CS_DS);
   CHECK_CPU_FEATURE (MPX);
   CHECK_CPU_FEATURE (PQE);
   CHECK_CPU_FEATURE (AVX512F);
@@ -220,35 +221,156 @@ do_test (void)
   CHECK_CPU_FEATURE (AVX512_BF16);
 
   printf ("Usable CPU features:\n");
+  CHECK_CPU_FEATURE_USABLE (SSE3);
+  CHECK_CPU_FEATURE_USABLE (PCLMULQDQ);
+  CHECK_CPU_FEATURE_USABLE (DTES64);
+  CHECK_CPU_FEATURE_USABLE (MONITOR);
+  CHECK_CPU_FEATURE_USABLE (DS_CPL);
+  CHECK_CPU_FEATURE_USABLE (VMX);
+  CHECK_CPU_FEATURE_USABLE (SMX);
+  CHECK_CPU_FEATURE_USABLE (EST);
+  CHECK_CPU_FEATURE_USABLE (TM2);
+  CHECK_CPU_FEATURE_USABLE (SSSE3);
+  CHECK_CPU_FEATURE_USABLE (CNXT_ID);
+  CHECK_CPU_FEATURE_USABLE (SDBG);
   CHECK_CPU_FEATURE_USABLE (FMA);
+  CHECK_CPU_FEATURE_USABLE (CMPXCHG16B);
+  CHECK_CPU_FEATURE_USABLE (XTPRUPDCTRL);
+  CHECK_CPU_FEATURE_USABLE (PDCM);
+  CHECK_CPU_FEATURE_USABLE (PCID);
+  CHECK_CPU_FEATURE_USABLE (DCA);
+  CHECK_CPU_FEATURE_USABLE (SSE4_1);
+  CHECK_CPU_FEATURE_USABLE (SSE4_2);
+  CHECK_CPU_FEATURE_USABLE (X2APIC);
+  CHECK_CPU_FEATURE_USABLE (MOVBE);
+  CHECK_CPU_FEATURE_USABLE (POPCNT);
+  CHECK_CPU_FEATURE_USABLE (TSC_DEADLINE);
+  CHECK_CPU_FEATURE_USABLE (AES);
+  CHECK_CPU_FEATURE_USABLE (XSAVE);
+  CHECK_CPU_FEATURE_USABLE (OSXSAVE);
   CHECK_CPU_FEATURE_USABLE (AVX);
   CHECK_CPU_FEATURE_USABLE (F16C);
+  CHECK_CPU_FEATURE_USABLE (RDRAND);
+  CHECK_CPU_FEATURE_USABLE (FPU);
+  CHECK_CPU_FEATURE_USABLE (VME);
+  CHECK_CPU_FEATURE_USABLE (DE);
+  CHECK_CPU_FEATURE_USABLE (PSE);
+  CHECK_CPU_FEATURE_USABLE (TSC);
+  CHECK_CPU_FEATURE_USABLE (MSR);
+  CHECK_CPU_FEATURE_USABLE (PAE);
+  CHECK_CPU_FEATURE_USABLE (MCE);
+  CHECK_CPU_FEATURE_USABLE (CX8);
+  CHECK_CPU_FEATURE_USABLE (APIC);
+  CHECK_CPU_FEATURE_USABLE (SEP);
+  CHECK_CPU_FEATURE_USABLE (MTRR);
+  CHECK_CPU_FEATURE_USABLE (PGE);
+  CHECK_CPU_FEATURE_USABLE (MCA);
+  CHECK_CPU_FEATURE_USABLE (CMOV);
+  CHECK_CPU_FEATURE_USABLE (PAT);
+  CHECK_CPU_FEATURE_USABLE (PSE_36);
+  CHECK_CPU_FEATURE_USABLE (PSN);
+  CHECK_CPU_FEATURE_USABLE (CLFSH);
+  CHECK_CPU_FEATURE_USABLE (DS);
+  CHECK_CPU_FEATURE_USABLE (ACPI);
+  CHECK_CPU_FEATURE_USABLE (MMX);
+  CHECK_CPU_FEATURE_USABLE (FXSR);
+  CHECK_CPU_FEATURE_USABLE (SSE);
+  CHECK_CPU_FEATURE_USABLE (SSE2);
+  CHECK_CPU_FEATURE_USABLE (SS);
+  CHECK_CPU_FEATURE_USABLE (HTT);
+  CHECK_CPU_FEATURE_USABLE (TM);
+  CHECK_CPU_FEATURE_USABLE (PBE);
+  CHECK_CPU_FEATURE_USABLE (FSGSBASE);
+  CHECK_CPU_FEATURE_USABLE (TSC_ADJUST);
+  CHECK_CPU_FEATURE_USABLE (SGX);
+  CHECK_CPU_FEATURE_USABLE (BMI1);
+  CHECK_CPU_FEATURE_USABLE (HLE);
   CHECK_CPU_FEATURE_USABLE (AVX2);
+  CHECK_CPU_FEATURE_USABLE (SMEP);
+  CHECK_CPU_FEATURE_USABLE (BMI2);
+  CHECK_CPU_FEATURE_USABLE (ERMS);
+  CHECK_CPU_FEATURE_USABLE (INVPCID);
+  CHECK_CPU_FEATURE_USABLE (RTM);
+  CHECK_CPU_FEATURE_USABLE (PQM);
+  CHECK_CPU_FEATURE_USABLE (DEPR_FPU_CS_DS);
+  CHECK_CPU_FEATURE_USABLE (MPX);
+  CHECK_CPU_FEATURE_USABLE (PQE);
   CHECK_CPU_FEATURE_USABLE (AVX512F);
   CHECK_CPU_FEATURE_USABLE (AVX512DQ);
+  CHECK_CPU_FEATURE_USABLE (RDSEED);
+  CHECK_CPU_FEATURE_USABLE (ADX);
+  CHECK_CPU_FEATURE_USABLE (SMAP);
   CHECK_CPU_FEATURE_USABLE (AVX512_IFMA);
+  CHECK_CPU_FEATURE_USABLE (CLFLUSHOPT);
+  CHECK_CPU_FEATURE_USABLE (CLWB);
+  CHECK_CPU_FEATURE_USABLE (TRACE);
   CHECK_CPU_FEATURE_USABLE (AVX512PF);
   CHECK_CPU_FEATURE_USABLE (AVX512ER);
   CHECK_CPU_FEATURE_USABLE (AVX512CD);
+  CHECK_CPU_FEATURE_USABLE (SHA);
   CHECK_CPU_FEATURE_USABLE (AVX512BW);
   CHECK_CPU_FEATURE_USABLE (AVX512VL);
+  CHECK_CPU_FEATURE_USABLE (PREFETCHWT1);
   CHECK_CPU_FEATURE_USABLE (AVX512_VBMI);
+  CHECK_CPU_FEATURE_USABLE (UMIP);
   CHECK_CPU_FEATURE_USABLE (PKU);
+  CHECK_CPU_FEATURE_USABLE (OSPKE);
+  CHECK_CPU_FEATURE_USABLE (WAITPKG);
   CHECK_CPU_FEATURE_USABLE (AVX512_VBMI2);
+  CHECK_CPU_FEATURE_USABLE (SHSTK);
+  CHECK_CPU_FEATURE_USABLE (GFNI);
   CHECK_CPU_FEATURE_USABLE (VAES);
   CHECK_CPU_FEATURE_USABLE (VPCLMULQDQ);
   CHECK_CPU_FEATURE_USABLE (AVX512_VNNI);
   CHECK_CPU_FEATURE_USABLE (AVX512_BITALG);
   CHECK_CPU_FEATURE_USABLE (AVX512_VPOPCNTDQ);
+  CHECK_CPU_FEATURE_USABLE (RDPID);
+  CHECK_CPU_FEATURE_USABLE (CLDEMOTE);
+  CHECK_CPU_FEATURE_USABLE (MOVDIRI);
+  CHECK_CPU_FEATURE_USABLE (MOVDIR64B);
+  CHECK_CPU_FEATURE_USABLE (ENQCMD);
+  CHECK_CPU_FEATURE_USABLE (SGX_LC);
+  CHECK_CPU_FEATURE_USABLE (PKS);
   CHECK_CPU_FEATURE_USABLE (AVX512_4VNNIW);
   CHECK_CPU_FEATURE_USABLE (AVX512_4FMAPS);
+  CHECK_CPU_FEATURE_USABLE (FSRM);
   CHECK_CPU_FEATURE_USABLE (AVX512_VP2INTERSECT);
+  CHECK_CPU_FEATURE_USABLE (MD_CLEAR);
+  CHECK_CPU_FEATURE_USABLE (SERIALIZE);
+  CHECK_CPU_FEATURE_USABLE (HYBRID);
+  CHECK_CPU_FEATURE_USABLE (TSXLDTRK);
+  CHECK_CPU_FEATURE_USABLE (PCONFIG);
+  CHECK_CPU_FEATURE_USABLE (IBT);
   CHECK_CPU_FEATURE_USABLE (AMX_BF16);
   CHECK_CPU_FEATURE_USABLE (AMX_TILE);
   CHECK_CPU_FEATURE_USABLE (AMX_INT8);
+  CHECK_CPU_FEATURE_USABLE (IBRS_IBPB);
+  CHECK_CPU_FEATURE_USABLE (STIBP);
+  CHECK_CPU_FEATURE_USABLE (L1D_FLUSH);
+  CHECK_CPU_FEATURE_USABLE (ARCH_CAPABILITIES);
+  CHECK_CPU_FEATURE_USABLE (CORE_CAPABILITIES);
+  CHECK_CPU_FEATURE_USABLE (SSBD);
+  CHECK_CPU_FEATURE_USABLE (LAHF64_SAHF64);
+  CHECK_CPU_FEATURE_USABLE (SVM);
+  CHECK_CPU_FEATURE_USABLE (LZCNT);
+  CHECK_CPU_FEATURE_USABLE (SSE4A);
+  CHECK_CPU_FEATURE_USABLE (PREFETCHW);
   CHECK_CPU_FEATURE_USABLE (XOP);
+  CHECK_CPU_FEATURE_USABLE (LWP);
   CHECK_CPU_FEATURE_USABLE (FMA4);
+  CHECK_CPU_FEATURE_USABLE (TBM);
+  CHECK_CPU_FEATURE_USABLE (SYSCALL_SYSRET);
+  CHECK_CPU_FEATURE_USABLE (NX);
+  CHECK_CPU_FEATURE_USABLE (PAGE1GB);
+  CHECK_CPU_FEATURE_USABLE (RDTSCP);
+  CHECK_CPU_FEATURE_USABLE (LM);
+  CHECK_CPU_FEATURE_USABLE (XSAVEOPT);
   CHECK_CPU_FEATURE_USABLE (XSAVEC);
+  CHECK_CPU_FEATURE_USABLE (XGETBV_ECX_1);
+  CHECK_CPU_FEATURE_USABLE (XSAVES);
+  CHECK_CPU_FEATURE_USABLE (XFD);
+  CHECK_CPU_FEATURE_USABLE (INVARIANT_TSC);
+  CHECK_CPU_FEATURE_USABLE (WBNOINVD);
   CHECK_CPU_FEATURE_USABLE (AVX512_BF16);
 
   return 0;
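
Each CHECK_CPU_FEATURE_USABLE line above exercises the new per-feature
"usable" bit through CPU_FEATURE_USABLE.  A minimal, self-contained sketch
of that kind of check, using illustrative names and layout rather than
glibc's actual macros:

  /* Illustrative only: a feature is usable when its CPUID bit is set and
     the copy in the usable mask was not cleared (for example because the
     kernel does not save the register state the feature needs).  */
  struct feature_regs
  {
    unsigned int cpuid_ebx;   /* raw CPUID leaf 7 EBX bits  */
    unsigned int usable_ebx;  /* bits decided to be usable  */
  };

  #define BIT_AVX2 (1u << 5)  /* CPUID.(EAX=7,ECX=0):EBX bit 5  */

  static inline int
  avx2_supported (const struct feature_regs *f)
  {
    return (f->cpuid_ebx & BIT_AVX2) != 0;
  }

  static inline int
  avx2_usable (const struct feature_regs *f)
  {
    return (f->usable_ebx & BIT_AVX2) != 0;
  }
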
index e3bb45d78811d70f42bfebb8002dbced5bd8437a..42b97c5cc73892ccb050d585c612d825c829a120 100644 (file)
@@ -57,7 +57,7 @@ modules-names += x86_64/tst-x86_64mod-1
 LDFLAGS-tst-x86_64mod-1.so = -Wl,-soname,tst-x86_64mod-1.so
 ifneq (no,$(have-tunables))
 # Test the state size for XSAVE when XSAVEC is disabled.
-tst-x86_64-1-ENV = GLIBC_TUNABLES=glibc.cpu.hwcaps=-XSAVEC_Usable
+tst-x86_64-1-ENV = GLIBC_TUNABLES=glibc.cpu.hwcaps=-XSAVEC
 endif
 
 $(objpfx)tst-x86_64-1: $(objpfx)x86_64/tst-x86_64mod-1.so
@@ -71,10 +71,10 @@ CFLAGS-tst-platformmod-2.c = -mno-avx
 LDFLAGS-tst-platformmod-2.so = -Wl,-soname,tst-platformmod-2.so
 $(objpfx)tst-platform-1: $(objpfx)tst-platformmod-1.so
 $(objpfx)tst-platform-1.out: $(objpfx)x86_64/tst-platformmod-2.so
-# Turn off AVX512F_Usable and AVX2_Usable so that GLRO(dl_platform) is
+# Turn off AVX512F and AVX2 so that GLRO(dl_platform) is
 # always set to x86_64.
 tst-platform-1-ENV = LD_PRELOAD=$(objpfx)\$$PLATFORM/tst-platformmod-2.so \
-       GLIBC_TUNABLES=glibc.cpu.hwcaps=-AVX512F_Usable,-AVX2_Usable
+       GLIBC_TUNABLES=glibc.cpu.hwcaps=-AVX512F,-AVX2
 endif
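
Because the _Usable suffix is gone, the glibc.cpu.hwcaps tunable now takes
plain feature names, and a "-NAME" token disables the matching usable bit.
A hedged sketch of that mapping (illustrative parsing only, not glibc's
cpu-tunables.c):

  #include <string.h>

  struct toy_features { unsigned int usable_ebx; };  /* illustrative  */

  #define BIT_AVX2    (1u << 5)   /* CPUID leaf 7, EBX bit 5  */
  #define BIT_AVX512F (1u << 16)  /* CPUID leaf 7, EBX bit 16  */

  /* Clear the usable bit named by one "-NAME" token, e.g. "-AVX2".  */
  static void
  disable_feature (struct toy_features *f, const char *token)
  {
    if (token[0] != '-')
      return;
    if (strcmp (token + 1, "AVX2") == 0)
      f->usable_ebx &= ~BIT_AVX2;
    else if (strcmp (token + 1, "AVX512F") == 0)
      f->usable_ebx &= ~BIT_AVX512F;
  }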
 
 tests += tst-audit3 tst-audit4 tst-audit5 tst-audit6 tst-audit7 \
index 8e9baffeb4b2d5101cd1effa89bdd14a48b42aac..ca73d8fef9c11aa814d0f89acbb064e770caec95 100644 (file)
@@ -99,9 +99,9 @@ elf_machine_runtime_setup (struct link_map *l, int lazy, int profile)
         end in this function.  */
       if (__glibc_unlikely (profile))
        {
-         if (HAS_ARCH_FEATURE (AVX512F_Usable))
+         if (CPU_FEATURE_USABLE (AVX512F))
            *(ElfW(Addr) *) (got + 2) = (ElfW(Addr)) &_dl_runtime_profile_avx512;
-         else if (HAS_ARCH_FEATURE (AVX_Usable))
+         else if (CPU_FEATURE_USABLE (AVX))
            *(ElfW(Addr) *) (got + 2) = (ElfW(Addr)) &_dl_runtime_profile_avx;
          else
            *(ElfW(Addr) *) (got + 2) = (ElfW(Addr)) &_dl_runtime_profile_sse;
@@ -119,7 +119,7 @@ elf_machine_runtime_setup (struct link_map *l, int lazy, int profile)
             the resolved address.  */
          if (GLRO(dl_x86_cpu_features).xsave_state_size != 0)
            *(ElfW(Addr) *) (got + 2)
-             = (HAS_ARCH_FEATURE (XSAVEC_Usable)
+             = (CPU_FEATURE_USABLE (XSAVEC)
                 ? (ElfW(Addr)) &_dl_runtime_resolve_xsavec
                 : (ElfW(Addr)) &_dl_runtime_resolve_xsave);
          else
index 435ddad9919729f1ee5064c07cba2fdd1eb08b2d..33ea763de203b7546139ff3276f35d29b7591d89 100644 (file)
@@ -24,7 +24,7 @@
 # define CHECK_ARCH_EXT                                        \
   do                                                           \
     {                                                          \
-      if (!HAS_ARCH_FEATURE (AVX_Usable)) return;              \
+      if (!CPU_FEATURE_USABLE (AVX)) return;                   \
     }                                                          \
   while (0)
 
@@ -34,7 +34,7 @@
 # define CHECK_ARCH_EXT                                        \
   do                                                           \
     {                                                          \
-      if (!HAS_ARCH_FEATURE (AVX2_Usable)) return;             \
+      if (!CPU_FEATURE_USABLE (AVX2)) return;                  \
     }                                                          \
   while (0)
 
@@ -44,7 +44,7 @@
 # define CHECK_ARCH_EXT                                        \
   do                                                           \
     {                                                          \
-      if (!HAS_ARCH_FEATURE (AVX512F_Usable)) return;          \
+      if (!CPU_FEATURE_USABLE (AVX512F)) return;               \
     }                                                          \
   while (0)
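
CHECK_ARCH_EXT is an early-out guard: a math test that exercises an
ISA-specific code path simply returns before touching any vector code when
the extension is not usable.  A hedged usage sketch (the test body and its
name are illustrative):

  static void
  test_avx512_paths (void)
  {
    CHECK_ARCH_EXT;  /* returns early if AVX512F is not usable  */
    /* ... exercise the AVX-512 implementations here ...  */
  }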
 
index 86835eebc114682d7ce1c2c97dc94cad4d46a8f3..95fe2f4d701a6406a84bf415268f75e0c248f49a 100644 (file)
@@ -29,14 +29,14 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA_Usable)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2))
     return OPTIMIZE (fma);
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA4_Usable))
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA4))
     return OPTIMIZE (fma4);
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX_Usable))
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX))
     return OPTIMIZE (avx);
 
   return OPTIMIZE (sse2);
index 2242d97de0a1e9c5e2ababebb065778bae0a68f1..0a25a44ab083093f5374f4c492ff073d5fdb8d91 100644 (file)
@@ -26,8 +26,8 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA_Usable)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2))
     return OPTIMIZE (fma);
 
   return OPTIMIZE (sse2);
index 03adf86b9b223ebc3275c30e0870e905a4f7f8aa..7659758972206cd3e404f12b75ab779f1459032c 100644 (file)
@@ -28,11 +28,11 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA_Usable)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2))
     return OPTIMIZE (fma);
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA4_Usable))
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA4))
     return OPTIMIZE (fma4);
 
   return OPTIMIZE (sse2);
index 9c5c6f147649ac8057731206c119b8e61f39f96e..2655e554445a2971949e70ce92bc15fc98449528 100644 (file)
@@ -31,8 +31,8 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA_Usable)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2))
     return OPTIMIZE (avx2);
 
   return OPTIMIZE (sse_wrapper);
index 70e22c53bfff21cde59203a2d7ed0451ff54eeab..5f8326503bce222aa4f051e5ea33d85d43efecd7 100644 (file)
@@ -34,10 +34,10 @@ IFUNC_SELECTOR (void)
 
   if (!CPU_FEATURES_ARCH_P (cpu_features, MathVec_Prefer_No_AVX512))
     {
-      if (CPU_FEATURES_ARCH_P (cpu_features, AVX512DQ_Usable))
+      if (CPU_FEATURE_USABLE_P (cpu_features, AVX512DQ))
        return OPTIMIZE (skx);
 
-      if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable))
+      if (CPU_FEATURE_USABLE_P (cpu_features, AVX512F))
        return OPTIMIZE (knl);
     }
 
index 63005c0af4d027912cc6006c86e69bf6025c4664..7240e554c96d58a41deed7a06ccf4e3c6a864f0b 100644 (file)
@@ -31,7 +31,7 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_1))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_1))
     return OPTIMIZE (sse4);
 
   return OPTIMIZE (sse2);
index 7f26215da69eb0f9f88745ade4025d5eac563c49..e5d8a6f93258a744cb2cb46be82cd6a6e10d1cad 100644 (file)
@@ -26,7 +26,7 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_1))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_1))
     return OPTIMIZE (sse41);
 
   return OPTIMIZE (c);
index 9992a1e97a5ca84c4d8c5f57b770d62cec54119d..0d8c0b79116606c6bb02ed207aac4e8b66045a00 100644 (file)
@@ -41,8 +41,8 @@ __fma_fma4 (double x, double y, double z)
 }
 
 
-libm_ifunc (__fma, HAS_ARCH_FEATURE (FMA_Usable)
-           ? __fma_fma3 : (HAS_ARCH_FEATURE (FMA4_Usable)
+libm_ifunc (__fma, CPU_FEATURE_USABLE (FMA)
+           ? __fma_fma3 : (CPU_FEATURE_USABLE (FMA4)
                            ? __fma_fma4 : __fma_sse2));
 libm_alias_double (__fma, fma)
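
libm_ifunc binds __fma to an IFUNC resolver, so the FMA/FMA4/SSE2 choice
above is made once at symbol resolution rather than on every call.
Conceptually it behaves like the following GCC-style sketch (this is not
glibc's actual libm_ifunc expansion):

  typedef double (*fma_fn) (double, double, double);

  static fma_fn
  fma_resolver (void)
  {
    if (CPU_FEATURE_USABLE (FMA))
      return __fma_fma3;
    if (CPU_FEATURE_USABLE (FMA4))
      return __fma_fma4;
    return __fma_sse2;
  }

  double __fma (double, double, double)
    __attribute__ ((ifunc ("fma_resolver")));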
 
index 4cbcf1f61be59b695cad71410916ab0c68c1c9a5..c01e5a21d4b556b751bfb10ab5ed6f1251063d66 100644 (file)
@@ -40,8 +40,8 @@ __fmaf_fma4 (float x, float y, float z)
 }
 
 
-libm_ifunc (__fmaf, HAS_ARCH_FEATURE (FMA_Usable)
-           ? __fmaf_fma3 : (HAS_ARCH_FEATURE (FMA4_Usable)
+libm_ifunc (__fmaf, CPU_FEATURE_USABLE (FMA)
+           ? __fmaf_fma3 : (CPU_FEATURE_USABLE (FMA4)
                             ? __fmaf_fma4 : __fmaf_sse2));
 libm_alias_float (__fma, fma)
 
index 69f30398ae87555c5bff0a8965371c85870b6a5d..f4e311d470315e91f0010160ce9f2e3331690f30 100644 (file)
@@ -28,7 +28,7 @@ IFUNC_SELECTOR (void)
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
     return OPTIMIZE (avx2);
 
index ce7eb1eecf67e26e526822d227ec8d8e2c5b55e9..f93ec39d981e6ea7674ac1c290295170d9eddeb8 100644 (file)
@@ -41,19 +41,19 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/memchr.c.  */
   IFUNC_IMPL (i, name, memchr,
              IFUNC_IMPL_ADD (array, i, memchr,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __memchr_avx2)
              IFUNC_IMPL_ADD (array, i, memchr, 1, __memchr_sse2))
 
   /* Support sysdeps/x86_64/multiarch/memcmp.c.  */
   IFUNC_IMPL (i, name, memcmp,
              IFUNC_IMPL_ADD (array, i, memcmp,
-                             (HAS_ARCH_FEATURE (AVX2_Usable)
-                              && HAS_CPU_FEATURE (MOVBE)),
+                             (CPU_FEATURE_USABLE (AVX2)
+                              && CPU_FEATURE_USABLE (MOVBE)),
                              __memcmp_avx2_movbe)
-             IFUNC_IMPL_ADD (array, i, memcmp, HAS_CPU_FEATURE (SSE4_1),
+             IFUNC_IMPL_ADD (array, i, memcmp, CPU_FEATURE_USABLE (SSE4_1),
                              __memcmp_sse4_1)
-             IFUNC_IMPL_ADD (array, i, memcmp, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, memcmp, CPU_FEATURE_USABLE (SSSE3),
                              __memcmp_ssse3)
              IFUNC_IMPL_ADD (array, i, memcmp, 1, __memcmp_sse2))
 
@@ -61,25 +61,25 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/memmove_chk.c.  */
   IFUNC_IMPL (i, name, __memmove_chk,
              IFUNC_IMPL_ADD (array, i, __memmove_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memmove_chk_avx512_no_vzeroupper)
              IFUNC_IMPL_ADD (array, i, __memmove_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memmove_chk_avx512_unaligned)
              IFUNC_IMPL_ADD (array, i, __memmove_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memmove_chk_avx512_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, __memmove_chk,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __memmove_chk_avx_unaligned)
              IFUNC_IMPL_ADD (array, i, __memmove_chk,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __memmove_chk_avx_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, __memmove_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __memmove_chk_ssse3_back)
              IFUNC_IMPL_ADD (array, i, __memmove_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __memmove_chk_ssse3)
              IFUNC_IMPL_ADD (array, i, __memmove_chk, 1,
                              __memmove_chk_sse2_unaligned)
@@ -92,23 +92,23 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/memmove.c.  */
   IFUNC_IMPL (i, name, memmove,
              IFUNC_IMPL_ADD (array, i, memmove,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __memmove_avx_unaligned)
              IFUNC_IMPL_ADD (array, i, memmove,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __memmove_avx_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, memmove,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memmove_avx512_no_vzeroupper)
              IFUNC_IMPL_ADD (array, i, memmove,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memmove_avx512_unaligned)
              IFUNC_IMPL_ADD (array, i, memmove,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memmove_avx512_unaligned_erms)
-             IFUNC_IMPL_ADD (array, i, memmove, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, memmove, CPU_FEATURE_USABLE (SSSE3),
                              __memmove_ssse3_back)
-             IFUNC_IMPL_ADD (array, i, memmove, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, memmove, CPU_FEATURE_USABLE (SSSE3),
                              __memmove_ssse3)
              IFUNC_IMPL_ADD (array, i, memmove, 1, __memmove_erms)
              IFUNC_IMPL_ADD (array, i, memmove, 1,
@@ -119,7 +119,7 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/memrchr.c.  */
   IFUNC_IMPL (i, name, memrchr,
              IFUNC_IMPL_ADD (array, i, memrchr,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __memrchr_avx2)
              IFUNC_IMPL_ADD (array, i, memrchr, 1, __memrchr_sse2))
 
@@ -133,19 +133,19 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
              IFUNC_IMPL_ADD (array, i, __memset_chk, 1,
                              __memset_chk_sse2_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, __memset_chk,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __memset_chk_avx2_unaligned)
              IFUNC_IMPL_ADD (array, i, __memset_chk,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __memset_chk_avx2_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, __memset_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memset_chk_avx512_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, __memset_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memset_chk_avx512_unaligned)
              IFUNC_IMPL_ADD (array, i, __memset_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memset_chk_avx512_no_vzeroupper)
              )
 #endif
@@ -158,48 +158,48 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
                              __memset_sse2_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, memset, 1, __memset_erms)
              IFUNC_IMPL_ADD (array, i, memset,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __memset_avx2_unaligned)
              IFUNC_IMPL_ADD (array, i, memset,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __memset_avx2_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, memset,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memset_avx512_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, memset,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memset_avx512_unaligned)
              IFUNC_IMPL_ADD (array, i, memset,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memset_avx512_no_vzeroupper)
             )
 
   /* Support sysdeps/x86_64/multiarch/rawmemchr.c.  */
   IFUNC_IMPL (i, name, rawmemchr,
              IFUNC_IMPL_ADD (array, i, rawmemchr,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __rawmemchr_avx2)
              IFUNC_IMPL_ADD (array, i, rawmemchr, 1, __rawmemchr_sse2))
 
   /* Support sysdeps/x86_64/multiarch/strlen.c.  */
   IFUNC_IMPL (i, name, strlen,
              IFUNC_IMPL_ADD (array, i, strlen,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __strlen_avx2)
              IFUNC_IMPL_ADD (array, i, strlen, 1, __strlen_sse2))
 
   /* Support sysdeps/x86_64/multiarch/strnlen.c.  */
   IFUNC_IMPL (i, name, strnlen,
              IFUNC_IMPL_ADD (array, i, strnlen,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __strnlen_avx2)
              IFUNC_IMPL_ADD (array, i, strnlen, 1, __strnlen_sse2))
 
   /* Support sysdeps/x86_64/multiarch/stpncpy.c.  */
   IFUNC_IMPL (i, name, stpncpy,
-             IFUNC_IMPL_ADD (array, i, stpncpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, stpncpy, CPU_FEATURE_USABLE (SSSE3),
                              __stpncpy_ssse3)
-             IFUNC_IMPL_ADD (array, i, stpncpy, HAS_ARCH_FEATURE (AVX2_Usable),
+             IFUNC_IMPL_ADD (array, i, stpncpy, CPU_FEATURE_USABLE (AVX2),
                              __stpncpy_avx2)
              IFUNC_IMPL_ADD (array, i, stpncpy, 1,
                              __stpncpy_sse2_unaligned)
@@ -207,9 +207,9 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
 
   /* Support sysdeps/x86_64/multiarch/stpcpy.c.  */
   IFUNC_IMPL (i, name, stpcpy,
-             IFUNC_IMPL_ADD (array, i, stpcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, stpcpy, CPU_FEATURE_USABLE (SSSE3),
                              __stpcpy_ssse3)
-             IFUNC_IMPL_ADD (array, i, stpcpy, HAS_ARCH_FEATURE (AVX2_Usable),
+             IFUNC_IMPL_ADD (array, i, stpcpy, CPU_FEATURE_USABLE (AVX2),
                              __stpcpy_avx2)
              IFUNC_IMPL_ADD (array, i, stpcpy, 1, __stpcpy_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, stpcpy, 1, __stpcpy_sse2))
@@ -217,35 +217,35 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/strcasecmp_l.c.  */
   IFUNC_IMPL (i, name, strcasecmp,
              IFUNC_IMPL_ADD (array, i, strcasecmp,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __strcasecmp_avx)
              IFUNC_IMPL_ADD (array, i, strcasecmp,
-                             HAS_CPU_FEATURE (SSE4_2),
+                             CPU_FEATURE_USABLE (SSE4_2),
                              __strcasecmp_sse42)
              IFUNC_IMPL_ADD (array, i, strcasecmp,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __strcasecmp_ssse3)
              IFUNC_IMPL_ADD (array, i, strcasecmp, 1, __strcasecmp_sse2))
 
   /* Support sysdeps/x86_64/multiarch/strcasecmp_l.c.  */
   IFUNC_IMPL (i, name, strcasecmp_l,
              IFUNC_IMPL_ADD (array, i, strcasecmp_l,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __strcasecmp_l_avx)
              IFUNC_IMPL_ADD (array, i, strcasecmp_l,
-                             HAS_CPU_FEATURE (SSE4_2),
+                             CPU_FEATURE_USABLE (SSE4_2),
                              __strcasecmp_l_sse42)
              IFUNC_IMPL_ADD (array, i, strcasecmp_l,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __strcasecmp_l_ssse3)
              IFUNC_IMPL_ADD (array, i, strcasecmp_l, 1,
                              __strcasecmp_l_sse2))
 
   /* Support sysdeps/x86_64/multiarch/strcat.c.  */
   IFUNC_IMPL (i, name, strcat,
-             IFUNC_IMPL_ADD (array, i, strcat, HAS_ARCH_FEATURE (AVX2_Usable),
+             IFUNC_IMPL_ADD (array, i, strcat, CPU_FEATURE_USABLE (AVX2),
                              __strcat_avx2)
-             IFUNC_IMPL_ADD (array, i, strcat, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strcat, CPU_FEATURE_USABLE (SSSE3),
                              __strcat_ssse3)
              IFUNC_IMPL_ADD (array, i, strcat, 1, __strcat_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, strcat, 1, __strcat_sse2))
@@ -253,7 +253,7 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/strchr.c.  */
   IFUNC_IMPL (i, name, strchr,
              IFUNC_IMPL_ADD (array, i, strchr,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __strchr_avx2)
              IFUNC_IMPL_ADD (array, i, strchr, 1, __strchr_sse2_no_bsf)
              IFUNC_IMPL_ADD (array, i, strchr, 1, __strchr_sse2))
@@ -261,54 +261,54 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/strchrnul.c.  */
   IFUNC_IMPL (i, name, strchrnul,
              IFUNC_IMPL_ADD (array, i, strchrnul,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __strchrnul_avx2)
              IFUNC_IMPL_ADD (array, i, strchrnul, 1, __strchrnul_sse2))
 
   /* Support sysdeps/x86_64/multiarch/strrchr.c.  */
   IFUNC_IMPL (i, name, strrchr,
              IFUNC_IMPL_ADD (array, i, strrchr,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __strrchr_avx2)
              IFUNC_IMPL_ADD (array, i, strrchr, 1, __strrchr_sse2))
 
   /* Support sysdeps/x86_64/multiarch/strcmp.c.  */
   IFUNC_IMPL (i, name, strcmp,
              IFUNC_IMPL_ADD (array, i, strcmp,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __strcmp_avx2)
-             IFUNC_IMPL_ADD (array, i, strcmp, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, strcmp, CPU_FEATURE_USABLE (SSE4_2),
                              __strcmp_sse42)
-             IFUNC_IMPL_ADD (array, i, strcmp, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strcmp, CPU_FEATURE_USABLE (SSSE3),
                              __strcmp_ssse3)
              IFUNC_IMPL_ADD (array, i, strcmp, 1, __strcmp_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, strcmp, 1, __strcmp_sse2))
 
   /* Support sysdeps/x86_64/multiarch/strcpy.c.  */
   IFUNC_IMPL (i, name, strcpy,
-             IFUNC_IMPL_ADD (array, i, strcpy, HAS_ARCH_FEATURE (AVX2_Usable),
+             IFUNC_IMPL_ADD (array, i, strcpy, CPU_FEATURE_USABLE (AVX2),
                              __strcpy_avx2)
-             IFUNC_IMPL_ADD (array, i, strcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strcpy, CPU_FEATURE_USABLE (SSSE3),
                              __strcpy_ssse3)
              IFUNC_IMPL_ADD (array, i, strcpy, 1, __strcpy_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, strcpy, 1, __strcpy_sse2))
 
   /* Support sysdeps/x86_64/multiarch/strcspn.c.  */
   IFUNC_IMPL (i, name, strcspn,
-             IFUNC_IMPL_ADD (array, i, strcspn, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, strcspn, CPU_FEATURE_USABLE (SSE4_2),
                              __strcspn_sse42)
              IFUNC_IMPL_ADD (array, i, strcspn, 1, __strcspn_sse2))
 
   /* Support sysdeps/x86_64/multiarch/strncase_l.c.  */
   IFUNC_IMPL (i, name, strncasecmp,
              IFUNC_IMPL_ADD (array, i, strncasecmp,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __strncasecmp_avx)
              IFUNC_IMPL_ADD (array, i, strncasecmp,
-                             HAS_CPU_FEATURE (SSE4_2),
+                             CPU_FEATURE_USABLE (SSE4_2),
                              __strncasecmp_sse42)
              IFUNC_IMPL_ADD (array, i, strncasecmp,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __strncasecmp_ssse3)
              IFUNC_IMPL_ADD (array, i, strncasecmp, 1,
                              __strncasecmp_sse2))
@@ -316,22 +316,22 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/strncase_l.c.  */
   IFUNC_IMPL (i, name, strncasecmp_l,
              IFUNC_IMPL_ADD (array, i, strncasecmp_l,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __strncasecmp_l_avx)
              IFUNC_IMPL_ADD (array, i, strncasecmp_l,
-                             HAS_CPU_FEATURE (SSE4_2),
+                             CPU_FEATURE_USABLE (SSE4_2),
                              __strncasecmp_l_sse42)
              IFUNC_IMPL_ADD (array, i, strncasecmp_l,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __strncasecmp_l_ssse3)
              IFUNC_IMPL_ADD (array, i, strncasecmp_l, 1,
                              __strncasecmp_l_sse2))
 
   /* Support sysdeps/x86_64/multiarch/strncat.c.  */
   IFUNC_IMPL (i, name, strncat,
-             IFUNC_IMPL_ADD (array, i, strncat, HAS_ARCH_FEATURE (AVX2_Usable),
+             IFUNC_IMPL_ADD (array, i, strncat, CPU_FEATURE_USABLE (AVX2),
                              __strncat_avx2)
-             IFUNC_IMPL_ADD (array, i, strncat, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strncat, CPU_FEATURE_USABLE (SSSE3),
                              __strncat_ssse3)
              IFUNC_IMPL_ADD (array, i, strncat, 1,
                              __strncat_sse2_unaligned)
@@ -339,9 +339,9 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
 
   /* Support sysdeps/x86_64/multiarch/strncpy.c.  */
   IFUNC_IMPL (i, name, strncpy,
-             IFUNC_IMPL_ADD (array, i, strncpy, HAS_ARCH_FEATURE (AVX2_Usable),
+             IFUNC_IMPL_ADD (array, i, strncpy, CPU_FEATURE_USABLE (AVX2),
                              __strncpy_avx2)
-             IFUNC_IMPL_ADD (array, i, strncpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strncpy, CPU_FEATURE_USABLE (SSSE3),
                              __strncpy_ssse3)
              IFUNC_IMPL_ADD (array, i, strncpy, 1,
                              __strncpy_sse2_unaligned)
@@ -349,14 +349,14 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
 
   /* Support sysdeps/x86_64/multiarch/strpbrk.c.  */
   IFUNC_IMPL (i, name, strpbrk,
-             IFUNC_IMPL_ADD (array, i, strpbrk, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, strpbrk, CPU_FEATURE_USABLE (SSE4_2),
                              __strpbrk_sse42)
              IFUNC_IMPL_ADD (array, i, strpbrk, 1, __strpbrk_sse2))
 
 
   /* Support sysdeps/x86_64/multiarch/strspn.c.  */
   IFUNC_IMPL (i, name, strspn,
-             IFUNC_IMPL_ADD (array, i, strspn, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, strspn, CPU_FEATURE_USABLE (SSE4_2),
                              __strspn_sse42)
              IFUNC_IMPL_ADD (array, i, strspn, 1, __strspn_sse2))
 
@@ -368,70 +368,70 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/wcschr.c.  */
   IFUNC_IMPL (i, name, wcschr,
              IFUNC_IMPL_ADD (array, i, wcschr,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __wcschr_avx2)
              IFUNC_IMPL_ADD (array, i, wcschr, 1, __wcschr_sse2))
 
   /* Support sysdeps/x86_64/multiarch/wcsrchr.c.  */
   IFUNC_IMPL (i, name, wcsrchr,
              IFUNC_IMPL_ADD (array, i, wcsrchr,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __wcsrchr_avx2)
              IFUNC_IMPL_ADD (array, i, wcsrchr, 1, __wcsrchr_sse2))
 
   /* Support sysdeps/x86_64/multiarch/wcscmp.c.  */
   IFUNC_IMPL (i, name, wcscmp,
              IFUNC_IMPL_ADD (array, i, wcscmp,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __wcscmp_avx2)
              IFUNC_IMPL_ADD (array, i, wcscmp, 1, __wcscmp_sse2))
 
   /* Support sysdeps/x86_64/multiarch/wcsncmp.c.  */
   IFUNC_IMPL (i, name, wcsncmp,
              IFUNC_IMPL_ADD (array, i, wcsncmp,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __wcsncmp_avx2)
              IFUNC_IMPL_ADD (array, i, wcsncmp, 1, __wcsncmp_sse2))
 
   /* Support sysdeps/x86_64/multiarch/wcscpy.c.  */
   IFUNC_IMPL (i, name, wcscpy,
-             IFUNC_IMPL_ADD (array, i, wcscpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, wcscpy, CPU_FEATURE_USABLE (SSSE3),
                              __wcscpy_ssse3)
              IFUNC_IMPL_ADD (array, i, wcscpy, 1, __wcscpy_sse2))
 
   /* Support sysdeps/x86_64/multiarch/wcslen.c.  */
   IFUNC_IMPL (i, name, wcslen,
              IFUNC_IMPL_ADD (array, i, wcslen,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __wcslen_avx2)
              IFUNC_IMPL_ADD (array, i, wcslen, 1, __wcslen_sse2))
 
   /* Support sysdeps/x86_64/multiarch/wcsnlen.c.  */
   IFUNC_IMPL (i, name, wcsnlen,
              IFUNC_IMPL_ADD (array, i, wcsnlen,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __wcsnlen_avx2)
              IFUNC_IMPL_ADD (array, i, wcsnlen,
-                             HAS_CPU_FEATURE (SSE4_1),
+                             CPU_FEATURE_USABLE (SSE4_1),
                              __wcsnlen_sse4_1)
              IFUNC_IMPL_ADD (array, i, wcsnlen, 1, __wcsnlen_sse2))
 
   /* Support sysdeps/x86_64/multiarch/wmemchr.c.  */
   IFUNC_IMPL (i, name, wmemchr,
              IFUNC_IMPL_ADD (array, i, wmemchr,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __wmemchr_avx2)
              IFUNC_IMPL_ADD (array, i, wmemchr, 1, __wmemchr_sse2))
 
   /* Support sysdeps/x86_64/multiarch/wmemcmp.c.  */
   IFUNC_IMPL (i, name, wmemcmp,
              IFUNC_IMPL_ADD (array, i, wmemcmp,
-                             (HAS_ARCH_FEATURE (AVX2_Usable)
-                              && HAS_CPU_FEATURE (MOVBE)),
+                             (CPU_FEATURE_USABLE (AVX2)
+                              && CPU_FEATURE_USABLE (MOVBE)),
                              __wmemcmp_avx2_movbe)
-             IFUNC_IMPL_ADD (array, i, wmemcmp, HAS_CPU_FEATURE (SSE4_1),
+             IFUNC_IMPL_ADD (array, i, wmemcmp, CPU_FEATURE_USABLE (SSE4_1),
                              __wmemcmp_sse4_1)
-             IFUNC_IMPL_ADD (array, i, wmemcmp, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, wmemcmp, CPU_FEATURE_USABLE (SSSE3),
                              __wmemcmp_ssse3)
              IFUNC_IMPL_ADD (array, i, wmemcmp, 1, __wmemcmp_sse2))
 
@@ -440,35 +440,35 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
              IFUNC_IMPL_ADD (array, i, wmemset, 1,
                              __wmemset_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, wmemset,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __wmemset_avx2_unaligned)
              IFUNC_IMPL_ADD (array, i, wmemset,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __wmemset_avx512_unaligned))
 
 #ifdef SHARED
   /* Support sysdeps/x86_64/multiarch/memcpy_chk.c.  */
   IFUNC_IMPL (i, name, __memcpy_chk,
              IFUNC_IMPL_ADD (array, i, __memcpy_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memcpy_chk_avx512_no_vzeroupper)
              IFUNC_IMPL_ADD (array, i, __memcpy_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memcpy_chk_avx512_unaligned)
              IFUNC_IMPL_ADD (array, i, __memcpy_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memcpy_chk_avx512_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, __memcpy_chk,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __memcpy_chk_avx_unaligned)
              IFUNC_IMPL_ADD (array, i, __memcpy_chk,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __memcpy_chk_avx_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, __memcpy_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __memcpy_chk_ssse3_back)
              IFUNC_IMPL_ADD (array, i, __memcpy_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __memcpy_chk_ssse3)
              IFUNC_IMPL_ADD (array, i, __memcpy_chk, 1,
                              __memcpy_chk_sse2_unaligned)
@@ -481,23 +481,23 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/memcpy.c.  */
   IFUNC_IMPL (i, name, memcpy,
              IFUNC_IMPL_ADD (array, i, memcpy,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __memcpy_avx_unaligned)
              IFUNC_IMPL_ADD (array, i, memcpy,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __memcpy_avx_unaligned_erms)
-             IFUNC_IMPL_ADD (array, i, memcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, memcpy, CPU_FEATURE_USABLE (SSSE3),
                              __memcpy_ssse3_back)
-             IFUNC_IMPL_ADD (array, i, memcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, memcpy, CPU_FEATURE_USABLE (SSSE3),
                              __memcpy_ssse3)
              IFUNC_IMPL_ADD (array, i, memcpy,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memcpy_avx512_no_vzeroupper)
              IFUNC_IMPL_ADD (array, i, memcpy,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memcpy_avx512_unaligned)
              IFUNC_IMPL_ADD (array, i, memcpy,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __memcpy_avx512_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, memcpy, 1, __memcpy_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, memcpy, 1,
@@ -508,25 +508,25 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/mempcpy_chk.c.  */
   IFUNC_IMPL (i, name, __mempcpy_chk,
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __mempcpy_chk_avx512_no_vzeroupper)
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __mempcpy_chk_avx512_unaligned)
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __mempcpy_chk_avx512_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __mempcpy_chk_avx_unaligned)
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __mempcpy_chk_avx_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __mempcpy_chk_ssse3_back)
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
-                             HAS_CPU_FEATURE (SSSE3),
+                             CPU_FEATURE_USABLE (SSSE3),
                              __mempcpy_chk_ssse3)
              IFUNC_IMPL_ADD (array, i, __mempcpy_chk, 1,
                              __mempcpy_chk_sse2_unaligned)
@@ -539,23 +539,23 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/mempcpy.c.  */
   IFUNC_IMPL (i, name, mempcpy,
              IFUNC_IMPL_ADD (array, i, mempcpy,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __mempcpy_avx512_no_vzeroupper)
              IFUNC_IMPL_ADD (array, i, mempcpy,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __mempcpy_avx512_unaligned)
              IFUNC_IMPL_ADD (array, i, mempcpy,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __mempcpy_avx512_unaligned_erms)
              IFUNC_IMPL_ADD (array, i, mempcpy,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __mempcpy_avx_unaligned)
              IFUNC_IMPL_ADD (array, i, mempcpy,
-                             HAS_ARCH_FEATURE (AVX_Usable),
+                             CPU_FEATURE_USABLE (AVX),
                              __mempcpy_avx_unaligned_erms)
-             IFUNC_IMPL_ADD (array, i, mempcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, mempcpy, CPU_FEATURE_USABLE (SSSE3),
                              __mempcpy_ssse3_back)
-             IFUNC_IMPL_ADD (array, i, mempcpy, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, mempcpy, CPU_FEATURE_USABLE (SSSE3),
                              __mempcpy_ssse3)
              IFUNC_IMPL_ADD (array, i, mempcpy, 1,
                              __mempcpy_sse2_unaligned)
@@ -566,11 +566,11 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
   /* Support sysdeps/x86_64/multiarch/strncmp.c.  */
   IFUNC_IMPL (i, name, strncmp,
              IFUNC_IMPL_ADD (array, i, strncmp,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __strncmp_avx2)
-             IFUNC_IMPL_ADD (array, i, strncmp, HAS_CPU_FEATURE (SSE4_2),
+             IFUNC_IMPL_ADD (array, i, strncmp, CPU_FEATURE_USABLE (SSE4_2),
                              __strncmp_sse42)
-             IFUNC_IMPL_ADD (array, i, strncmp, HAS_CPU_FEATURE (SSSE3),
+             IFUNC_IMPL_ADD (array, i, strncmp, CPU_FEATURE_USABLE (SSSE3),
                              __strncmp_ssse3)
              IFUNC_IMPL_ADD (array, i, strncmp, 1, __strncmp_sse2))
 
@@ -580,10 +580,10 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
              IFUNC_IMPL_ADD (array, i, __wmemset_chk, 1,
                              __wmemset_chk_sse2_unaligned)
              IFUNC_IMPL_ADD (array, i, __wmemset_chk,
-                             HAS_ARCH_FEATURE (AVX2_Usable),
+                             CPU_FEATURE_USABLE (AVX2),
                              __wmemset_chk_avx2_unaligned)
              IFUNC_IMPL_ADD (array, i, __wmemset_chk,
-                             HAS_ARCH_FEATURE (AVX512F_Usable),
+                             CPU_FEATURE_USABLE (AVX512F),
                              __wmemset_chk_avx512_unaligned))
 #endif
 
index c14db39cf49d48cc104f1352cf49f25fc55fe422..0e21b3a6288ed2633e6ed5b3983be7957bc205c5 100644 (file)
@@ -30,15 +30,15 @@ IFUNC_SELECTOR (void)
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
-      && CPU_FEATURES_CPU_P (cpu_features, MOVBE)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
+      && CPU_FEATURE_USABLE_P (cpu_features, MOVBE)
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
     return OPTIMIZE (avx2_movbe);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_1))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_1))
     return OPTIMIZE (sse4_1);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
     return OPTIMIZE (ssse3);
 
   return OPTIMIZE (sse2);
index 81673d2019df70e6d05e13b623bb85c355ba7da7..9ada03aa43f94f54670d00cfb1eba58b2b755c44 100644 (file)
@@ -45,13 +45,13 @@ IFUNC_SELECTOR (void)
       || CPU_FEATURES_ARCH_P (cpu_features, Prefer_FSRM))
     return OPTIMIZE (erms);
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable)
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX512F)
       && !CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_AVX512))
     {
       if (CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER))
        return OPTIMIZE (avx512_no_vzeroupper);
 
-      if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
+      if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
        return OPTIMIZE (avx512_unaligned_erms);
 
       return OPTIMIZE (avx512_unaligned);
@@ -59,16 +59,16 @@ IFUNC_SELECTOR (void)
 
   if (CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
     {
-      if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
+      if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
        return OPTIMIZE (avx_unaligned_erms);
 
       return OPTIMIZE (avx_unaligned);
     }
 
-  if (!CPU_FEATURES_CPU_P (cpu_features, SSSE3)
+  if (!CPU_FEATURE_USABLE_P (cpu_features, SSSE3)
       || CPU_FEATURES_ARCH_P (cpu_features, Fast_Unaligned_Copy))
     {
-      if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
+      if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
        return OPTIMIZE (sse2_unaligned_erms);
 
       return OPTIMIZE (sse2_unaligned);
index d690293385db33eeb3b05b5502c972f67447fdab..f52613d3720cc5f6bf005b3202f8eee67051889e 100644 (file)
@@ -42,27 +42,27 @@ IFUNC_SELECTOR (void)
   if (CPU_FEATURES_ARCH_P (cpu_features, Prefer_ERMS))
     return OPTIMIZE (erms);
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable)
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX512F)
       && !CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_AVX512))
     {
       if (CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER))
        return OPTIMIZE (avx512_no_vzeroupper);
 
-      if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
+      if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
        return OPTIMIZE (avx512_unaligned_erms);
 
       return OPTIMIZE (avx512_unaligned);
     }
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX2))
     {
-      if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
+      if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
        return OPTIMIZE (avx2_unaligned_erms);
       else
        return OPTIMIZE (avx2_unaligned);
     }
 
-  if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
+  if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
     return OPTIMIZE (sse2_unaligned_erms);
 
   return OPTIMIZE (sse2_unaligned);
index 082179c89a8e2c1bd44e4c8441345487778a6e22..cbf18385d337f72255ba5145e997e7e86e484d07 100644 (file)
@@ -27,7 +27,7 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_2))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_2))
     return OPTIMIZE (sse42);
 
   return OPTIMIZE (sse2);
index f349ee70fd13358d16ae4b162afa412d3f4706af..08183339318e1dd740317291df53de217db322e8 100644 (file)
@@ -29,14 +29,14 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX_Usable))
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX))
     return OPTIMIZE (avx);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_2)
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_2)
       && !CPU_FEATURES_ARCH_P (cpu_features, Slow_SSE4_2))
     return OPTIMIZE (sse42);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
     return OPTIMIZE (ssse3);
 
   return OPTIMIZE (sse2);
index ae4f451803d1a0f257e3be13a7187cc63345d88b..63b0dc0d968c1dfef551aaff720cb83d263a9471 100644 (file)
@@ -32,14 +32,14 @@ IFUNC_SELECTOR (void)
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
     return OPTIMIZE (avx2);
 
   if (CPU_FEATURES_ARCH_P (cpu_features, Fast_Unaligned_Load))
     return OPTIMIZE (sse2_unaligned);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
     return OPTIMIZE (ssse3);
 
   return OPTIMIZE (sse2);
index 583f6310a1c716578b14e603c42de41b367eea1e..8cfce562fc299b945841486e61f98a2af768efee 100644 (file)
@@ -28,10 +28,10 @@ IFUNC_SELECTOR (void)
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
     {
-      if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable)
+      if (CPU_FEATURE_USABLE_P (cpu_features, AVX512F)
          && !CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_AVX512))
        return OPTIMIZE (avx512_unaligned);
       else
index 686fe0779cc77bdcc69e8bb806b006ef034f2b04..074c663cf6e372efcf18bd512ce5828805e8d530 100644 (file)
@@ -33,4 +33,4 @@
 #undef __sched_cpucount
 
 libc_ifunc (__sched_cpucount,
-           HAS_CPU_FEATURE (POPCNT) ? popcount_cpucount : generic_cpucount);
+           CPU_FEATURE_USABLE (POPCNT) ? popcount_cpucount : generic_cpucount);
index f27980dd36d076cc06db8f9bd612ce883d288769..8df4609bf8cf83b91aa0d03dee3e1f02f38e266e 100644 (file)
@@ -36,7 +36,7 @@ IFUNC_SELECTOR (void)
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
     return OPTIMIZE (avx2);
 
index 4db7332ac1753baed2867b23f34505a9578bd2f7..16ae72a4c809fccd53018f2295ee27ef91df7dcd 100644 (file)
@@ -37,14 +37,14 @@ IFUNC_SELECTOR (void)
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
     return OPTIMIZE (avx2);
 
   if (CPU_FEATURES_ARCH_P (cpu_features, Fast_Unaligned_Load))
     return OPTIMIZE (sse2_unaligned);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
     return OPTIMIZE (ssse3);
 
   return OPTIMIZE (sse2);
index 6b63b0ac291cc2606e8be9d2f662a5881401c935..3c94b3ffd92828a1d06dcbabd81110f15c555461 100644 (file)
@@ -37,15 +37,15 @@ IFUNC_SELECTOR (void)
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
     return OPTIMIZE (avx2);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_2)
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_2)
       && !CPU_FEATURES_ARCH_P (cpu_features, Slow_SSE4_2))
     return OPTIMIZE (sse42);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
     return OPTIMIZE (ssse3);
 
   return OPTIMIZE (sse2);
index 317373ceda5836ea92b1dbbe52c1d42be10438dc..2782803e73ab11a93e33b35bdab7b31ed2758b51 100644 (file)
@@ -75,18 +75,18 @@ do_test (int argc, char **argv)
   int fails;
 
   get_cpuinfo ();
-  fails = check_proc ("avx", HAS_ARCH_FEATURE (AVX_Usable),
-                     "HAS_ARCH_FEATURE (AVX_Usable)");
-  fails += check_proc ("fma4", HAS_ARCH_FEATURE (FMA4_Usable),
-                      "HAS_ARCH_FEATURE (FMA4_Usable)");
-  fails += check_proc ("sse4_2", HAS_CPU_FEATURE (SSE4_2),
-                      "HAS_CPU_FEATURE (SSE4_2)");
-  fails += check_proc ("sse4_1", HAS_CPU_FEATURE (SSE4_1)
-                      , "HAS_CPU_FEATURE (SSE4_1)");
-  fails += check_proc ("ssse3", HAS_CPU_FEATURE (SSSE3),
-                      "HAS_CPU_FEATURE (SSSE3)");
-  fails += check_proc ("popcnt", HAS_CPU_FEATURE (POPCNT),
-                      "HAS_CPU_FEATURE (POPCNT)");
+  fails = check_proc ("avx", CPU_FEATURE_USABLE (AVX),
+                     "CPU_FEATURE_USABLE (AVX)");
+  fails += check_proc ("fma4", CPU_FEATURE_USABLE (FMA4),
+                      "CPU_FEATURE_USABLE (FMA4)");
+  fails += check_proc ("sse4_2", CPU_FEATURE_USABLE (SSE4_2),
+                      "CPU_FEATURE_USABLE (SSE4_2)");
+  fails += check_proc ("sse4_1", CPU_FEATURE_USABLE (SSE4_1)
+                      , "CPU_FEATURE_USABLE (SSE4_1)");
+  fails += check_proc ("ssse3", CPU_FEATURE_USABLE (SSSE3),
+                      "CPU_FEATURE_USABLE (SSSE3)");
+  fails += check_proc ("popcnt", CPU_FEATURE_USABLE (POPCNT),
+                      "CPU_FEATURE_USABLE (POPCNT)");
 
   printf ("%d differences between /proc/cpuinfo and glibc code.\n", fails);
 
index 0dd2a9a34b409e7ef31c661a3216d5ae68f74bd0..e08536c5934d6c362009bf2b1b6437584e32d7b0 100644 (file)
@@ -34,7 +34,7 @@ IFUNC_SELECTOR (void)
 {
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
     return OPTIMIZE (ssse3);
 
   return OPTIMIZE (sse2);
index 8c1fc1a57428cf9afba1dd4b39c463808227427a..52e7e5d4f3002e7a5dbc6a9b0db86df3fcab1197 100644 (file)
@@ -36,11 +36,11 @@ IFUNC_SELECTOR (void)
   const struct cpu_features* cpu_features = __get_cpu_features ();
 
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
     return OPTIMIZE (avx2);
 
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_1))
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_1))
     return OPTIMIZE (sse4_1);
 
   return OPTIMIZE (sse2);