const struct cpu_features* cpu_features = __get_cpu_features ();
if (CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
+ && CPU_FEATURES_CPU_P (cpu_features, BMI2)
&& CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
{
if (CPU_FEATURES_ARCH_P (cpu_features, AVX512VL_Usable)
- && CPU_FEATURES_ARCH_P (cpu_features, AVX512BW_Usable)
- && CPU_FEATURES_CPU_P (cpu_features, BMI2))
+ && CPU_FEATURES_ARCH_P (cpu_features, AVX512BW_Usable))
return OPTIMIZE (evex);
if (CPU_FEATURES_CPU_P (cpu_features, RTM))
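
For reference, the hunk above changes the strchr IFUNC selector (sysdeps/x86_64/multiarch/strchr.c upstream), while the IFUNC_IMPL_ADD entries below belong to the IFUNC_IMPL tables in sysdeps/x86_64/multiarch/ifunc-impl-list.c.  The following sketch shows how the selector reads with the hunk applied; only the lines visible in the hunk come from the patch, and the avx2_rtm/avx2/sse2 returns at the end are assumptions inferred from the variant list below, not part of this excerpt.

static inline void *
IFUNC_SELECTOR (void)
{
  const struct cpu_features* cpu_features = __get_cpu_features ();

  /* BMI2 is now required up front, before any AVX2-based variant is
     considered.  */
  if (CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
      && CPU_FEATURES_CPU_P (cpu_features, BMI2)
      && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
    {
      /* The EVEX branch no longer repeats the BMI2 test; the outer
         condition already guarantees it.  */
      if (CPU_FEATURES_ARCH_P (cpu_features, AVX512VL_Usable)
          && CPU_FEATURES_ARCH_P (cpu_features, AVX512BW_Usable))
        return OPTIMIZE (evex);

      if (CPU_FEATURES_CPU_P (cpu_features, RTM))
        return OPTIMIZE (avx2_rtm);  /* assumed, not shown in the hunk */

      return OPTIMIZE (avx2);        /* assumed, not shown in the hunk */
    }

  return OPTIMIZE (sse2);            /* assumed baseline, not shown in the hunk */
}
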
/* Support sysdeps/x86_64/multiarch/strchr.c. */
IFUNC_IMPL (i, name, strchr,
IFUNC_IMPL_ADD (array, i, strchr,
- HAS_ARCH_FEATURE (AVX2_Usable),
+ (HAS_ARCH_FEATURE (AVX2_Usable)
+ && HAS_CPU_FEATURE (BMI2)),
__strchr_avx2)
IFUNC_IMPL_ADD (array, i, strchr,
(HAS_ARCH_FEATURE (AVX2_Usable)
+ && HAS_CPU_FEATURE (BMI2)
&& HAS_CPU_FEATURE (RTM)),
__strchr_avx2_rtm)
IFUNC_IMPL_ADD (array, i, strchr,
/* Support sysdeps/x86_64/multiarch/strchrnul.c. */
IFUNC_IMPL (i, name, strchrnul,
IFUNC_IMPL_ADD (array, i, strchrnul,
- HAS_ARCH_FEATURE (AVX2_Usable),
+ (HAS_ARCH_FEATURE (AVX2_Usable)
+ && HAS_CPU_FEATURE (BMI2)),
__strchrnul_avx2)
IFUNC_IMPL_ADD (array, i, strchrnul,
(HAS_ARCH_FEATURE (AVX2_Usable)
+ && HAS_CPU_FEATURE (BMI2)
&& HAS_CPU_FEATURE (RTM)),
__strchrnul_avx2_rtm)
IFUNC_IMPL_ADD (array, i, strchrnul,
/* Support sysdeps/x86_64/multiarch/wcschr.c. */
IFUNC_IMPL (i, name, wcschr,
IFUNC_IMPL_ADD (array, i, wcschr,
- HAS_ARCH_FEATURE (AVX2_Usable),
+ (HAS_ARCH_FEATURE (AVX2_Usable)
+ && HAS_CPU_FEATURE (BMI2)),
__wcschr_avx2)
IFUNC_IMPL_ADD (array, i, wcschr,
(HAS_ARCH_FEATURE (AVX2_Usable)
+ && HAS_CPU_FEATURE (BMI2)
&& HAS_CPU_FEATURE (RTM)),
__wcschr_avx2_rtm)
IFUNC_IMPL_ADD (array, i, wcschr,
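
The motivation for requiring BMI2 alongside AVX2 is that these AVX2 implementations execute BMI2 instructions, so dispatching to them on a CPU (or a VM whose CPUID mask hides BMI2) that advertises AVX2 without BMI2 would fault with SIGILL.  Below is a hypothetical standalone sketch, not glibc code, that mirrors the same gating with GCC's __builtin_cpu_supports:

#include <stdio.h>

int
main (void)
{
  /* Populate the CPU feature data used by __builtin_cpu_supports.  */
  __builtin_cpu_init ();

  int avx2 = __builtin_cpu_supports ("avx2");
  int bmi2 = __builtin_cpu_supports ("bmi2");

  /* Mirror the patched condition: AVX2 alone is no longer sufficient to
     pick the AVX2 variant; BMI2 must be present as well.  */
  const char *variant = (avx2 && bmi2) ? "avx2" : "baseline";
  printf ("avx2=%d bmi2=%d -> %s variant\n", avx2, bmi2, variant);
  return 0;
}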