cpu_features->feature[index_arch_Prefer_No_VZEROUPPER]
|= bit_arch_Prefer_No_VZEROUPPER;
else
- cpu_features->feature[index_arch_Prefer_No_AVX512]
- |= bit_arch_Prefer_No_AVX512;
+ {
+ cpu_features->feature[index_arch_Prefer_No_AVX512]
+ |= bit_arch_Prefer_No_AVX512;
+
+ /* Avoid RTM abort triggered by VZEROUPPER inside a
+ transactionally executing RTM region. */
+ if (CPU_FEATURES_CPU_P (cpu_features, RTM))
+ cpu_features->feature[index_arch_Prefer_No_VZEROUPPER]
+ |= bit_arch_Prefer_No_VZEROUPPER;
+
+ /* To compare two 32-byte strings, 256-bit EVEX strcmp needs
+ 2 loads, 3 VPCMPs and 2 KORDs, whereas AVX2 strcmp needs only
+ 1 load, 2 VPCMPEQs, 1 VPMINU and 1 VPMOVMSKB; hence AVX2
+ strcmp is faster than EVEX strcmp. */
+ if (CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
+ cpu_features->feature[index_arch_Prefer_AVX2_STRCMP]
+ |= bit_arch_Prefer_AVX2_STRCMP;
+ }
}
/* This spells out "AuthenticAMD" or "HygonGenuine". */
else if ((ebx == 0x68747541 && ecx == 0x444d4163 && edx == 0x69746e65)
#define bit_arch_Prefer_FSRM (1u << 13)
#define bit_arch_Prefer_No_AVX512 (1u << 14)
#define bit_arch_MathVec_Prefer_No_AVX512 (1u << 15)
+#define bit_arch_Prefer_AVX2_STRCMP (1u << 16)
#define index_arch_Fast_Rep_String FEATURE_INDEX_2
#define index_arch_Fast_Copy_Backward FEATURE_INDEX_2
#define index_arch_Prefer_No_AVX512 FEATURE_INDEX_2
#define index_arch_MathVec_Prefer_No_AVX512 FEATURE_INDEX_2
#define index_arch_Prefer_FSRM FEATURE_INDEX_2
+#define index_arch_Prefer_AVX2_STRCMP FEATURE_INDEX_2
/* XCR0 Feature flags. */
#define bit_XMM_state (1u << 1)
CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features,
Fast_Copy_Backward, disable,
18);
+ CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH
+ (n, cpu_features, Prefer_AVX2_STRCMP, AVX2_Usable,
+ disable, 18);
}
break;
case 19: