unsigned int stepping = 0;
enum cpu_features_kind kind;
+ /* Default is to avoid non-temporal memset for non Intel/AMD hardware. This
+ is because, as of writing this, we only have benchmarks indicating its
+ profitability on Intel/AMD hardware. */
+ cpu_features->preferred[index_arch_Avoid_Non_Temporal_Memset]
+ |= bit_arch_Avoid_Non_Temporal_Memset;
+
cpu_features->cachesize_non_temporal_divisor = 4;
#if !HAS_CPUID
if (__get_cpuid_max (0, 0) == 0)
update_active (cpu_features);
+ /* Benchmarks indicate non-temporal memset can be profitable on Intel
+ hardware. */
+ cpu_features->preferred[index_arch_Avoid_Non_Temporal_Memset]
+ &= ~bit_arch_Avoid_Non_Temporal_Memset;
+
if (family == 0x06)
{
model += extended_model;
ecx = cpu_features->features[CPUID_INDEX_1].cpuid.ecx;
+ /* Benchmarks indicate non-temporal memset can be profitable on AMD
+ hardware. */
+ cpu_features->preferred[index_arch_Avoid_Non_Temporal_Memset]
+ &= ~bit_arch_Avoid_Non_Temporal_Memset;
+
if (CPU_FEATURE_USABLE_P (cpu_features, AVX))
{
/* Since the FMA4 bit is in CPUID_INDEX_80000001 and
if (CPU_FEATURE_USABLE_P (cpu_features, FSRM))
rep_movsb_threshold = 2112;
- /* Non-temporal stores are more performant on Intel and AMD hardware above
- non_temporal_threshold. Enable this for both Intel and AMD hardware. */
- unsigned long int memset_non_temporal_threshold = SIZE_MAX;
- if (!CPU_FEATURES_ARCH_P (cpu_features, Avoid_Non_Temporal_Memset)
- && (cpu_features->basic.kind == arch_kind_intel
- || cpu_features->basic.kind == arch_kind_amd))
- memset_non_temporal_threshold = non_temporal_threshold;
-
/* For AMD CPUs that support ERMS (Zen3+), REP MOVSB is in a lot of
cases slower than the vectorized path (and for some alignments,
it is really slow, check BZ #30994). */
if (tunable_size != 0)
shared = tunable_size;
+ /* Non-temporal stores are more performant on some hardware above
+ non_temporal_threshold. Currently the Avoid_Non_Temporal_Memset bit is
+ cleared for both Intel and AMD hardware. */
+ unsigned long int memset_non_temporal_threshold = SIZE_MAX;
+ if (!CPU_FEATURES_ARCH_P (cpu_features, Avoid_Non_Temporal_Memset))
+ memset_non_temporal_threshold = non_temporal_threshold;
+
tunable_size = TUNABLE_GET (x86_non_temporal_threshold, long int, NULL);
if (tunable_size > minimum_non_temporal_threshold
&& tunable_size <= maximum_non_temporal_threshold)