Update copyright dates with scripts/update-copyrights.
diff --git a/sysdeps/x86_64/multiarch/mempcpy_chk.S b/sysdeps/x86_64/multiarch/mempcpy_chk.S
index c0d33fecd2898573b62fadc5738c01da42683d07..6927962e81a6683696a50fcfa3ba09351e9cd84e 100644
--- a/sysdeps/x86_64/multiarch/mempcpy_chk.S
+++ b/sysdeps/x86_64/multiarch/mempcpy_chk.S
@@ -1,6 +1,6 @@
 /* Multiple versions of __mempcpy_chk
    All versions must be listed in ifunc-impl-list.c.
-   Copyright (C) 2010-2014 Free Software Foundation, Inc.
+   Copyright (C) 2010-2017 Free Software Foundation, Inc.
    Contributed by Intel Corporation.
    This file is part of the GNU C Library.
 
@@ -29,19 +29,39 @@
 	.text
 ENTRY(__mempcpy_chk)
        .type   __mempcpy_chk, @gnu_indirect_function
-       cmpl    $0, KIND_OFFSET+__cpu_features(%rip)
-       jne     1f
-       call    __init_cpu_features
-1:     leaq    __mempcpy_chk_sse2(%rip), %rax
-       testl   $bit_SSSE3, __cpu_features+CPUID_OFFSET+index_SSSE3(%rip)
+       LOAD_RTLD_GLOBAL_RO_RDX
+       HAS_ARCH_FEATURE (AVX512F_Usable)
+       jz      1f
+       lea     __mempcpy_chk_avx512_no_vzeroupper(%rip), %RAX_LP
+       HAS_ARCH_FEATURE (Prefer_No_VZEROUPPER)
+       jnz     2f
+       lea     __mempcpy_chk_avx512_unaligned_erms(%rip), %RAX_LP
+       HAS_CPU_FEATURE (ERMS)
+       jnz     2f
+       lea     __mempcpy_chk_avx512_unaligned(%rip), %RAX_LP
+       ret
+1:     lea     __mempcpy_chk_avx_unaligned(%rip), %RAX_LP
+       HAS_ARCH_FEATURE (AVX_Fast_Unaligned_Load)
+       jz      L(Fast_Unaligned_Load)
+       HAS_CPU_FEATURE (ERMS)
        jz      2f
-       leaq    __mempcpy_chk_ssse3(%rip), %rax
-       testl   $bit_Fast_Copy_Backward, __cpu_features+FEATURE_OFFSET+index_Fast_Copy_Backward(%rip)
+       lea     __mempcpy_chk_avx_unaligned_erms(%rip), %RAX_LP
+       ret
+L(Fast_Unaligned_Load):
+       lea     __mempcpy_chk_sse2_unaligned(%rip), %RAX_LP
+       HAS_ARCH_FEATURE (Fast_Unaligned_Copy)
+       jz      L(SSSE3)
+       HAS_CPU_FEATURE (ERMS)
        jz      2f
-       leaq    __mempcpy_chk_ssse3_back(%rip), %rax
-       testl   $bit_AVX_Usable, __cpu_features+FEATURE_OFFSET+index_AVX_Usable(%rip)
+       lea     __mempcpy_chk_sse2_unaligned_erms(%rip), %RAX_LP
+       ret
+L(SSSE3):
+       HAS_CPU_FEATURE (SSSE3)
        jz      2f
-       leaq    __mempcpy_chk_avx_unaligned(%rip), %rax
+	lea	__mempcpy_chk_ssse3_back(%rip), %RAX_LP
+       HAS_ARCH_FEATURE (Fast_Copy_Backward)
+       jnz     2f
+       lea     __mempcpy_chk_ssse3(%rip), %RAX_LP
 2:     ret
 END(__mempcpy_chk)
 # else
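
The rewritten resolver above is glibc's hand-written x86-64 IFUNC dispatch: the dynamic
linker runs the @gnu_indirect_function body once, the body probes CPU features through
the _rtld_global_ro copy of the feature bits (LOAD_RTLD_GLOBAL_RO_RDX, HAS_CPU_FEATURE,
HAS_ARCH_FEATURE), and whatever address is left in %rax becomes the permanent target of
__mempcpy_chk. As a rough C-level illustration of the same mechanism, here is a minimal
sketch using GCC's ifunc attribute; all names are hypothetical, it assumes GCC on an
x86-64 ELF/glibc target, and it is not glibc's actual code:

#include <stddef.h>
#include <string.h>

typedef void *(*mempcpy_fn) (void *, const void *, size_t);

/* Baseline variant: copy, then return dst + n (the mempcpy contract).  */
static void *
my_mempcpy_generic (void *dst, const void *src, size_t n)
{
  return (char *) memcpy (dst, src, n) + n;
}

/* Hypothetical stand-in for a CPU-tuned variant such as
   __mempcpy_chk_avx512_unaligned above.  A real implementation would
   use wider loads/stores; this sketch just delegates so it stays
   correct wherever the resolver allows it to run.  */
static void *
my_mempcpy_tuned (void *dst, const void *src, size_t n)
{
  return (char *) memcpy (dst, src, n) + n;
}

/* C-level analogue of the @gnu_indirect_function body: the dynamic
   linker calls this once, and the address it returns becomes the
   permanent target of every my_mempcpy call.  glibc's assembly chain
   additionally probes SSSE3, ERMS, and tuning bits such as
   Fast_Copy_Backward.  */
static mempcpy_fn
resolve_my_mempcpy (void)
{
  __builtin_cpu_init ();  /* must run before __builtin_cpu_supports
                             inside an ifunc resolver */
  if (__builtin_cpu_supports ("avx512f"))
    return my_mempcpy_tuned;
  return my_mempcpy_generic;
}

void *my_mempcpy (void *dst, const void *src, size_t n)
     __attribute__ ((ifunc ("resolve_my_mempcpy")));

Note how the assembly checks features in strict priority order (AVX-512, then AVX, then
the SSE2-unaligned/SSSE3 fallbacks, preferring an ERMS variant where one exists) and
falls through label 2 to return whichever address was loaded last; the if/else chain in
a C resolver plays exactly the same role.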