x86: Align entry for memrchr to 64-bytes.
author    Noah Goldstein <goldstein.w.n@gmail.com>  Fri, 24 Jun 2022 16:42:12 +0000 (09:42 -0700)
committer Sunil K Pandey <skpgkp2@gmail.com>  Tue, 19 Jul 2022 05:13:57 +0000 (22:13 -0700)
The function was tuned around 64-byte entry alignment and performs
better for all sizes with it.

As well, different code paths were explicitly written to touch the
minimum number of cache lines, i.e. sizes <= 32 touch only the entry
cache line.

(cherry picked from commit 227afaa67213efcdce6a870ef5086200f1076438)

sysdeps/x86_64/multiarch/memrchr-avx2.S

index 5f8e0be18cfe4fadc6e1c526d99e826ae62b74c7..edd8180ba1ede9a57f6cfced8f9ce3c06eaa05fd 100644 (file)
@@ -35,7 +35,7 @@
 # define VEC_SIZE                      32
 # define PAGE_SIZE                     4096
        .section SECTION(.text), "ax", @progbits
-ENTRY(MEMRCHR)
+ENTRY_P2ALIGN(MEMRCHR, 6)
 # ifdef __ILP32__
        /* Clear upper bits.  */
        and     %RDX_LP, %RDX_LP
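
As a rough illustration of what the one-line change does (a sketch only, not the
actual glibc ENTRY/ENTRY_P2ALIGN macro expansion, which also emits CFI, ENDBR,
and profiling hooks): the second argument of ENTRY_P2ALIGN is a power-of-two
exponent, so 6 requests 2^6 = 64-byte alignment, whereas plain ENTRY only
guarantees 16-byte alignment. Starting the function on a cache-line boundary
means its first 64 bytes of code occupy a single cache line, which is what lets
the short-size paths (sizes <= 32) touch only the entry cache line. The symbol
name below is made up for the example.

	.text
	/* Request 2^6 = 64-byte alignment for the entry, mirroring
	   ENTRY_P2ALIGN(MEMRCHR, 6); plain ENTRY aligns only to 16.  */
	.p2align 6
	.globl	memrchr_sketch
	.type	memrchr_sketch, @function
memrchr_sketch:
	ret
	.size	memrchr_sketch, .-memrchr_sketch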