1 /* Enumerate available IFUNC implementations of a function. x86-64 version.
2 Copyright (C) 2012-2022 Free Software Foundation, Inc.
3 This file is part of the GNU C Library.
5 The GNU C Library is free software; you can redistribute it and/or
6 modify it under the terms of the GNU Lesser General Public
7 License as published by the Free Software Foundation; either
8 version 2.1 of the License, or (at your option) any later version.
10 The GNU C Library is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 Lesser General Public License for more details.
15 You should have received a copy of the GNU Lesser General Public
16 License along with the GNU C Library; if not, see
17 <https://www.gnu.org/licenses/>. */
22 #include <ifunc-impl-list.h>
24 #include "init-arch.h"
26 /* Fill ARRAY of MAX elements with IFUNC implementations for function
27 NAME supported on target machine and return the number of valid
28 entries. Each set of implementations for a given function is sorted in
29 descending order by ISA level. */
32 __libc_ifunc_impl_list (const char *name
, struct libc_ifunc_impl
*array
,
37 /* Support sysdeps/x86_64/multiarch/memcmpeq.c. */
38 IFUNC_IMPL (i
, name
, __memcmpeq
,
39 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcmpeq
,
40 (CPU_FEATURE_USABLE (AVX512VL
)
41 && CPU_FEATURE_USABLE (AVX512BW
)
42 && CPU_FEATURE_USABLE (BMI2
)),
44 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memcmpeq
,
45 (CPU_FEATURE_USABLE (AVX2
)
46 && CPU_FEATURE_USABLE (BMI2
)),
48 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memcmpeq
,
49 (CPU_FEATURE_USABLE (AVX2
)
50 && CPU_FEATURE_USABLE (BMI2
)
51 && CPU_FEATURE_USABLE (RTM
)),
53 /* ISA V2 wrapper for SSE2 implementation because the SSE2
54 implementation is also used at ISA level 2. */
55 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memcmpeq
,
59 /* Support sysdeps/x86_64/multiarch/memchr.c. */
60 IFUNC_IMPL (i
, name
, memchr
,
61 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memchr
,
62 (CPU_FEATURE_USABLE (AVX512VL
)
63 && CPU_FEATURE_USABLE (AVX512BW
)
64 && CPU_FEATURE_USABLE (BMI2
)),
66 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memchr
,
67 (CPU_FEATURE_USABLE (AVX512VL
)
68 && CPU_FEATURE_USABLE (AVX512BW
)
69 && CPU_FEATURE_USABLE (BMI2
)),
71 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memchr
,
72 CPU_FEATURE_USABLE (AVX2
),
74 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memchr
,
75 (CPU_FEATURE_USABLE (AVX2
)
76 && CPU_FEATURE_USABLE (RTM
)),
78 /* ISA V2 wrapper for SSE2 implementation because the SSE2
79 implementation is also used at ISA level 2. */
80 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memchr
,
84 /* Support sysdeps/x86_64/multiarch/memcmp.c. */
85 IFUNC_IMPL (i
, name
, memcmp
,
86 /* NB: If any of these names change or if any new
87 implementations are added be sure to update
88 sysdeps/x86_64/memcmp-isa-default-impl.h. */
89 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcmp
,
90 (CPU_FEATURE_USABLE (AVX512VL
)
91 && CPU_FEATURE_USABLE (AVX512BW
)
92 && CPU_FEATURE_USABLE (BMI2
)
93 && CPU_FEATURE_USABLE (MOVBE
)),
95 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memcmp
,
96 (CPU_FEATURE_USABLE (AVX2
)
97 && CPU_FEATURE_USABLE (BMI2
)
98 && CPU_FEATURE_USABLE (MOVBE
)),
100 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memcmp
,
101 (CPU_FEATURE_USABLE (AVX2
)
102 && CPU_FEATURE_USABLE (BMI2
)
103 && CPU_FEATURE_USABLE (MOVBE
)
104 && CPU_FEATURE_USABLE (RTM
)),
105 __memcmp_avx2_movbe_rtm
)
106 /* ISA V2 wrapper for SSE2 implementation because the SSE2
107 implementation is also used at ISA level 2. */
108 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memcmp
,
113 /* Support sysdeps/x86_64/multiarch/memmove_chk.c. */
114 IFUNC_IMPL (i
, name
, __memmove_chk
,
115 IFUNC_IMPL_ADD (array
, i
, __memmove_chk
, 1,
117 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memmove_chk
,
118 CPU_FEATURE_USABLE (AVX512F
),
119 __memmove_chk_avx512_no_vzeroupper
)
120 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memmove_chk
,
121 CPU_FEATURE_USABLE (AVX512VL
),
122 __memmove_chk_avx512_unaligned
)
123 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memmove_chk
,
124 CPU_FEATURE_USABLE (AVX512VL
),
125 __memmove_chk_avx512_unaligned_erms
)
126 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memmove_chk
,
127 CPU_FEATURE_USABLE (AVX512VL
),
128 __memmove_chk_evex_unaligned
)
129 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memmove_chk
,
130 CPU_FEATURE_USABLE (AVX512VL
),
131 __memmove_chk_evex_unaligned_erms
)
132 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memmove_chk
,
133 CPU_FEATURE_USABLE (AVX
),
134 __memmove_chk_avx_unaligned
)
135 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memmove_chk
,
136 CPU_FEATURE_USABLE (AVX
),
137 __memmove_chk_avx_unaligned_erms
)
138 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memmove_chk
,
139 (CPU_FEATURE_USABLE (AVX
)
140 && CPU_FEATURE_USABLE (RTM
)),
141 __memmove_chk_avx_unaligned_rtm
)
142 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memmove_chk
,
143 (CPU_FEATURE_USABLE (AVX
)
144 && CPU_FEATURE_USABLE (RTM
)),
145 __memmove_chk_avx_unaligned_erms_rtm
)
146 /* By V3 we assume fast aligned copy. */
147 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memmove_chk
,
148 CPU_FEATURE_USABLE (SSSE3
),
150 /* ISA V2 wrapper for SSE2 implementation because the SSE2
151 implementation is also used at ISA level 2 (SSSE3 is too
152 optimized around aligned copy to be better as general
154 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memmove_chk
, 1,
155 __memmove_chk_sse2_unaligned
)
156 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memmove_chk
, 1,
157 __memmove_chk_sse2_unaligned_erms
))
160 /* Support sysdeps/x86_64/multiarch/memmove.c. */
161 IFUNC_IMPL (i
, name
, memmove
,
162 IFUNC_IMPL_ADD (array
, i
, memmove
, 1,
164 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memmove
,
165 CPU_FEATURE_USABLE (AVX512F
),
166 __memmove_avx512_no_vzeroupper
)
167 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memmove
,
168 CPU_FEATURE_USABLE (AVX512VL
),
169 __memmove_avx512_unaligned
)
170 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memmove
,
171 CPU_FEATURE_USABLE (AVX512VL
),
172 __memmove_avx512_unaligned_erms
)
173 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memmove
,
174 CPU_FEATURE_USABLE (AVX512VL
),
175 __memmove_evex_unaligned
)
176 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memmove
,
177 CPU_FEATURE_USABLE (AVX512VL
),
178 __memmove_evex_unaligned_erms
)
179 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memmove
,
180 CPU_FEATURE_USABLE (AVX
),
181 __memmove_avx_unaligned
)
182 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memmove
,
183 CPU_FEATURE_USABLE (AVX
),
184 __memmove_avx_unaligned_erms
)
185 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memmove
,
186 (CPU_FEATURE_USABLE (AVX
)
187 && CPU_FEATURE_USABLE (RTM
)),
188 __memmove_avx_unaligned_rtm
)
189 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memmove
,
190 (CPU_FEATURE_USABLE (AVX
)
191 && CPU_FEATURE_USABLE (RTM
)),
192 __memmove_avx_unaligned_erms_rtm
)
193 /* By V3 we assume fast aligned copy. */
194 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memmove
,
195 CPU_FEATURE_USABLE (SSSE3
),
197 /* ISA V2 wrapper for SSE2 implementation because the SSE2
198 implementation is also used at ISA level 2 (SSSE3 is too
199 optimized around aligned copy to be better as general
201 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memmove
, 1,
202 __memmove_sse2_unaligned
)
203 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memmove
, 1,
204 __memmove_sse2_unaligned_erms
))
206 /* Support sysdeps/x86_64/multiarch/memrchr.c. */
207 IFUNC_IMPL (i
, name
, memrchr
,
208 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memrchr
,
209 (CPU_FEATURE_USABLE (AVX512VL
)
210 && CPU_FEATURE_USABLE (AVX512BW
)),
212 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memrchr
,
213 CPU_FEATURE_USABLE (AVX2
),
215 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memrchr
,
216 (CPU_FEATURE_USABLE (AVX2
)
217 && CPU_FEATURE_USABLE (RTM
)),
219 /* ISA V2 wrapper for SSE2 implementation because the SSE2
220 implementation is also used at ISA level 2. */
221 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memrchr
,
226 /* Support sysdeps/x86_64/multiarch/memset_chk.c. */
227 IFUNC_IMPL (i
, name
, __memset_chk
,
228 IFUNC_IMPL_ADD (array
, i
, __memset_chk
, 1,
230 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memset_chk
,
231 (CPU_FEATURE_USABLE (AVX512VL
)
232 && CPU_FEATURE_USABLE (AVX512BW
)
233 && CPU_FEATURE_USABLE (BMI2
)),
234 __memset_chk_avx512_unaligned_erms
)
235 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memset_chk
,
236 (CPU_FEATURE_USABLE (AVX512VL
)
237 && CPU_FEATURE_USABLE (AVX512BW
)
238 && CPU_FEATURE_USABLE (BMI2
)),
239 __memset_chk_avx512_unaligned
)
240 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memset_chk
,
241 CPU_FEATURE_USABLE (AVX512F
),
242 __memset_chk_avx512_no_vzeroupper
)
243 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memset_chk
,
244 (CPU_FEATURE_USABLE (AVX512VL
)
245 && CPU_FEATURE_USABLE (AVX512BW
)
246 && CPU_FEATURE_USABLE (BMI2
)),
247 __memset_chk_evex_unaligned
)
248 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memset_chk
,
249 (CPU_FEATURE_USABLE (AVX512VL
)
250 && CPU_FEATURE_USABLE (AVX512BW
)
251 && CPU_FEATURE_USABLE (BMI2
)),
252 __memset_chk_evex_unaligned_erms
)
253 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memset_chk
,
254 CPU_FEATURE_USABLE (AVX2
),
255 __memset_chk_avx2_unaligned
)
256 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memset_chk
,
257 CPU_FEATURE_USABLE (AVX2
),
258 __memset_chk_avx2_unaligned_erms
)
259 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memset_chk
,
260 (CPU_FEATURE_USABLE (AVX2
)
261 && CPU_FEATURE_USABLE (RTM
)),
262 __memset_chk_avx2_unaligned_rtm
)
263 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memset_chk
,
264 (CPU_FEATURE_USABLE (AVX2
)
265 && CPU_FEATURE_USABLE (RTM
)),
266 __memset_chk_avx2_unaligned_erms_rtm
)
267 /* ISA V2 wrapper for SSE2 implementation because the SSE2
268 implementation is also used at ISA level 2. */
269 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memset_chk
, 1,
270 __memset_chk_sse2_unaligned
)
271 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memset_chk
, 1,
272 __memset_chk_sse2_unaligned_erms
)
276 /* Support sysdeps/x86_64/multiarch/memset.c. */
277 IFUNC_IMPL (i
, name
, memset
,
278 IFUNC_IMPL_ADD (array
, i
, memset
, 1,
280 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memset
,
281 (CPU_FEATURE_USABLE (AVX512VL
)
282 && CPU_FEATURE_USABLE (AVX512BW
)
283 && CPU_FEATURE_USABLE (BMI2
)),
284 __memset_avx512_unaligned_erms
)
285 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memset
,
286 (CPU_FEATURE_USABLE (AVX512VL
)
287 && CPU_FEATURE_USABLE (AVX512BW
)
288 && CPU_FEATURE_USABLE (BMI2
)),
289 __memset_avx512_unaligned
)
290 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memset
,
291 CPU_FEATURE_USABLE (AVX512F
),
292 __memset_avx512_no_vzeroupper
)
293 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memset
,
294 (CPU_FEATURE_USABLE (AVX512VL
)
295 && CPU_FEATURE_USABLE (AVX512BW
)
296 && CPU_FEATURE_USABLE (BMI2
)),
297 __memset_evex_unaligned
)
298 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memset
,
299 (CPU_FEATURE_USABLE (AVX512VL
)
300 && CPU_FEATURE_USABLE (AVX512BW
)
301 && CPU_FEATURE_USABLE (BMI2
)),
302 __memset_evex_unaligned_erms
)
303 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memset
,
304 CPU_FEATURE_USABLE (AVX2
),
305 __memset_avx2_unaligned
)
306 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memset
,
307 CPU_FEATURE_USABLE (AVX2
),
308 __memset_avx2_unaligned_erms
)
309 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memset
,
310 (CPU_FEATURE_USABLE (AVX2
)
311 && CPU_FEATURE_USABLE (RTM
)),
312 __memset_avx2_unaligned_rtm
)
313 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memset
,
314 (CPU_FEATURE_USABLE (AVX2
)
315 && CPU_FEATURE_USABLE (RTM
)),
316 __memset_avx2_unaligned_erms_rtm
)
317 /* ISA V2 wrapper for SSE2 implementation because the SSE2
318 implementation is also used at ISA level 2. */
319 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memset
, 1,
320 __memset_sse2_unaligned
)
321 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memset
, 1,
322 __memset_sse2_unaligned_erms
)
325 /* Support sysdeps/x86_64/multiarch/rawmemchr.c. */
326 IFUNC_IMPL (i
, name
, rawmemchr
,
327 X86_IFUNC_IMPL_ADD_V4 (array
, i
, rawmemchr
,
328 (CPU_FEATURE_USABLE (AVX512VL
)
329 && CPU_FEATURE_USABLE (AVX512BW
)
330 && CPU_FEATURE_USABLE (BMI2
)),
332 X86_IFUNC_IMPL_ADD_V4 (array
, i
, rawmemchr
,
333 (CPU_FEATURE_USABLE (AVX512VL
)
334 && CPU_FEATURE_USABLE (AVX512BW
)
335 && CPU_FEATURE_USABLE (BMI2
)),
336 __rawmemchr_evex_rtm
)
337 X86_IFUNC_IMPL_ADD_V3 (array
, i
, rawmemchr
,
338 CPU_FEATURE_USABLE (AVX2
),
340 X86_IFUNC_IMPL_ADD_V3 (array
, i
, rawmemchr
,
341 (CPU_FEATURE_USABLE (AVX2
)
342 && CPU_FEATURE_USABLE (RTM
)),
343 __rawmemchr_avx2_rtm
)
344 /* ISA V2 wrapper for SSE2 implementation because the SSE2
345 implementation is also used at ISA level 2. */
346 X86_IFUNC_IMPL_ADD_V2 (array
, i
, rawmemchr
,
350 /* Support sysdeps/x86_64/multiarch/strlen.c. */
351 IFUNC_IMPL (i
, name
, strlen
,
352 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strlen
,
353 (CPU_FEATURE_USABLE (AVX512VL
)
354 && CPU_FEATURE_USABLE (AVX512BW
)
355 && CPU_FEATURE_USABLE (BMI2
)),
357 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strlen
,
358 (CPU_FEATURE_USABLE (AVX512VL
)
359 && CPU_FEATURE_USABLE (AVX512BW
)
360 && CPU_FEATURE_USABLE (BMI2
)),
362 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strlen
,
363 (CPU_FEATURE_USABLE (AVX2
)
364 && CPU_FEATURE_USABLE (BMI2
)),
366 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strlen
,
367 (CPU_FEATURE_USABLE (AVX2
)
368 && CPU_FEATURE_USABLE (BMI2
)
369 && CPU_FEATURE_USABLE (RTM
)),
371 /* ISA V2 wrapper for SSE2 implementation because the SSE2
372 implementation is also used at ISA level 2. */
373 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strlen
,
377 /* Support sysdeps/x86_64/multiarch/strnlen.c. */
378 IFUNC_IMPL (i
, name
, strnlen
,
379 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strnlen
,
380 (CPU_FEATURE_USABLE (AVX512VL
)
381 && CPU_FEATURE_USABLE (AVX512BW
)
382 && CPU_FEATURE_USABLE (BMI2
)),
384 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strnlen
,
385 (CPU_FEATURE_USABLE (AVX512VL
)
386 && CPU_FEATURE_USABLE (AVX512BW
)
387 && CPU_FEATURE_USABLE (BMI2
)),
389 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strnlen
,
390 (CPU_FEATURE_USABLE (AVX2
)
391 && CPU_FEATURE_USABLE (BMI2
)),
393 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strnlen
,
394 (CPU_FEATURE_USABLE (AVX2
)
395 && CPU_FEATURE_USABLE (BMI2
)
396 && CPU_FEATURE_USABLE (RTM
)),
398 /* ISA V2 wrapper for SSE2 implementation because the SSE2
399 implementation is also used at ISA level 2. */
400 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strnlen
,
404 /* Support sysdeps/x86_64/multiarch/stpncpy.c. */
405 IFUNC_IMPL (i
, name
, stpncpy
,
406 IFUNC_IMPL_ADD (array
, i
, stpncpy
, CPU_FEATURE_USABLE (AVX2
),
408 IFUNC_IMPL_ADD (array
, i
, stpncpy
,
409 (CPU_FEATURE_USABLE (AVX2
)
410 && CPU_FEATURE_USABLE (RTM
)),
412 IFUNC_IMPL_ADD (array
, i
, stpncpy
,
413 (CPU_FEATURE_USABLE (AVX512VL
)
414 && CPU_FEATURE_USABLE (AVX512BW
)),
416 IFUNC_IMPL_ADD (array
, i
, stpncpy
, 1,
417 __stpncpy_sse2_unaligned
))
419 /* Support sysdeps/x86_64/multiarch/stpcpy.c. */
420 IFUNC_IMPL (i
, name
, stpcpy
,
421 IFUNC_IMPL_ADD (array
, i
, stpcpy
, CPU_FEATURE_USABLE (AVX2
),
423 IFUNC_IMPL_ADD (array
, i
, stpcpy
,
424 (CPU_FEATURE_USABLE (AVX2
)
425 && CPU_FEATURE_USABLE (RTM
)),
427 IFUNC_IMPL_ADD (array
, i
, stpcpy
,
428 (CPU_FEATURE_USABLE (AVX512VL
)
429 && CPU_FEATURE_USABLE (AVX512BW
)),
431 IFUNC_IMPL_ADD (array
, i
, stpcpy
, 1, __stpcpy_sse2_unaligned
)
432 IFUNC_IMPL_ADD (array
, i
, stpcpy
, 1, __stpcpy_sse2
))
434 /* Support sysdeps/x86_64/multiarch/strcasecmp_l.c. */
435 IFUNC_IMPL (i
, name
, strcasecmp
,
436 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strcasecmp
,
437 (CPU_FEATURE_USABLE (AVX512VL
)
438 && CPU_FEATURE_USABLE (AVX512BW
)),
440 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcasecmp
,
441 CPU_FEATURE_USABLE (AVX2
),
443 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcasecmp
,
444 (CPU_FEATURE_USABLE (AVX2
)
445 && CPU_FEATURE_USABLE (RTM
)),
446 __strcasecmp_avx2_rtm
)
447 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcasecmp
,
448 CPU_FEATURE_USABLE (SSE4_2
),
450 /* ISA V2 wrapper for SSE2 implementation because the SSE2
451 implementation is also used at ISA level 2. */
452 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcasecmp
,
456 /* Support sysdeps/x86_64/multiarch/strcasecmp_l.c. */
457 IFUNC_IMPL (i
, name
, strcasecmp_l
,
458 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strcasecmp
,
459 (CPU_FEATURE_USABLE (AVX512VL
)
460 && CPU_FEATURE_USABLE (AVX512BW
)),
462 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcasecmp
,
463 CPU_FEATURE_USABLE (AVX2
),
465 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcasecmp
,
466 (CPU_FEATURE_USABLE (AVX2
)
467 && CPU_FEATURE_USABLE (RTM
)),
468 __strcasecmp_l_avx2_rtm
)
469 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcasecmp_l
,
470 CPU_FEATURE_USABLE (SSE4_2
),
471 __strcasecmp_l_sse42
)
472 /* ISA V2 wrapper for SSE2 implementation because the SSE2
473 implementation is also used at ISA level 2. */
474 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcasecmp_l
,
476 __strcasecmp_l_sse2
))
478 /* Support sysdeps/x86_64/multiarch/strcat.c. */
479 IFUNC_IMPL (i
, name
, strcat
,
480 IFUNC_IMPL_ADD (array
, i
, strcat
, CPU_FEATURE_USABLE (AVX2
),
482 IFUNC_IMPL_ADD (array
, i
, strcat
,
483 (CPU_FEATURE_USABLE (AVX2
)
484 && CPU_FEATURE_USABLE (RTM
)),
486 IFUNC_IMPL_ADD (array
, i
, strcat
,
487 (CPU_FEATURE_USABLE (AVX512VL
)
488 && CPU_FEATURE_USABLE (AVX512BW
)),
490 IFUNC_IMPL_ADD (array
, i
, strcat
, 1, __strcat_sse2_unaligned
)
491 IFUNC_IMPL_ADD (array
, i
, strcat
, 1, __strcat_sse2
))
493 /* Support sysdeps/x86_64/multiarch/strchr.c. */
494 IFUNC_IMPL (i
, name
, strchr
,
495 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strchr
,
496 (CPU_FEATURE_USABLE (AVX512VL
)
497 && CPU_FEATURE_USABLE (AVX512BW
)
498 && CPU_FEATURE_USABLE (BMI2
)),
500 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strchr
,
501 (CPU_FEATURE_USABLE (AVX2
)
502 && CPU_FEATURE_USABLE (BMI2
)),
504 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strchr
,
505 (CPU_FEATURE_USABLE (AVX2
)
506 && CPU_FEATURE_USABLE (BMI2
)
507 && CPU_FEATURE_USABLE (RTM
)),
509 /* ISA V2 wrapper for SSE2 implementation because the SSE2
510 implementation is also used at ISA level 2. */
511 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strchr
,
514 X86_IFUNC_IMPL_ADD_V1 (array
, i
, strchr
,
516 __strchr_sse2_no_bsf
))
518 /* Support sysdeps/x86_64/multiarch/strchrnul.c. */
519 IFUNC_IMPL (i
, name
, strchrnul
,
520 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strchrnul
,
521 (CPU_FEATURE_USABLE (AVX512VL
)
522 && CPU_FEATURE_USABLE (AVX512BW
)
523 && CPU_FEATURE_USABLE (BMI2
)),
525 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strchrnul
,
526 (CPU_FEATURE_USABLE (AVX2
)
527 && CPU_FEATURE_USABLE (BMI2
)),
529 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strchrnul
,
530 (CPU_FEATURE_USABLE (AVX2
)
531 && CPU_FEATURE_USABLE (BMI2
)
532 && CPU_FEATURE_USABLE (RTM
)),
533 __strchrnul_avx2_rtm
)
534 /* ISA V2 wrapper for SSE2 implementation because the SSE2
535 implementation is also used at ISA level 2. */
536 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strchrnul
,
540 /* Support sysdeps/x86_64/multiarch/strrchr.c. */
541 IFUNC_IMPL (i
, name
, strrchr
,
542 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strrchr
,
543 (CPU_FEATURE_USABLE (AVX512VL
)
544 && CPU_FEATURE_USABLE (AVX512BW
)),
546 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strrchr
,
547 CPU_FEATURE_USABLE (AVX2
),
549 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strrchr
,
550 (CPU_FEATURE_USABLE (AVX2
)
551 && CPU_FEATURE_USABLE (RTM
)),
553 /* ISA V2 wrapper for SSE2 implementation because the SSE2
554 implementation is also used at ISA level 2. */
555 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strrchr
,
559 /* Support sysdeps/x86_64/multiarch/strcmp.c. */
560 IFUNC_IMPL (i
, name
, strcmp
,
561 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strcmp
,
562 (CPU_FEATURE_USABLE (AVX512VL
)
563 && CPU_FEATURE_USABLE (AVX512BW
)
564 && CPU_FEATURE_USABLE (BMI2
)),
566 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcmp
,
567 CPU_FEATURE_USABLE (AVX2
),
569 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strcmp
,
570 (CPU_FEATURE_USABLE (AVX2
)
571 && CPU_FEATURE_USABLE (RTM
)),
573 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcmp
,
574 CPU_FEATURE_USABLE (SSE4_2
),
576 /* ISA V2 wrapper for SSE2 implementations because the SSE2
577 implementations are also used at ISA level 2. */
578 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcmp
,
580 __strcmp_sse2_unaligned
)
581 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strcmp
,
585 /* Support sysdeps/x86_64/multiarch/strcpy.c. */
586 IFUNC_IMPL (i
, name
, strcpy
,
587 IFUNC_IMPL_ADD (array
, i
, strcpy
, CPU_FEATURE_USABLE (AVX2
),
589 IFUNC_IMPL_ADD (array
, i
, strcpy
,
590 (CPU_FEATURE_USABLE (AVX2
)
591 && CPU_FEATURE_USABLE (RTM
)),
593 IFUNC_IMPL_ADD (array
, i
, strcpy
,
594 (CPU_FEATURE_USABLE (AVX512VL
)
595 && CPU_FEATURE_USABLE (AVX512BW
)),
597 IFUNC_IMPL_ADD (array
, i
, strcpy
, 1, __strcpy_sse2_unaligned
)
598 IFUNC_IMPL_ADD (array
, i
, strcpy
, 1, __strcpy_sse2
))
600 /* Support sysdeps/x86_64/multiarch/strcspn.c. */
601 IFUNC_IMPL (i
, name
, strcspn
,
602 /* All implementations of strcspn are built at all ISA
604 IFUNC_IMPL_ADD (array
, i
, strcspn
, CPU_FEATURE_USABLE (SSE4_2
),
606 IFUNC_IMPL_ADD (array
, i
, strcspn
, 1, __strcspn_generic
))
608 /* Support sysdeps/x86_64/multiarch/strncase_l.c. */
609 IFUNC_IMPL (i
, name
, strncasecmp
,
610 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strncasecmp
,
611 (CPU_FEATURE_USABLE (AVX512VL
)
612 && CPU_FEATURE_USABLE (AVX512BW
)),
614 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncasecmp
,
615 CPU_FEATURE_USABLE (AVX2
),
617 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncasecmp
,
618 (CPU_FEATURE_USABLE (AVX2
)
619 && CPU_FEATURE_USABLE (RTM
)),
620 __strncasecmp_avx2_rtm
)
621 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncasecmp
,
622 CPU_FEATURE_USABLE (SSE4_2
),
624 /* ISA V2 wrapper for SSE2 implementation because the SSE2
625 implementation is also used at ISA level 2. */
626 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncasecmp
,
630 /* Support sysdeps/x86_64/multiarch/strncase_l.c. */
631 IFUNC_IMPL (i
, name
, strncasecmp_l
,
632 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strncasecmp
,
633 (CPU_FEATURE_USABLE (AVX512VL
)
634 && CPU_FEATURE_USABLE (AVX512BW
)),
635 __strncasecmp_l_evex
)
636 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncasecmp
,
637 CPU_FEATURE_USABLE (AVX2
),
638 __strncasecmp_l_avx2
)
639 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncasecmp
,
640 (CPU_FEATURE_USABLE (AVX2
)
641 && CPU_FEATURE_USABLE (RTM
)),
642 __strncasecmp_l_avx2_rtm
)
643 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncasecmp_l
,
644 CPU_FEATURE_USABLE (SSE4_2
),
645 __strncasecmp_l_sse42
)
646 /* ISA V2 wrapper for SSE2 implementation because the SSE2
647 implementation is also used at ISA level 2. */
648 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncasecmp_l
,
650 __strncasecmp_l_sse2
))
652 /* Support sysdeps/x86_64/multiarch/strncat.c. */
653 IFUNC_IMPL (i
, name
, strncat
,
654 IFUNC_IMPL_ADD (array
, i
, strncat
, CPU_FEATURE_USABLE (AVX2
),
656 IFUNC_IMPL_ADD (array
, i
, strncat
,
657 (CPU_FEATURE_USABLE (AVX2
)
658 && CPU_FEATURE_USABLE (RTM
)),
660 IFUNC_IMPL_ADD (array
, i
, strncat
,
661 (CPU_FEATURE_USABLE (AVX512VL
)
662 && CPU_FEATURE_USABLE (AVX512BW
)),
664 IFUNC_IMPL_ADD (array
, i
, strncat
, 1,
665 __strncat_sse2_unaligned
))
667 /* Support sysdeps/x86_64/multiarch/strncpy.c. */
668 IFUNC_IMPL (i
, name
, strncpy
,
669 IFUNC_IMPL_ADD (array
, i
, strncpy
, CPU_FEATURE_USABLE (AVX2
),
671 IFUNC_IMPL_ADD (array
, i
, strncpy
,
672 (CPU_FEATURE_USABLE (AVX2
)
673 && CPU_FEATURE_USABLE (RTM
)),
675 IFUNC_IMPL_ADD (array
, i
, strncpy
,
676 (CPU_FEATURE_USABLE (AVX512VL
)
677 && CPU_FEATURE_USABLE (AVX512BW
)),
679 IFUNC_IMPL_ADD (array
, i
, strncpy
, 1,
680 __strncpy_sse2_unaligned
))
682 /* Support sysdeps/x86_64/multiarch/strpbrk.c. */
683 IFUNC_IMPL (i
, name
, strpbrk
,
684 /* All implementations of strpbrk are built at all ISA
686 IFUNC_IMPL_ADD (array
, i
, strpbrk
, CPU_FEATURE_USABLE (SSE4_2
),
688 IFUNC_IMPL_ADD (array
, i
, strpbrk
, 1, __strpbrk_generic
))
691 /* Support sysdeps/x86_64/multiarch/strspn.c. */
692 IFUNC_IMPL (i
, name
, strspn
,
693 /* All implementations of strspn are built at all ISA
695 IFUNC_IMPL_ADD (array
, i
, strspn
, CPU_FEATURE_USABLE (SSE4_2
),
697 IFUNC_IMPL_ADD (array
, i
, strspn
, 1, __strspn_generic
))
699 /* Support sysdeps/x86_64/multiarch/strstr.c. */
700 IFUNC_IMPL (i
, name
, strstr
,
701 IFUNC_IMPL_ADD (array
, i
, strstr
,
702 (CPU_FEATURE_USABLE (AVX512VL
)
703 && CPU_FEATURE_USABLE (AVX512BW
)
704 && CPU_FEATURE_USABLE (AVX512DQ
)
705 && CPU_FEATURE_USABLE (BMI2
)),
707 IFUNC_IMPL_ADD (array
, i
, strstr
, 1, __strstr_sse2_unaligned
)
708 IFUNC_IMPL_ADD (array
, i
, strstr
, 1, __strstr_generic
))
710 /* Support sysdeps/x86_64/multiarch/wcschr.c. */
711 IFUNC_IMPL (i
, name
, wcschr
,
712 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcschr
,
713 (CPU_FEATURE_USABLE (AVX512VL
)
714 && CPU_FEATURE_USABLE (AVX512BW
)
715 && CPU_FEATURE_USABLE (BMI2
)),
717 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcschr
,
718 (CPU_FEATURE_USABLE (AVX2
)
719 && CPU_FEATURE_USABLE (BMI2
)),
721 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcschr
,
722 (CPU_FEATURE_USABLE (AVX2
)
723 && CPU_FEATURE_USABLE (BMI2
)
724 && CPU_FEATURE_USABLE (RTM
)),
726 /* ISA V2 wrapper for SSE2 implementation because the SSE2
727 implementation is also used at ISA level 2. */
728 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcschr
,
732 /* Support sysdeps/x86_64/multiarch/wcsrchr.c. */
733 IFUNC_IMPL (i
, name
, wcsrchr
,
734 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcsrchr
,
735 (CPU_FEATURE_USABLE (AVX512VL
)
736 && CPU_FEATURE_USABLE (AVX512BW
)
737 && CPU_FEATURE_USABLE (BMI2
)),
739 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsrchr
,
740 CPU_FEATURE_USABLE (AVX2
),
742 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsrchr
,
743 (CPU_FEATURE_USABLE (AVX2
)
744 && CPU_FEATURE_USABLE (RTM
)),
746 /* ISA V2 wrapper for SSE2 implementation because the SSE2
747 implementation is also used at ISA level 2. */
748 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcsrchr
,
752 /* Support sysdeps/x86_64/multiarch/wcscmp.c. */
753 IFUNC_IMPL (i
, name
, wcscmp
,
754 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcscmp
,
755 (CPU_FEATURE_USABLE (AVX512VL
)
756 && CPU_FEATURE_USABLE (AVX512BW
)
757 && CPU_FEATURE_USABLE (BMI2
)),
759 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcscmp
,
760 CPU_FEATURE_USABLE (AVX2
),
762 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcscmp
,
763 (CPU_FEATURE_USABLE (AVX2
)
764 && CPU_FEATURE_USABLE (RTM
)),
766 /* ISA V2 wrapper for SSE2 implementation because the SSE2
767 implementation is also used at ISA level 2. */
768 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcscmp
,
772 /* Support sysdeps/x86_64/multiarch/wcsncmp.c. */
773 IFUNC_IMPL (i
, name
, wcsncmp
,
774 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcsncmp
,
775 (CPU_FEATURE_USABLE (AVX512VL
)
776 && CPU_FEATURE_USABLE (AVX512BW
)
777 && CPU_FEATURE_USABLE (BMI2
)),
779 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsncmp
,
780 CPU_FEATURE_USABLE (AVX2
),
782 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsncmp
,
783 (CPU_FEATURE_USABLE (AVX2
)
784 && CPU_FEATURE_USABLE (RTM
)),
786 /* ISA V2 wrapper for GENERIC implementation because the
787 GENERIC implementation is also used at ISA level 2. */
788 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcsncmp
,
792 /* Support sysdeps/x86_64/multiarch/wcscpy.c. */
793 IFUNC_IMPL (i
, name
, wcscpy
,
794 IFUNC_IMPL_ADD (array
, i
, wcscpy
, CPU_FEATURE_USABLE (SSSE3
),
796 IFUNC_IMPL_ADD (array
, i
, wcscpy
, 1, __wcscpy_generic
))
798 /* Support sysdeps/x86_64/multiarch/wcslen.c. */
799 IFUNC_IMPL (i
, name
, wcslen
,
800 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcslen
,
801 (CPU_FEATURE_USABLE (AVX512VL
)
802 && CPU_FEATURE_USABLE (AVX512BW
)
803 && CPU_FEATURE_USABLE (BMI2
)),
805 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcslen
,
806 (CPU_FEATURE_USABLE (AVX512VL
)
807 && CPU_FEATURE_USABLE (AVX512BW
)
808 && CPU_FEATURE_USABLE (BMI2
)),
810 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcslen
,
811 (CPU_FEATURE_USABLE (AVX2
)
812 && CPU_FEATURE_USABLE (BMI2
)),
814 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcslen
,
815 (CPU_FEATURE_USABLE (AVX2
)
816 && CPU_FEATURE_USABLE (BMI2
)
817 && CPU_FEATURE_USABLE (RTM
)),
819 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcslen
,
820 CPU_FEATURE_USABLE (SSE4_1
),
822 X86_IFUNC_IMPL_ADD_V1 (array
, i
, wcslen
,
826 /* Support sysdeps/x86_64/multiarch/wcsnlen.c. */
827 IFUNC_IMPL (i
, name
, wcsnlen
,
828 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcsnlen
,
829 (CPU_FEATURE_USABLE (AVX512VL
)
830 && CPU_FEATURE_USABLE (AVX512BW
)
831 && CPU_FEATURE_USABLE (BMI2
)),
833 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wcsnlen
,
834 (CPU_FEATURE_USABLE (AVX512VL
)
835 && CPU_FEATURE_USABLE (AVX512BW
)
836 && CPU_FEATURE_USABLE (BMI2
)),
838 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsnlen
,
839 (CPU_FEATURE_USABLE (AVX2
)
840 && CPU_FEATURE_USABLE (BMI2
)),
842 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wcsnlen
,
843 (CPU_FEATURE_USABLE (AVX2
)
844 && CPU_FEATURE_USABLE (BMI2
)
845 && CPU_FEATURE_USABLE (RTM
)),
847 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wcsnlen
,
848 CPU_FEATURE_USABLE (SSE4_1
),
850 X86_IFUNC_IMPL_ADD_V1 (array
, i
, wcsnlen
,
854 /* Support sysdeps/x86_64/multiarch/wmemchr.c. */
855 IFUNC_IMPL (i
, name
, wmemchr
,
856 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemchr
,
857 (CPU_FEATURE_USABLE (AVX512VL
)
858 && CPU_FEATURE_USABLE (AVX512BW
)
859 && CPU_FEATURE_USABLE (BMI2
)),
861 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemchr
,
862 (CPU_FEATURE_USABLE (AVX512VL
)
863 && CPU_FEATURE_USABLE (AVX512BW
)
864 && CPU_FEATURE_USABLE (BMI2
)),
866 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemchr
,
867 CPU_FEATURE_USABLE (AVX2
),
869 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemchr
,
870 (CPU_FEATURE_USABLE (AVX2
)
871 && CPU_FEATURE_USABLE (RTM
)),
873 /* ISA V2 wrapper for SSE2 implementation because the SSE2
874 implementation is also used at ISA level 2. */
875 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wmemchr
,
879 /* Support sysdeps/x86_64/multiarch/wmemcmp.c. */
880 IFUNC_IMPL (i
, name
, wmemcmp
,
881 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemcmp
,
882 (CPU_FEATURE_USABLE (AVX512VL
)
883 && CPU_FEATURE_USABLE (AVX512BW
)
884 && CPU_FEATURE_USABLE (BMI2
)
885 && CPU_FEATURE_USABLE (MOVBE
)),
886 __wmemcmp_evex_movbe
)
887 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemcmp
,
888 (CPU_FEATURE_USABLE (AVX2
)
889 && CPU_FEATURE_USABLE (BMI2
)
890 && CPU_FEATURE_USABLE (MOVBE
)),
891 __wmemcmp_avx2_movbe
)
892 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemcmp
,
893 (CPU_FEATURE_USABLE (AVX2
)
894 && CPU_FEATURE_USABLE (BMI2
)
895 && CPU_FEATURE_USABLE (MOVBE
)
896 && CPU_FEATURE_USABLE (RTM
)),
897 __wmemcmp_avx2_movbe_rtm
)
898 /* ISA V2 wrapper for SSE2 implementation because the SSE2
899 implementation is also used at ISA level 2. */
900 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wmemcmp
,
904 /* Support sysdeps/x86_64/multiarch/wmemset.c. */
905 IFUNC_IMPL (i
, name
, wmemset
,
906 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemset
,
907 (CPU_FEATURE_USABLE (AVX512VL
)
908 && CPU_FEATURE_USABLE (AVX512BW
)
909 && CPU_FEATURE_USABLE (BMI2
)),
910 __wmemset_evex_unaligned
)
911 X86_IFUNC_IMPL_ADD_V4 (array
, i
, wmemset
,
912 (CPU_FEATURE_USABLE (AVX512VL
)
913 && CPU_FEATURE_USABLE (AVX512BW
)
914 && CPU_FEATURE_USABLE (BMI2
)),
915 __wmemset_avx512_unaligned
)
916 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemset
,
917 CPU_FEATURE_USABLE (AVX2
),
918 __wmemset_avx2_unaligned
)
919 X86_IFUNC_IMPL_ADD_V3 (array
, i
, wmemset
,
920 (CPU_FEATURE_USABLE (AVX2
)
921 && CPU_FEATURE_USABLE (RTM
)),
922 __wmemset_avx2_unaligned_rtm
)
923 /* ISA V2 wrapper for SSE2 implementation because the SSE2
924 implementation is also used at ISA level 2. */
925 X86_IFUNC_IMPL_ADD_V2 (array
, i
, wmemset
, 1,
926 __wmemset_sse2_unaligned
))
929 /* Support sysdeps/x86_64/multiarch/memcpy_chk.c. */
930 IFUNC_IMPL (i
, name
, __memcpy_chk
,
931 IFUNC_IMPL_ADD (array
, i
, __memcpy_chk
, 1,
933 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcpy_chk
,
934 CPU_FEATURE_USABLE (AVX512F
),
935 __memcpy_chk_avx512_no_vzeroupper
)
936 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcpy_chk
,
937 CPU_FEATURE_USABLE (AVX512VL
),
938 __memcpy_chk_avx512_unaligned
)
939 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcpy_chk
,
940 CPU_FEATURE_USABLE (AVX512VL
),
941 __memcpy_chk_avx512_unaligned_erms
)
942 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcpy_chk
,
943 CPU_FEATURE_USABLE (AVX512VL
),
944 __memcpy_chk_evex_unaligned
)
945 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __memcpy_chk
,
946 CPU_FEATURE_USABLE (AVX512VL
),
947 __memcpy_chk_evex_unaligned_erms
)
948 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memcpy_chk
,
949 CPU_FEATURE_USABLE (AVX
),
950 __memcpy_chk_avx_unaligned
)
951 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memcpy_chk
,
952 CPU_FEATURE_USABLE (AVX
),
953 __memcpy_chk_avx_unaligned_erms
)
954 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memcpy_chk
,
955 (CPU_FEATURE_USABLE (AVX
)
956 && CPU_FEATURE_USABLE (RTM
)),
957 __memcpy_chk_avx_unaligned_rtm
)
958 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __memcpy_chk
,
959 (CPU_FEATURE_USABLE (AVX
)
960 && CPU_FEATURE_USABLE (RTM
)),
961 __memcpy_chk_avx_unaligned_erms_rtm
)
962 /* By V3 we assume fast aligned copy. */
963 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memcpy_chk
,
964 CPU_FEATURE_USABLE (SSSE3
),
966 /* ISA V2 wrapper for SSE2 implementation because the SSE2
967 implementation is also used at ISA level 2 (SSSE3 is too
968 optimized around aligned copy to be better as general
970 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memcpy_chk
, 1,
971 __memcpy_chk_sse2_unaligned
)
972 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __memcpy_chk
, 1,
973 __memcpy_chk_sse2_unaligned_erms
))
976 /* Support sysdeps/x86_64/multiarch/memcpy.c. */
977 IFUNC_IMPL (i
, name
, memcpy
,
978 IFUNC_IMPL_ADD (array
, i
, memcpy
, 1,
980 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcpy
,
981 CPU_FEATURE_USABLE (AVX512F
),
982 __memcpy_avx512_no_vzeroupper
)
983 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcpy
,
984 CPU_FEATURE_USABLE (AVX512VL
),
985 __memcpy_avx512_unaligned
)
986 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcpy
,
987 CPU_FEATURE_USABLE (AVX512VL
),
988 __memcpy_avx512_unaligned_erms
)
989 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcpy
,
990 CPU_FEATURE_USABLE (AVX512VL
),
991 __memcpy_evex_unaligned
)
992 X86_IFUNC_IMPL_ADD_V4 (array
, i
, memcpy
,
993 CPU_FEATURE_USABLE (AVX512VL
),
994 __memcpy_evex_unaligned_erms
)
995 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memcpy
,
996 CPU_FEATURE_USABLE (AVX
),
997 __memcpy_avx_unaligned
)
998 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memcpy
,
999 CPU_FEATURE_USABLE (AVX
),
1000 __memcpy_avx_unaligned_erms
)
1001 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memcpy
,
1002 (CPU_FEATURE_USABLE (AVX
)
1003 && CPU_FEATURE_USABLE (RTM
)),
1004 __memcpy_avx_unaligned_rtm
)
1005 X86_IFUNC_IMPL_ADD_V3 (array
, i
, memcpy
,
1006 (CPU_FEATURE_USABLE (AVX
)
1007 && CPU_FEATURE_USABLE (RTM
)),
1008 __memcpy_avx_unaligned_erms_rtm
)
1009 /* By V3 we assume fast aligned copy. */
1010 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memcpy
,
1011 CPU_FEATURE_USABLE (SSSE3
),
1013 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1014 implementation is also used at ISA level 2 (SSSE3 is too
1015 optimized around aligned copy to be better as general
1016 purpose memmove). */
1017 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memcpy
, 1,
1018 __memcpy_sse2_unaligned
)
1019 X86_IFUNC_IMPL_ADD_V2 (array
, i
, memcpy
, 1,
1020 __memcpy_sse2_unaligned_erms
))
1023 /* Support sysdeps/x86_64/multiarch/mempcpy_chk.c. */
1024 IFUNC_IMPL (i
, name
, __mempcpy_chk
,
1025 IFUNC_IMPL_ADD (array
, i
, __mempcpy_chk
, 1,
1027 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __mempcpy_chk
,
1028 CPU_FEATURE_USABLE (AVX512F
),
1029 __mempcpy_chk_avx512_no_vzeroupper
)
1030 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __mempcpy_chk
,
1031 CPU_FEATURE_USABLE (AVX512VL
),
1032 __mempcpy_chk_avx512_unaligned
)
1033 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __mempcpy_chk
,
1034 CPU_FEATURE_USABLE (AVX512VL
),
1035 __mempcpy_chk_avx512_unaligned_erms
)
1036 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __mempcpy_chk
,
1037 CPU_FEATURE_USABLE (AVX512VL
),
1038 __mempcpy_chk_evex_unaligned
)
1039 X86_IFUNC_IMPL_ADD_V4 (array
, i
, __mempcpy_chk
,
1040 CPU_FEATURE_USABLE (AVX512VL
),
1041 __mempcpy_chk_evex_unaligned_erms
)
1042 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __mempcpy_chk
,
1043 CPU_FEATURE_USABLE (AVX
),
1044 __mempcpy_chk_avx_unaligned
)
1045 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __mempcpy_chk
,
1046 CPU_FEATURE_USABLE (AVX
),
1047 __mempcpy_chk_avx_unaligned_erms
)
1048 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __mempcpy_chk
,
1049 (CPU_FEATURE_USABLE (AVX
)
1050 && CPU_FEATURE_USABLE (RTM
)),
1051 __mempcpy_chk_avx_unaligned_rtm
)
1052 X86_IFUNC_IMPL_ADD_V3 (array
, i
, __mempcpy_chk
,
1053 (CPU_FEATURE_USABLE (AVX
)
1054 && CPU_FEATURE_USABLE (RTM
)),
1055 __mempcpy_chk_avx_unaligned_erms_rtm
)
1056 /* By V3 we assume fast aligned copy. */
1057 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __mempcpy_chk
,
1058 CPU_FEATURE_USABLE (SSSE3
),
1059 __mempcpy_chk_ssse3
)
1060 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1061 implementation is also used at ISA level 2 (SSSE3 is too
1062 optimized around aligned copy to be better as general
1063 purpose memmove). */
1064 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __mempcpy_chk
, 1,
1065 __mempcpy_chk_sse2_unaligned
)
1066 X86_IFUNC_IMPL_ADD_V2 (array
, i
, __mempcpy_chk
, 1,
1067 __mempcpy_chk_sse2_unaligned_erms
))
1070 /* Support sysdeps/x86_64/multiarch/mempcpy.c. */
1071 IFUNC_IMPL (i
, name
, mempcpy
,
1072 IFUNC_IMPL_ADD (array
, i
, mempcpy
, 1,
1074 X86_IFUNC_IMPL_ADD_V4 (array
, i
, mempcpy
,
1075 CPU_FEATURE_USABLE (AVX512F
),
1076 __mempcpy_avx512_no_vzeroupper
)
1077 X86_IFUNC_IMPL_ADD_V4 (array
, i
, mempcpy
,
1078 CPU_FEATURE_USABLE (AVX512VL
),
1079 __mempcpy_avx512_unaligned
)
1080 X86_IFUNC_IMPL_ADD_V4 (array
, i
, mempcpy
,
1081 CPU_FEATURE_USABLE (AVX512VL
),
1082 __mempcpy_avx512_unaligned_erms
)
1083 X86_IFUNC_IMPL_ADD_V4 (array
, i
, mempcpy
,
1084 CPU_FEATURE_USABLE (AVX512VL
),
1085 __mempcpy_evex_unaligned
)
1086 X86_IFUNC_IMPL_ADD_V4 (array
, i
, mempcpy
,
1087 CPU_FEATURE_USABLE (AVX512VL
),
1088 __mempcpy_evex_unaligned_erms
)
1089 X86_IFUNC_IMPL_ADD_V3 (array
, i
, mempcpy
,
1090 CPU_FEATURE_USABLE (AVX
),
1091 __mempcpy_avx_unaligned
)
1092 X86_IFUNC_IMPL_ADD_V3 (array
, i
, mempcpy
,
1093 CPU_FEATURE_USABLE (AVX
),
1094 __mempcpy_avx_unaligned_erms
)
1095 X86_IFUNC_IMPL_ADD_V3 (array
, i
, mempcpy
,
1096 (CPU_FEATURE_USABLE (AVX
)
1097 && CPU_FEATURE_USABLE (RTM
)),
1098 __mempcpy_avx_unaligned_rtm
)
1099 X86_IFUNC_IMPL_ADD_V3 (array
, i
, mempcpy
,
1100 (CPU_FEATURE_USABLE (AVX
)
1101 && CPU_FEATURE_USABLE (RTM
)),
1102 __mempcpy_avx_unaligned_erms_rtm
)
1103 /* By V3 we assume fast aligned copy. */
1104 X86_IFUNC_IMPL_ADD_V2 (array
, i
, mempcpy
,
1105 CPU_FEATURE_USABLE (SSSE3
),
1107 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1108 implementation is also used at ISA level 2 (SSSE3 is too
1109 optimized around aligned copy to be better as general
1110 purpose memmove). */
1111 X86_IFUNC_IMPL_ADD_V2 (array
, i
, mempcpy
, 1,
1112 __mempcpy_sse2_unaligned
)
1113 X86_IFUNC_IMPL_ADD_V2 (array
, i
, mempcpy
, 1,
1114 __mempcpy_sse2_unaligned_erms
))
1116 /* Support sysdeps/x86_64/multiarch/strncmp.c. */
1117 IFUNC_IMPL (i
, name
, strncmp
,
1118 X86_IFUNC_IMPL_ADD_V4 (array
, i
, strncmp
,
1119 (CPU_FEATURE_USABLE (AVX512VL
)
1120 && CPU_FEATURE_USABLE (AVX512BW
)),
1122 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncmp
,
1123 CPU_FEATURE_USABLE (AVX2
),
1125 X86_IFUNC_IMPL_ADD_V3 (array
, i
, strncmp
,
1126 (CPU_FEATURE_USABLE (AVX2
)
1127 && CPU_FEATURE_USABLE (RTM
)),
1129 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncmp
,
1130 CPU_FEATURE_USABLE (SSE4_2
),
1132 /* ISA V2 wrapper for SSE2 implementation because the SSE2
1133 implementation is also used at ISA level 2. */
1134 X86_IFUNC_IMPL_ADD_V2 (array
, i
, strncmp
,
  /* Support sysdeps/x86_64/multiarch/wmemset_chk.c.  Fortified variant
     of wmemset; the entries mirror the wmemset list and are listed in
     descending order by ISA level (V4 EVEX/AVX-512, V3 AVX2, then the
     V2 wrapper), matching the ordering contract stated at the top of
     this function.  */
  IFUNC_IMPL (i, name, __wmemset_chk,
	      X86_IFUNC_IMPL_ADD_V4 (array, i, __wmemset_chk,
				     (CPU_FEATURE_USABLE (AVX512VL)
				      && CPU_FEATURE_USABLE (AVX512BW)
				      && CPU_FEATURE_USABLE (BMI2)),
				     __wmemset_chk_evex_unaligned)
	      X86_IFUNC_IMPL_ADD_V4 (array, i, __wmemset_chk,
				     (CPU_FEATURE_USABLE (AVX512VL)
				      && CPU_FEATURE_USABLE (AVX512BW)
				      && CPU_FEATURE_USABLE (BMI2)),
				     __wmemset_chk_avx512_unaligned)
	      X86_IFUNC_IMPL_ADD_V3 (array, i, __wmemset_chk,
				     CPU_FEATURE_USABLE (AVX2),
				     __wmemset_chk_avx2_unaligned)
	      X86_IFUNC_IMPL_ADD_V3 (array, i, __wmemset_chk,
				     (CPU_FEATURE_USABLE (AVX2)
				      && CPU_FEATURE_USABLE (RTM)),
				     __wmemset_chk_avx2_unaligned_rtm)
	      /* ISA V2 wrapper for SSE2 implementation because the SSE2
		 implementation is also used at ISA level 2.  */
	      X86_IFUNC_IMPL_ADD_V2 (array, i, __wmemset_chk, 1,
				     __wmemset_chk_sse2_unaligned))