+++ /dev/null
-#define USE_AS_BZERO
-#define memset __bzero
-#include "memset.S"
-
-weak_alias (__bzero, bzero)
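
For readers following along: the deleted one-line wrappers (this first one looks like the plain i386 bzero.S) built __bzero by defining USE_AS_BZERO and renaming memset before including the matching memset.S, then exported the bzero name as a weak alias. A rough C equivalent of what such a wrapper provided (illustrative sketch, not the removed assembly):

    /* Rough C equivalent of the removed wrapper: __bzero is memset with a
       fixed zero fill, and bzero is a weak alias for it.  */
    #include <string.h>

    void
    __bzero (void *dest, size_t len)
    {
      memset (dest, 0, len);
    }

    /* weak_alias (__bzero, bzero) expands to roughly this: */
    extern __typeof (__bzero) bzero
      __attribute__ ((weak, alias ("__bzero")));
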
+++ /dev/null
-#define USE_AS_BZERO
-#define memset __bzero
-#include <sysdeps/i386/i586/memset.S>
-weak_alias (__bzero, bzero)
#define PARMS 4+4 /* space for 1 saved reg */
#define RTN PARMS
#define DEST RTN
-#ifdef USE_AS_BZERO
-# define LEN DEST+4
-#else
-# define CHR DEST+4
-# define LEN CHR+4
-#endif
+#define CHR DEST+4
+#define LEN CHR+4
.text
-#if defined SHARED && IS_IN (libc) && !defined USE_AS_BZERO
+#if defined SHARED && IS_IN (libc)
ENTRY (__memset_chk)
movl 12(%esp), %eax
cmpl %eax, 16(%esp)
movl DEST(%esp), %edi
cfi_rel_offset (edi, 0)
movl LEN(%esp), %edx
-#ifdef USE_AS_BZERO
- xorl %eax, %eax /* we fill with 0 */
-#else
movb CHR(%esp), %al
movb %al, %ah
movl %eax, %ecx
shll $16, %eax
movw %cx, %ax
-#endif
cld
/* If less than 36 bytes to write, skip tricky code (it wouldn't work). */
rep
stosb
-#ifndef USE_AS_BZERO
-/* Load result (only if used as memset). */
+/* Load result. */
movl DEST(%esp), %eax /* start address of destination is result */
-#endif
popl %edi
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
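
With the bzero-only #ifdef branches gone, this memset (the PARMS/RTN layout suggests the i586 version) always loads the fill byte from CHR and replicates it across EAX before the rep stos loops. A hedged C sketch of that replication step:

    /* Illustrative C version of the movb/movb/shll/movw sequence above:
       replicate the fill byte into every byte of a 32-bit word so the
       bulk loop can store four bytes per iteration.  */
    #include <stdint.h>

    static inline uint32_t
    broadcast_byte (int c)
    {
      uint32_t b = (uint8_t) c;   /* 000000cc                      */
      b |= b << 8;                /* 0000cccc  (movb %al, %ah)     */
      b |= b << 16;               /* cccccccc  (shll $16 + movw)   */
      return b;                   /* equivalent to b * 0x01010101  */
    }
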
+++ /dev/null
-#define USE_AS_BZERO
-#define memset __bzero
-#include <sysdeps/i386/i686/memset.S>
-weak_alias (__bzero, bzero)
#include "asm-syntax.h"
#define PARMS 4+4 /* space for 1 saved reg */
-#ifdef USE_AS_BZERO
-# define DEST PARMS
-# define LEN DEST+4
-#else
-# define RTN PARMS
-# define DEST RTN
-# define CHR DEST+4
-# define LEN CHR+4
-#endif
+#define RTN PARMS
+#define DEST RTN
+#define CHR DEST+4
+#define LEN CHR+4
.text
-#if defined SHARED && IS_IN (libc) && !defined USE_AS_BZERO
+#if defined SHARED && IS_IN (libc)
ENTRY_CHK (__memset_chk)
movl 12(%esp), %eax
cmpl %eax, 16(%esp)
cfi_adjust_cfa_offset (4)
movl DEST(%esp), %edx
movl LEN(%esp), %ecx
-#ifdef USE_AS_BZERO
- xorl %eax, %eax /* fill with 0 */
-#else
movzbl CHR(%esp), %eax
-#endif
jecxz 1f
movl %edx, %edi
cfi_rel_offset (edi, 0)
2: movl %ecx, %edx
shrl $2, %ecx
andl $3, %edx
-#ifndef USE_AS_BZERO
imul $0x01010101, %eax
-#endif
rep
stosl
movl %edx, %ecx
rep
stosb
1:
-#ifndef USE_AS_BZERO
movl DEST(%esp), %eax /* start address of destination is result */
-#endif
popl %edi
cfi_adjust_cfa_offset (-4)
cfi_restore (edi)
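
Same cleanup in what appears to be the i686 memset.S: the fill byte is now always broadcast with imul $0x01010101, and the count is split into dword stores plus a 0-3 byte tail. Sketch of that split, under the assumption that unaligned 4-byte stores are acceptable (true on x86):

    /* Sketch of the count split used above: len/4 dword stores followed
       by len%4 byte stores, with the fill byte broadcast by the imul.
       A portable version would use memcpy for the dword store.  */
    #include <stdint.h>
    #include <stddef.h>

    static void
    memset_split (unsigned char *p, unsigned int c, size_t len)
    {
      uint32_t pattern = (c & 0xff) * 0x01010101u; /* imul $0x01010101 */
      size_t words = len >> 2;                     /* shrl $2, %ecx    */
      size_t tail = len & 3;                       /* andl $3, %edx    */

      for (size_t i = 0; i < words; i++, p += 4)
        *(uint32_t *) p = pattern;                 /* rep stosl */
      for (size_t i = 0; i < tail; i++)
        p[i] = (unsigned char) c;                  /* rep stosb */
    }
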
ifeq ($(subdir),string)
gen-as-const-headers += locale-defines.sym
-sysdep_routines += bzero-sse2 memset-sse2 memcpy-ssse3 mempcpy-ssse3 \
+sysdep_routines += memset-sse2 memcpy-ssse3 mempcpy-ssse3 \
memmove-ssse3 memcpy-ssse3-rep mempcpy-ssse3-rep \
memmove-ssse3-rep \
- memset-sse2-rep bzero-sse2-rep strcmp-ssse3 \
+ memset-sse2-rep strcmp-ssse3 \
strcmp-sse4 strncmp-c strncmp-ssse3 strncmp-sse4 \
memcmp-ssse3 memcmp-sse4 varshift \
strlen-sse2 strlen-sse2-bsf strncpy-c strcpy-ssse3 \
memcpy-sse2-unaligned \
mempcpy-sse2-unaligned memmove-sse2-unaligned \
strcspn-c strpbrk-c strspn-c \
- bzero-ia32 rawmemchr-ia32 \
+ rawmemchr-ia32 \
memchr-ia32 memcmp-ia32 memcpy-ia32 memmove-ia32 \
mempcpy-ia32 memset-ia32 strcat-ia32 strchr-ia32 \
strrchr-ia32 strcpy-ia32 strcmp-ia32 strcspn-ia32 \
+++ /dev/null
-/* bzero optimized for i686.
- Copyright (C) 2017-2022 Free Software Foundation, Inc.
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
-#include <sysdep.h>
-
-#if IS_IN (libc)
-# define __bzero __bzero_ia32
-
-# ifdef SHARED
-# undef libc_hidden_builtin_def
-/* IFUNC doesn't work with the hidden functions in shared library since
- they will be called without setting up EBX needed for PLT which is
- used by IFUNC. */
-# define libc_hidden_builtin_def(name) \
- .globl __GI___bzero; __GI___bzero = __bzero
-# endif
-
-# undef weak_alias
-# define weak_alias(original, alias)
-
-# include <sysdeps/i386/i686/bzero.S>
-#endif
+++ /dev/null
-#define USE_AS_BZERO
-#define __memset_sse2_rep __bzero_sse2_rep
-#include "memset-sse2-rep.S"
+++ /dev/null
-#define USE_AS_BZERO
-#define __memset_sse2 __bzero_sse2
-#include "memset-sse2.S"
+++ /dev/null
-/* Multiple versions of bzero.
- All versions must be listed in ifunc-impl-list.c.
- Copyright (C) 2017-2022 Free Software Foundation, Inc.
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, see
- <https://www.gnu.org/licenses/>. */
-
-/* Define multiple versions only for the definition in libc. */
-#if IS_IN (libc)
-# define bzero __redirect_bzero
-# include <string.h>
-# undef bzero
-
-# define SYMBOL_NAME bzero
-# include "ifunc-memset.h"
-
-libc_ifunc_redirected (__redirect_bzero, __bzero, IFUNC_SELECTOR ());
-
-weak_alias (__bzero, bzero)
-#endif
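
The file removed above appears to be the multiarch bzero.c that picked __bzero_ia32, __bzero_sse2 or __bzero_sse2_rep at load time through an IFUNC resolver; after this change bzero simply follows memset's dispatch. For context, a generic GNU ifunc sketch using hypothetical names (my_bzero, my_bzero_sse2), not glibc's internal macros:

    /* Generic GNU ifunc sketch; names are hypothetical stand-ins for the
       removed __bzero_* variants, and glibc's libc_ifunc_redirected wraps
       the same underlying mechanism.  */
    #include <stddef.h>
    #include <string.h>

    static void my_bzero_generic (void *dest, size_t len)
    { memset (dest, 0, len); }

    static void my_bzero_sse2 (void *dest, size_t len)
    { memset (dest, 0, len); }  /* stand-in for an SSE2 variant */

    /* The resolver runs once, when the dynamic linker processes the
       IRELATIVE relocation, and returns the implementation to bind.  */
    static void (*resolve_bzero (void)) (void *, size_t)
    {
      __builtin_cpu_init ();   /* resolvers can run before constructors */
      return __builtin_cpu_supports ("sse2")
             ? my_bzero_sse2 : my_bzero_generic;
    }

    void my_bzero (void *dest, size_t len)
         __attribute__ ((ifunc ("resolve_bzero")));
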
size_t i = 0;
- /* Support sysdeps/i386/i686/multiarch/bzero.S. */
- IFUNC_IMPL (i, name, bzero,
- IFUNC_IMPL_ADD (array, i, bzero, CPU_FEATURE_USABLE (SSE2),
- __bzero_sse2_rep)
- IFUNC_IMPL_ADD (array, i, bzero, CPU_FEATURE_USABLE (SSE2),
- __bzero_sse2)
- IFUNC_IMPL_ADD (array, i, bzero, 1, __bzero_ia32))
-
/* Support sysdeps/i386/i686/multiarch/memchr.S. */
IFUNC_IMPL (i, name, memchr,
IFUNC_IMPL_ADD (array, i, memchr, CPU_FEATURE_USABLE (SSE2),
#define PUSH(REG) pushl REG; CFI_PUSH (REG)
#define POP(REG) popl REG; CFI_POP (REG)
-#ifdef USE_AS_BZERO
-# define DEST PARMS
-# define LEN DEST+4
-# define SETRTNVAL
-#else
-# define DEST PARMS
-# define CHR DEST+4
-# define LEN CHR+4
-# define SETRTNVAL movl DEST(%esp), %eax
-#endif
+#define DEST PARMS
+#define CHR DEST+4
+#define LEN CHR+4
+#define SETRTNVAL movl DEST(%esp), %eax
#ifdef PIC
# define ENTRANCE PUSH (%ebx);
#endif
.section .text.sse2,"ax",@progbits
-#if defined SHARED && IS_IN (libc) && !defined USE_AS_BZERO
+#if defined SHARED && IS_IN (libc)
ENTRY (__memset_chk_sse2_rep)
movl 12(%esp), %eax
cmpl %eax, 16(%esp)
ENTRANCE
movl LEN(%esp), %ecx
-#ifdef USE_AS_BZERO
- xor %eax, %eax
-#else
movzbl CHR(%esp), %eax
movb %al, %ah
/* Fill the whole EAX with pattern. */
movl %eax, %edx
shl $16, %eax
or %edx, %eax
-#endif
movl DEST(%esp), %edx
cmp $32, %ecx
jae L(32bytesormore)
/* ECX > 32 and EDX is 4 byte aligned. */
L(32bytesormore):
/* Fill xmm0 with the pattern. */
-#ifdef USE_AS_BZERO
- pxor %xmm0, %xmm0
-#else
movd %eax, %xmm0
pshufd $0, %xmm0, %xmm0
-#endif
testl $0xf, %edx
jz L(aligned_16)
/* ECX > 32 and EDX is not 16 byte aligned. */
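
In memset-sse2-rep.S (and its sibling below) the xmm0 setup no longer has a pxor shortcut; the 32-bit pattern is always broadcast with movd + pshufd. An SSE2 intrinsics sketch of that broadcast (fill_pattern_xmm is a made-up helper name):

    /* Intrinsics view of the xmm0 pattern setup: movd puts the 32-bit
       pattern in the low lane, pshufd $0 copies it to all four lanes.
       The removed bzero path got an all-zero xmm0 from pxor instead.  */
    #include <emmintrin.h>
    #include <stdint.h>

    static __m128i
    fill_pattern_xmm (uint32_t pattern)
    {
      __m128i v = _mm_cvtsi32_si128 ((int) pattern); /* movd %eax, %xmm0        */
      return _mm_shuffle_epi32 (v, 0);               /* pshufd $0, %xmm0, %xmm0 */
    }
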
#define PUSH(REG) pushl REG; CFI_PUSH (REG)
#define POP(REG) popl REG; CFI_POP (REG)
-#ifdef USE_AS_BZERO
-# define DEST PARMS
-# define LEN DEST+4
-# define SETRTNVAL
-#else
-# define DEST PARMS
-# define CHR DEST+4
-# define LEN CHR+4
-# define SETRTNVAL movl DEST(%esp), %eax
-#endif
+#define DEST PARMS
+#define CHR DEST+4
+#define LEN CHR+4
+#define SETRTNVAL movl DEST(%esp), %eax
#ifdef PIC
# define ENTRANCE PUSH (%ebx);
#endif
.section .text.sse2,"ax",@progbits
-#if defined SHARED && IS_IN (libc) && !defined USE_AS_BZERO
+#if defined SHARED && IS_IN (libc)
ENTRY (__memset_chk_sse2)
movl 12(%esp), %eax
cmpl %eax, 16(%esp)
ENTRANCE
movl LEN(%esp), %ecx
-#ifdef USE_AS_BZERO
- xor %eax, %eax
-#else
movzbl CHR(%esp), %eax
movb %al, %ah
/* Fill the whole EAX with pattern. */
movl %eax, %edx
shl $16, %eax
or %edx, %eax
-#endif
movl DEST(%esp), %edx
cmp $32, %ecx
jae L(32bytesormore)
/* ECX > 32 and EDX is 4 byte aligned. */
L(32bytesormore):
/* Fill xmm0 with the pattern. */
-#ifdef USE_AS_BZERO
- pxor %xmm0, %xmm0
-#else
movd %eax, %xmm0
pshufd $0, %xmm0, %xmm0
-#endif
testl $0xf, %edx
jz L(aligned_16)
/* ECX > 32 and EDX is not 16 byte aligned. */
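
The testl $0xf, %edx check above guards the switch to 16-byte stores. A hedged intrinsics sketch of one common way to handle the unaligned head (the real assembly may sequence this differently); it assumes len is at least 32, as the surrounding comments state:

    /* Write one unaligned 16-byte chunk, round the pointer up to a
       16-byte boundary, then continue with aligned stores.  Assumes
       len >= 32 so the head store cannot overrun.  */
    #include <emmintrin.h>
    #include <stdint.h>
    #include <stddef.h>

    static void
    store_pattern_aligned (unsigned char *p, size_t len, __m128i pat)
    {
      if ((uintptr_t) p & 0xf)
        {
          _mm_storeu_si128 ((__m128i *) p, pat);  /* unaligned head */
          size_t skip = 16 - ((uintptr_t) p & 0xf);
          p += skip;
          len -= skip;
        }
      while (len >= 16)
        {
          _mm_store_si128 ((__m128i *) p, pat);   /* aligned 16-byte store */
          p += 16;
          len -= 16;
        }
      /* The remaining (len < 16) tail is handled separately.  */
    }
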
#define POP(REG) popl REG; CFI_POP (REG)
#define STR1 8
-#ifdef USE_AS_BZERO
-#define N STR1+4
-#else
#define STR2 STR1+4
#define N STR2+4
-#endif
.text
-#if defined SHARED && IS_IN (libc) && !defined USE_AS_BZERO
+#if defined SHARED && IS_IN (libc)
ENTRY (__memset_chk)
movl 12(%esp), %eax
cmpl %eax, 16(%esp)
PUSH (%edi)
movl N(%esp), %ecx
movl STR1(%esp), %edi
-#ifdef USE_AS_BZERO
- xor %eax, %eax
-#else
movzbl STR2(%esp), %eax
mov %edi, %edx
-#endif
rep stosb
-#ifndef USE_AS_BZERO
mov %edx, %eax
-#endif
POP (%edi)
ret
END (memset)
-#ifndef USE_AS_BZERO
libc_hidden_builtin_def (memset)
-#endif
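
Finally, the baseline memset-ia32.S keeps only the memset path: the __memset_chk prologue compares the requested length with the destination size passed by _FORTIFY_SOURCE callers, and the body is a plain rep stosb that returns the saved destination pointer. A C sketch of the check's semantics (my_memset_chk is a stand-in name; glibc branches to __chk_fail rather than abort):

    /* The fourth argument is the destination object size supplied by
       _FORTIFY_SOURCE callers; overflowing writes are rejected before
       falling through to the ordinary memset.  */
    #include <string.h>
    #include <stdlib.h>

    static void *
    my_memset_chk (void *dest, int c, size_t len, size_t destlen)
    {
      if (destlen < len)        /* cmpl %eax, 16(%esp); branch if below */
        abort ();
      return memset (dest, c, len);
    }
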