/* memset/bzero -- set memory area to CH/0
   Highly optimized version for ix86, x>=5.
   Copyright (C) 1996-2014 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Torbjorn Granlund, <tege@matematik.su.se>

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <sysdep.h>
#include "asm-syntax.h"

/* BEWARE: `#ifdef memset' means that memset is redefined as `bzero' */
#define BZERO_P (defined memset)

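/* The offsets below assume the stack layout after the "pushl %edi" at
   the start of the function: 4 bytes return address plus 4 bytes saved
   %edi, then the arguments.  When building bzero there is no CHR
   argument, so LEN immediately follows DEST.  */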
#define PARMS   4+4     /* space for 1 saved reg */
#define RTN     PARMS
#define DEST    RTN
#if BZERO_P
# define LEN    DEST+4
#else
# define CHR    DEST+4
# define LEN    CHR+4
#endif

        .text
#if defined PIC && IS_IN (libc) && !BZERO_P
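/* void *__memset_chk (void *dest, int c, size_t n, size_t destlen);
   here 12(%esp) is N and 16(%esp) is DESTLEN, the compiler-known size
   of the destination object.  If the object is smaller than the
   requested fill length, jump to __chk_fail; otherwise fall through
   into memset below.  */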
ENTRY (__memset_chk)
        movl    12(%esp), %eax
        cmpl    %eax, 16(%esp)
        jb      HIDDEN_JUMPTARGET (__chk_fail)
END (__memset_chk)
#endif
ENTRY (memset)

        pushl   %edi
        cfi_adjust_cfa_offset (4)

        movl    DEST(%esp), %edi
        cfi_rel_offset (edi, 0)
        movl    LEN(%esp), %edx
#if BZERO_P
        xorl    %eax, %eax      /* we fill with 0 */
#else
        movb    CHR(%esp), %al
        movb    %al, %ah
        movl    %eax, %ecx
        shll    $16, %eax
        movw    %cx, %ax
#endif
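        /* %eax now holds the fill byte replicated into all four byte
           positions, so each 32-bit store below writes four copies of
           the fill character at once.  */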
        cld

/* If less than 36 bytes to write, skip tricky code (it wouldn't work).  */
        cmpl    $36, %edx
        movl    %edx, %ecx      /* needed when branch is taken! */
        jl      L(2)

/* First write 0-3 bytes to make the pointer 32-bit aligned.  */
        movl    %edi, %ecx      /* Copy ptr to ecx...  */
        negl    %ecx            /* ...and negate that and...  */
        andl    $3, %ecx        /* ...mask to get byte count.  */
        subl    %ecx, %edx      /* adjust global byte count */
        rep
        stosb

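        /* %edi is now 4-byte aligned and %edx holds the number of bytes
           still to be written.  */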
        subl    $32, %edx       /* offset count for unrolled loop */
        movl    (%edi), %ecx    /* Fetch destination cache line */

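        /* The load from 28(%edi) touches the far end of the 32-byte block
           about to be filled, pulling the destination line into the cache
           before the eight stores.  Because %edx was biased by -32 above,
           the loop continues while at least 32 bytes remain.  */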
        .align  2, 0x90         /* supply 0x90 for broken assemblers */
L(1):   movl    28(%edi), %ecx  /* allocate cache line for destination */
        subl    $32, %edx       /* decr loop count */
        movl    %eax, 0(%edi)   /* store words pairwise */
        movl    %eax, 4(%edi)
        movl    %eax, 8(%edi)
        movl    %eax, 12(%edi)
        movl    %eax, 16(%edi)
        movl    %eax, 20(%edi)
        movl    %eax, 24(%edi)
        movl    %eax, 28(%edi)
        leal    32(%edi), %edi  /* update destination pointer */
        jge     L(1)

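        /* On loop exit %edx is in the range -32..-1; adding back the 32
           bias recovers the 0-31 bytes that still have to be written.  */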
        leal    32(%edx), %ecx  /* reset offset count */

/* Write last 0-7 full 32-bit words (up to 8 words if loop was skipped).  */
L(2):   shrl    $2, %ecx        /* convert byte count to longword count */
        rep
        stosl

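        /* %edx is either the original length (short path) or the
           remaining count minus 32 (long path); in both cases it is
           congruent mod 4 to the bytes left over after the word stores,
           so masking with 3 below yields the trailing byte count.  */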
/* Finally write the last 0-3 bytes.  */
        movl    %edx, %ecx
        andl    $3, %ecx
        rep
        stosb

#if !BZERO_P
        /* Load result (only if used as memset).  */
        movl    DEST(%esp), %eax        /* start address of destination is result */
#endif
        popl    %edi
        cfi_adjust_cfa_offset (-4)
        cfi_restore (edi)

#if BZERO_P
        ret
#else
        ret
#endif
END (memset)
libc_hidden_builtin_def (memset)