]> git.ipfire.org Git - thirdparty/glibc.git/blob - sysdeps/m68k/memchr.S
b59335b611ce25989d77f47fbf0a706318e58d4f
[thirdparty/glibc.git] / sysdeps / m68k / memchr.S
1 /* memchr (str, ch, n) -- Return pointer to first occurrence of CH in the
2 first N bytes of STR.
3 For Motorola 68000.
4 Copyright (C) 1999-2019 Free Software Foundation, Inc.
5 This file is part of the GNU C Library.
6 Contributed by Andreas Schwab <schwab@gnu.org>.
7
8 The GNU C Library is free software; you can redistribute it and/or
9 modify it under the terms of the GNU Lesser General Public
10 License as published by the Free Software Foundation; either
11 version 2.1 of the License, or (at your option) any later version.
12
13 The GNU C Library is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 Lesser General Public License for more details.
17
18 You should have received a copy of the GNU Lesser General Public
19 License along with the GNU C Library. If not, see
20 <http://www.gnu.org/licenses/>. */
21
22 #include <sysdep.h>
23 #include "asm-syntax.h"
24
25 TEXT
26 ENTRY(__memchr)
/* Register map used throughout: a0 = scan pointer, d0 = CH replicated
   into all four bytes of a longword, d4 = bytes remaining, d3 = magic
   carry-hole constant, d1/d2 = scratch.  */
27 /* Save the callee-saved registers we use. */
28 #ifdef __mcoldfire__
29 movel R(d2),MEM_PREDEC(sp)
30 cfi_adjust_cfa_offset (4)
31 movel R(d3),MEM_PREDEC(sp)
32 cfi_adjust_cfa_offset (4)
33 movel R(d4),MEM_PREDEC(sp)
34 cfi_adjust_cfa_offset (4)
35 cfi_rel_offset (R(d2), 8)
36 cfi_rel_offset (R(d3), 4)
37 cfi_rel_offset (R(d4), 0)
38 #else
39 moveml R(d2)-R(d4),MEM_PREDEC(sp)
40 cfi_adjust_cfa_offset (3*4)
41 cfi_rel_offset (R(d2), 0)
42 cfi_rel_offset (R(d3), 4)
43 cfi_rel_offset (R(d4), 8)
44 #endif
45
46 /* Get string pointer, character and length. */
/* Stack offset 16 = return address (4) + three saved registers (12).
   Offset 23 picks the least significant byte of the int CH argument
   (m68k is big-endian); offset 24 is the length N.  */
47 movel MEM_DISP(sp,16),R(a0)
48 moveb MEM_DISP(sp,23),R(d0)
49 movel MEM_DISP(sp,24),R(d4)
50
51 /* Check if at least four bytes left to search. */
/* ColdFire variant: subtract 4 and branch on borrow, restoring d4 on
   the fall-through path; classic m68k compares against 4 directly.  */
52 #ifdef __mcoldfire__
53 subql #4,R(d4)
54 bcs L(L6)
55 addql #4,R(d4)
56 #else
57 moveql #4,R(d1)
58 cmpl R(d1),R(d4)
59 bcs L(L6)
60 #endif
61
62 /* Distribute the character to all bytes of a longword. */
/* d1 gets CH in both bytes of its low word; d0 then gets that word in
   both halves, i.e. CH:CH:CH:CH.  */
63 movel R(d0),R(d1)
64 lsll #8,R(d1)
65 moveb R(d0),R(d1)
66 movel R(d1),R(d0)
67 swap R(d0)
68 movew R(d1),R(d0)
69
70 /* First search for the character one byte at a time until the
71 pointer is aligned to a longword boundary. */
/* d1 = addr & 3, so 4-d1 bytes must be examined singly.  The
   subql #3 / addql #1 adjustments below make d1 reach zero after
   exactly that many byte checks (d1=3 -> 1 byte, 2 -> 2, 1 -> 3).  */
72 movel R(a0),R(d1)
73 #ifdef __mcoldfire__
74 andl #3,R(d1)
75 #else
76 andw #3,R(d1)
77 #endif
78 beq L(L1)
79 cmpb MEM(a0),R(d0)
80 beq L(L9)
81 addql #1,R(a0)
82 subql #1,R(d4)
83 beq L(L7)
84
85 #ifdef __mcoldfire__
86 subql #3,R(d1)
87 #else
88 subqw #3,R(d1)
89 #endif
90 beq L(L1)
91 cmpb MEM(a0),R(d0)
92 beq L(L9)
93 addql #1,R(a0)
94 subql #1,R(d4)
95 beq L(L7)
96
97 #ifdef __mcoldfire__
98 addql #1,R(d1)
99 #else
100 addqw #1,R(d1)
101 #endif
102 beq L(L1)
103 cmpb MEM(a0),R(d0)
104 beq L(L9)
105 addql #1,R(a0)
106 subql #1,R(d4)
107 beq L(L7)
108
109 L(L1:)
110 /* Load the magic bits. Unlike the generic implementation we can
111 use the carry bit as the fourth hole. */
112 movel #0xfefefeff,R(d3)
113
114 /* We exit the loop if adding MAGIC_BITS to LONGWORD fails to
115 change any of the hole bits of LONGWORD.
116
117 1) Is this safe? Will it catch all the zero bytes?
118 Suppose there is a byte with all zeros. Any carry bits
119 propagating from its left will fall into the hole at its
120 least significant bit and stop. Since there will be no
121 carry from its most significant bit, the LSB of the
122 byte to the left will be unchanged, and the zero will be
123 detected.
124
125 2) Is this worthwhile? Will it ignore everything except
126 zero bytes? Suppose every byte of LONGWORD has a bit set
127 somewhere. There will be a carry into bit 8. If bit 8
128 is set, this will carry into bit 16. If bit 8 is clear,
129 one of bits 9-15 must be set, so there will be a carry
130 into bit 16. Similarly, there will be a carry into bit
131 24. If one of bits 24-31 is set, there will be a carry
132 into bit 32 (=carry flag), so all of the hole bits will
133 be changed.
134
135 3) But wait! Aren't we looking for C, not zero?
136 Good point. So what we do is XOR LONGWORD with a longword,
137 each of whose bytes is C. This turns each byte that is C
138 into a zero. */
139
140 /* Still at least 4 bytes to search? */
141 subql #4,R(d4)
142 bcs L(L6)
143
144 L(L2:)
/* Main loop, unrolled twice: each iteration tests one aligned
   longword for a byte equal to C using the magic-bits trick.  */
145 /* Get the longword in question. */
146 movel MEM_POSTINC(a0),R(d1)
147 /* XOR with the byte we search for. */
148 eorl R(d0),R(d1)
149
150 /* Add the magic value. We get carry bits reported for each byte
151 which is not C. */
152 movel R(d3),R(d2)
153 addl R(d1),R(d2)
154
155 /* Check the fourth carry bit before it is clobbered by the next
156 XOR. If it is not set we have a hit. */
157 bcc L(L8)
158
159 /* We are only interested in carry bits that change due to the
160 previous add, so remove original bits. */
161 eorl R(d1),R(d2)
162
163 /* Now test for the other three overflow bits.
164 Set all non-carry bits. */
165 orl R(d3),R(d2)
166 /* Add 1 to get zero if all carry bits were set. */
167 addql #1,R(d2)
168
169 /* If we don't get zero then at least one byte of the word equals
170 C. */
171 bne L(L8)
172
173 /* Still at least 4 bytes to search? */
174 subql #4,R(d4)
175 bcs L(L6)
176
177 /* Get the longword in question. */
178 movel MEM_POSTINC(a0),R(d1)
179 /* XOR with the byte we search for. */
180 eorl R(d0),R(d1)
181
182 /* Add the magic value. We get carry bits reported for each byte
183 which is not C. */
184 movel R(d3),R(d2)
185 addl R(d1),R(d2)
186
187 /* Check the fourth carry bit before it is clobbered by the next
188 XOR. If it is not set we have a hit. */
189 bcc L(L8)
190
191 /* We are only interested in carry bits that change due to the
192 previous add, so remove original bits */
193 eorl R(d1),R(d2)
194
195 /* Now test for the other three overflow bits.
196 Set all non-carry bits. */
197 orl R(d3),R(d2)
198 /* Add 1 to get zero if all carry bits were set. */
199 addql #1,R(d2)
200
201 /* If we don't get zero then at least one byte of the word equals
202 C. */
203 bne L(L8)
204
205 /* Still at least 4 bytes to search? */
206 subql #4,R(d4)
207 bcc L(L2)
208
209 L(L6:)
210 /* Search one byte at a time in the remaining less than 4 bytes. */
/* ColdFire always arrives here with d4 biased by -4 (both entry
   paths subtract before branching), so add it back.  Classic m68k may
   arrive unbiased from the initial length check, but masking with 3
   yields the remaining count in either case.  */
211 #ifdef __mcoldfire__
212 addql #4,R(d4)
213 #else
214 andw #3,R(d4)
215 #endif
216 beq L(L7)
217 cmpb MEM(a0),R(d0)
218 beq L(L9)
219 addql #1,R(a0)
220
221 #ifdef __mcoldfire__
222 subql #1,R(d4)
223 #else
224 subqw #1,R(d4)
225 #endif
226 beq L(L7)
227 cmpb MEM(a0),R(d0)
228 beq L(L9)
229 addql #1,R(a0)
230
231 #ifdef __mcoldfire__
232 subql #1,R(d4)
233 #else
234 subqw #1,R(d4)
235 #endif
236 beq L(L7)
237 cmpb MEM(a0),R(d0)
238 beq L(L9)
239
240 L(L7:)
241 /* Return NULL. */
242 clrl R(d0)
243 movel R(d0),R(a0)
244 #ifdef __mcoldfire__
245 movel MEM_POSTINC(sp),R(d4)
246 cfi_remember_state
247 cfi_adjust_cfa_offset (-4)
248 cfi_restore (R(d4))
249 movel MEM_POSTINC(sp),R(d3)
250 cfi_adjust_cfa_offset (-4)
251 cfi_restore (R(d3))
252 movel MEM_POSTINC(sp),R(d2)
253 cfi_adjust_cfa_offset (-4)
254 cfi_restore (R(d2))
255 #else
256 moveml MEM_POSTINC(sp),R(d2)-R(d4)
257 cfi_remember_state
258 cfi_adjust_cfa_offset (-3*4)
259 cfi_restore (R(d2))
260 cfi_restore (R(d3))
261 cfi_restore (R(d4))
262 #endif
263 rts
264
265 cfi_restore_state
266 L(L8:)
267 /* We have a hit. Check to see which byte it was. First
268 compensate for the autoincrement in the loop. */
269 subql #4,R(a0)
270
271 cmpb MEM(a0),R(d0)
272 beq L(L9)
273 addql #1,R(a0)
274
275 cmpb MEM(a0),R(d0)
276 beq L(L9)
277 addql #1,R(a0)
278
279 cmpb MEM(a0),R(d0)
280 beq L(L9)
281 addql #1,R(a0)
282
283 /* Otherwise the fourth byte must equal C. */
284 L(L9:)
/* Match found: return the address of the matching byte.  */
285 movel R(a0),R(d0)
286 #ifdef __mcoldfire__
287 movel MEM_POSTINC(sp),R(d4)
288 cfi_adjust_cfa_offset (-4)
289 cfi_restore (R(d4))
290 movel MEM_POSTINC(sp),R(d3)
291 cfi_adjust_cfa_offset (-4)
292 cfi_restore (R(d3))
293 movel MEM_POSTINC(sp),R(d2)
294 cfi_adjust_cfa_offset (-4)
295 cfi_restore (R(d2))
296 #else
297 moveml MEM_POSTINC(sp),R(d2)-R(d4)
298 cfi_adjust_cfa_offset (-3*4)
299 cfi_restore (R(d2))
300 cfi_restore (R(d3))
301 cfi_restore (R(d4))
302 #endif
303 rts
304 END(__memchr)
305
/* The implementation lives under the internal name __memchr; export
   the standard name memchr as a weak alias, and bind in-library
   callers directly to it.  */
306 weak_alias (__memchr, memchr)
307 libc_hidden_builtin_def (memchr)