/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef  LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif
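/*
 * A note on the unit sizes above: on standard MIPS one store moves one
 * long, while on microMIPS LONG_S is redefined to the paired store
 * (LONG_SP), which writes the two fill registers (t8/t9) at once, so
 * the effective store unit doubles.  STORMASK is the byte mask below
 * that unit's natural alignment; roughly, STORMASK == store_unit - 1.
 */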

#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:		insn	reg, addr;			\
	.else;						\
9:		___BUILD_EVA_INSN(insn, reg, addr);	\
	.endif;						\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous
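/*
 * Each EX() expansion pairs the store at local label 9 with a fixup
 * handler through an __ex_table entry: if the store faults, the fault
 * handler looks the faulting PC up in __ex_table and resumes at the
 * fixup, which computes how many bytes were left unset and returns
 * that count in a2.
 */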

	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset +  0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset +  4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset +  8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset +  9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm
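/*
 * One f_fill64 expansion stores exactly 64 bytes: 16 word stores when
 * LONGSIZE == 4, 8 doubleword stores when LONGSIZE == 8, and half as
 * many paired stores under microMIPS.  A rough C equivalent, assuming
 * an aligned destination (the function name is illustrative only):
 *
 *	static void f_fill64(long *dst, long val)
 *	{
 *		int i;
 *
 *		for (i = 0; i < 64 / sizeof(long); i++)
 *			dst[i] = val;
 *	}
 */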

	.set	noreorder
	.align	5

/*
 * Macro to generate the __bzero{,_user} symbol
 * Arguments:
 * mode: LEGACY_MODE or EVA_MODE
 */
	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set	__memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu	t0, a2, STORSIZE	/* very small region? */
	bnez	t0, .Lsmall_memset\@
	andi	t0, a0, STORMASK	/* aligned? */

#ifdef CONFIG_CPU_MICROMIPS
	move	t8, a1			/* used by 'swp' instruction */
	move	t9, a1
#endif
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz	t0, 1f
	PTR_SUBU t0, STORSIZE		/* alignment in bytes */
#else
	.set	noat
	li	AT, STORSIZE
	beqz	t0, 1f
	PTR_SUBU t0, AT			/* alignment in bytes */
	.set	at
#endif
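/*
 * With CONFIG_CPU_DADDI_WORKAROUNDS the assembler may expand an
 * immediate PTR_SUBU into more than one instruction, which must not
 * happen in the branch delay slot above; staging the constant through
 * AT by hand keeps the delay-slot instruction a single real subtract.
 */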

#ifndef CONFIG_CPU_MIPSR6
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU a0, t0			/* long align ptr */
	PTR_ADDU a2, t0			/* correct size */

#else /* CONFIG_CPU_MIPSR6 */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	beqz	t0, 0f;				\
	PTR_ADDU t0, 1;

	PTR_ADDU a2, t0			/* correct size */
	PTR_ADDU t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori	a0, STORMASK
	xori	a0, STORMASK
	PTR_ADDIU a0, STORSIZE
#endif /* CONFIG_CPU_MIPSR6 */
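/*
 * The ori/xori pair above rounds a0 down to a STORSIZE boundary and
 * the PTR_ADDIU then steps past the bytes just stored, landing on the
 * first fully aligned store unit; as a C sketch:
 *
 *	a0 = ((a0 | STORMASK) ^ STORMASK) + STORSIZE;
 */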
1:	ori	t1, a2, 0x3f		/* # of full blocks */
	xori	t1, 0x3f
	beqz	t1, .Lmemset_partial\@	/* no block to fill */
	andi	t0, a2, 0x40-STORSIZE

	PTR_ADDU t1, a0			/* end address */
	.set	reorder
1:	PTR_ADDIU a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne	t1, a0, 1b
	.set	noreorder
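/*
 * t1 = a2 & ~0x3f is built with the same ori/xori trick, yielding the
 * byte count of whole 64-byte blocks; the loop above advances a0 by 64
 * and runs one f_fill64 per block until a0 reaches the end address,
 * while t0 holds the sub-block remainder rounded down to store units.
 */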

.Lmemset_partial\@:
	R10KCBARRIER(0(ra))
	PTR_LA	t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU t1, FILLPTRG
#else
	.set	noat
	LONG_SRL AT, FILLPTRG, 1
	PTR_SUBU t1, AT
	.set	at
#endif
	jr	t1
	PTR_ADDU a0, t0			/* dest ptr */

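/*
 * Duff's-device entry into the unrolled f_fill64 below: t1 is backed
 * up from label 2 by one store instruction per pending store unit, so
 * the jr executes exactly the stores still required.  The shifts
 * compensate where one store instruction covers more data bytes than
 * its own encoding (64-bit stores, and paired stores on microMIPS).
 */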
	.set	push
	.set	noreorder
	.set	nomacro
	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	.set	pop
	andi	a2, STORMASK		/* At most one long to go */

	beqz	a2, 1f
#ifndef CONFIG_CPU_MIPSR6
	PTR_ADDU a0, a2			/* What's left */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else
	PTR_SUBU t0, $0, a2
	PTR_ADDIU t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif
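/*
 * Tail handling: pre-R6 cores finish with a single unaligned partial
 * store (swl/swr or sdl/sdr) ending at the last byte, harmlessly
 * overlapping bytes already set; R6 removed the unaligned store
 * instructions, so the remainder is written out byte by byte via
 * STORE_BYTE with t0 as a negative counter.
 */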
1:	jr	ra
	move	a2, zero

.Lsmall_memset\@:
	beqz	a2, 2f
	PTR_ADDU t1, a0, a2

1:	PTR_ADDIU a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	bne	t1, a0, 1b
	EX(sb, a1, -1(a0), .Lsmall_fixup\@)

2:	jr	ra			/* done */
	move	a2, zero
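/*
 * Regions smaller than STORSIZE are filled one byte at a time; the sb
 * sits in the branch delay slot, so a0 is incremented first and the
 * store goes through -1(a0) until a0 reaches the end pointer in t1.
 */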
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

.Lbyte_fixup\@:
	PTR_SUBU a2, $0, t0
	jr	ra
	PTR_ADDIU a2, 1

.Lfirst_fixup\@:
	jr	ra
	nop

.Lfwd_fixup\@:
	PTR_L	t0, TI_TASK($28)
	andi	a2, 0x3f
	LONG_L	t0, THREAD_BUADDR(t0)
	LONG_ADDU a2, t1
	jr	ra
	LONG_SUBU a2, t0

.Lpartial_fixup\@:
	PTR_L	t0, TI_TASK($28)
	andi	a2, STORMASK
	LONG_L	t0, THREAD_BUADDR(t0)
	LONG_ADDU a2, t1
	jr	ra
	LONG_SUBU a2, t0

.Llast_fixup\@:
	jr	ra
	andi	v1, a2, STORMASK

.Lsmall_fixup\@:
	PTR_SUBU a2, t1, a0
	jr	ra
	PTR_ADDIU a2, 1
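/*
 * The fixup handlers rewrite a2 to the number of bytes left unset
 * before returning.  .Lfwd_fixup and .Lpartial_fixup fetch the bad
 * access address saved in THREAD_BUADDR and subtract it from the end
 * pointer in t1 plus the masked remainder, while the byte-loop fixups
 * recover the count from the t0 counter (.Lbyte_fixup) or from
 * t1 - a0 (.Lsmall_fixup).
 */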

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */

LEAF(memset)
	beqz	a1, 1f
	move	v0, a0			/* result */

	andi	a1, 0xff		/* spread fillword */
	LONG_SLL t1, a1, 8
	or	a1, t1
	LONG_SLL t1, a1, 16
#if LONGSIZE == 8
	or	a1, t1
	LONG_SLL t1, a1, 32
#endif
	or	a1, t1
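/*
 * The shift/or ladder above replicates the low byte of a1 across the
 * whole long, so e.g. 0xab becomes 0xabababab (0xabababababababab on
 * 64-bit); as a C sketch:
 *
 *	c &= 0xff;
 *	c |= c << 8;
 *	c |= c << 16;
 *	c |= c << 32;	(64-bit only)
 */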
1:
#ifndef CONFIG_EVA
FEXPORT(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
LEAF(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif
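/*
 * Without EVA, __bzero is simply an extra entry point into the body
 * built above in LEGACY_MODE.  With EVA, a second complete copy of the
 * routine is generated in EVA_MODE so that every store is emitted in
 * its user-view form via __BUILD_EVA_INSN.
 */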