/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 *  This file contains arm architecture specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/opcodes-virt.h>
#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/thread_info.h>
#include <asm/uaccess-asm.h>

/* At the assembly level an I/O memory cookie is used as a plain address. */
#define IOMEM(x)	(x)
/*
 * Endian independent macros for shifting bytes within registers.
 *
 * lspull/lspush are the shift directions used when stitching a word
 * together from two misaligned words; get_byte_N/put_byte_N position
 * the Nth byte (in memory order) of a register.
 */
#ifndef __ARMEB__	/* little-endian */
#define lspull          lsr
#define lspush          lsl
#define get_byte_0      lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else			/* big-endian */
#define lspull          lsl
#define lspush          lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3      lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3      lsl #0
#endif

/* Select code for any configuration running in BE8 mode */
#ifdef CONFIG_CPU_ENDIAN_BE8
#define ARM_BE8(code...) code
#else
#define ARM_BE8(code...)
#endif
/*
 * Data preload for architectures that support it
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif

/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this a worthwhile thing to do when the cache is not
 * set to write-allocate (this would need further testing on XScale when WA
 * is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif

/* Mask for the 12-bit immediate field of an ARM data-processing insn. */
#define IMM12_MASK 0xfff
/*
 * Enable and disable interrupts.  The _notrace variants make no calls
 * into the irq tracing code (the traced wrappers are defined below).
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	/* Pre-v6 has no cpsid/cpsie: write the CPSR control field directly. */
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif
/*
 * Call trace_hardirqs_off() when CONFIG_TRACE_IRQFLAGS is set.  \save
 * controls whether the caller-clobbered registers are preserved around
 * the call.
 */
	.macro	asm_trace_hardirqs_off, save=1
#if defined(CONFIG_TRACE_IRQFLAGS)
	.if	\save
	stmdb	sp!, {r0-r3, ip, lr}
	.endif
	bl	trace_hardirqs_off
	.if	\save
	ldmia	sp!, {r0-r3, ip, lr}
	.endif
#endif
	.endm

/*
 * Conditionally (condition code \cond) call trace_hardirqs_on() when
 * CONFIG_TRACE_IRQFLAGS is set.
 */
	.macro	asm_trace_hardirqs_on, cond=al, save=1
#if defined(CONFIG_TRACE_IRQFLAGS)
	/*
	 * actually the registers should be pushed and pop'd conditionally, but
	 * after bl the flags are certainly clobbered
	 */
	.if	\save
	stmdb	sp!, {r0-r3, ip, lr}
	.endif
	bl\cond	trace_hardirqs_on
	.if	\save
	ldmia	sp!, {r0-r3, ip, lr}
	.endif
#endif
	.endm

/* Disable IRQs, then inform the irq tracer. */
	.macro	disable_irq, save=1
	disable_irq_notrace
	asm_trace_hardirqs_off \save
	.endm

/* Inform the irq tracer first, then enable IRQs. */
	.macro	enable_irq
	asm_trace_hardirqs_on
	enable_irq_notrace
	.endm
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
#ifdef CONFIG_CPU_V7M
	mrs	\oldcpsr, primask	@ v7-M: IRQ state lives in PRIMASK, not CPSR
#else
	mrs	\oldcpsr, cpsr
#endif
	disable_irq
	.endm

/* As save_and_disable_irqs, but without calling into the irq tracer. */
	.macro	save_and_disable_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
	mrs	\oldcpsr, primask
#else
	mrs	\oldcpsr, cpsr
#endif
	disable_irq_notrace
	.endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
	msr	primask, \oldcpsr
#else
	msr	cpsr_c, \oldcpsr
#endif
	.endm

/* Restore IRQ state, notifying the tracer if IRQs become enabled. */
	.macro	restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT	@ Z set iff IRQs were enabled in \oldcpsr
	asm_trace_hardirqs_on cond=eq
	restore_irqs_notrace \oldcpsr
	.endm
/*
 * Assembly version of "adr rd, BSYM(sym)".  This should only be used to
 * reference local symbols in the same assembly file which are to be
 * resolved by the assembler.  Other usage is undefined.
 *
 * Expanded once per condition code suffix (badr, badreq, badrne, ...).
 */
	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	badr\c, rd, sym
#ifdef CONFIG_THUMB2_KERNEL
	adr\c	\rd, \sym + 1		@ +1 sets bit 0: Thumb-mode address
#else
	adr\c	\rd, \sym
#endif
	.endm
	.endr
/*
 * Get current thread_info: round sp down to the base of the
 * THREAD_SIZE-aligned kernel stack by clearing the low
 * THREAD_SIZE_ORDER + PAGE_SHIFT bits.
 */
	.macro	get_thread_info, rd
 ARM(	mov	\rd, sp, lsr #THREAD_SIZE_ORDER + PAGE_SHIFT	)
 THUMB(	mov	\rd, sp			)
 THUMB(	lsr	\rd, \rd, #THREAD_SIZE_ORDER + PAGE_SHIFT	)
	mov	\rd, \rd, lsl #THREAD_SIZE_ORDER + PAGE_SHIFT
	.endm
/*
 * Increment/decrement the preempt count.
 * \ti is a register holding the thread_info pointer; \tmp is scratch.
 */
#ifdef CONFIG_PREEMPT_COUNT
	.macro	inc_preempt_count, ti, tmp
	ldr	\tmp, [\ti, #TI_PREEMPT]	@ get preempt count
	add	\tmp, \tmp, #1			@ increment it
	str	\tmp, [\ti, #TI_PREEMPT]
	.endm

	.macro	dec_preempt_count, ti, tmp
	ldr	\tmp, [\ti, #TI_PREEMPT]	@ get preempt count
	sub	\tmp, \tmp, #1			@ decrement it
	str	\tmp, [\ti, #TI_PREEMPT]
	.endm

	.macro	dec_preempt_count_ti, ti, tmp
	get_thread_info \ti
	dec_preempt_count \ti, \tmp
	.endm
#else
	/* Preempt counting disabled: the macros expand to nothing. */
	.macro	inc_preempt_count, ti, tmp
	.endm

	.macro	dec_preempt_count, ti, tmp
	.endm

	.macro	dec_preempt_count_ti, ti, tmp
	.endm
#endif
/*
 * Run instruction(s) x with a fault fixup: a fault in x transfers
 * control to label l via an __ex_table exception-table entry.
 */
#define USERL(l, x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";		\
	.align	3;				\
	.long	9999b,l;			\
	.popsection

/* Common case: the fixup handler is at the local label 9001. */
#define USER(x...)	USERL(9001f, x)
/*
 * SMP/UP alternatives.  On CONFIG_SMP kernels ALT_SMP() emits the SMP
 * instruction inline and ALT_UP() records a 4-byte UP replacement in
 * .alt.smp.init, to be patched over it when booting on UP hardware.
 */
#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr							;\
	.if . - 9997b == 2					;\
		nop						;\
	.endif							;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)					\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset					;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif
/*
 * Instruction barrier
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4		@ v6 CP15 equivalent of isb
#endif
	.endm
/*
 * SMP data memory barrier.  \mode selects the "arm" or Thumb-2 wide
 * encoding for the instructions emitted via the ALT_SMP/ALT_UP pair;
 * on UP kernels the macro emits nothing.
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb	ish)
	.else
	ALT_SMP(W(dmb)	ish)
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm
#if defined(CONFIG_CPU_V7M)
/*
 * setmode is used to assert to be in svc mode during boot. For v7-M
 * this is done in __v7m_setup, so setmode can be empty here.
 */
	.macro	setmode, mode, reg
	.endm
#elif defined(CONFIG_THUMB2_KERNEL)
	/* Thumb-2 msr takes no immediate, so stage \mode through \reg. */
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif
/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time.
 * you cannot return to the original mode.
 */
	.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6 && !defined(CONFIG_CPU_V7M)
	mrs	\reg , cpsr
	eor	\reg, \reg, #HYP_MODE		@ Z (via tst below) iff we entered in HYP
	tst	\reg, #MODE_MASK
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	bne	1f				@ not HYP: a plain CPSR write suffices
	orr	\reg, \reg, #PSR_A_BIT
	badr	lr, 2f
	msr	spsr_cxsf, \reg
	__MSR_ELR_HYP(14)			@ leaving HYP requires an exception return
	__ERET
1:	msr	cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
	.endm
/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants.
 *
 * \t defaults to TUSER() (defined elsewhere), which appends the suffix
 * selecting the user-privilege access form; a fault records \abort in
 * the __ex_table exception table.
 */
#ifdef CONFIG_THUMB2_KERNEL

	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\()b\t\cond\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\t\cond\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort
	@ explicit IT instruction needed because of the label
	@ introduced by the USER macro
	.ifnc	\cond,al
	.if	\rept == 1
	itt	\cond
	.elseif	\rept == 2
	ittt	\cond
	.else
	.error	"Unsupported rept macro argument"
	.endif
	.endif

	@ Slightly optimised to avoid incrementing the pointer twice
	usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
	.if	\rept == 2
	usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
	.endif

	add\cond \ptr, #\rept * \inc
	.endm

#else	/* !CONFIG_THUMB2_KERNEL */

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\()b\t\cond \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\t\cond \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */

/* Store/load to/from user space with abort fixup (wrappers over usracc). */
	.macro	strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	str, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

	.macro	ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	ldr, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm
/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm
/*
 * ret\c: return through \reg, using bx on >= v6 when returning via lr
 * (interworking-safe), otherwise mov pc.  Expanded once per condition
 * code suffix.
 */
	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	ret\c, reg
#if __LINUX_ARM_ARCH__ < 6
	mov\c	pc, \reg
#else
	.ifeqs	"\reg", "lr"
	bx\c	\reg
	.else
	mov\c	pc, \reg
	.endif
#endif
	.endm
	.endr

	/* 4-byte-wide return: pad with a nop on Thumb-2 kernels. */
	.macro	ret.w, reg
	ret	\reg
#ifdef CONFIG_THUMB2_KERNEL
	nop
#endif
	.endm
/*
 * Emit a BUG trap instruction; with CONFIG_DEBUG_BUGVERBOSE, also
 * record \msg and \line in __bug_table for the trap handler to report.
 */
	.macro	bug, msg, line
#ifdef CONFIG_THUMB2_KERNEL
1:	.inst	0xde02			@ 16-bit Thumb trap encoding
#else
1:	.inst	0xe7f001f2		@ 32-bit ARM trap encoding
#endif
#ifdef CONFIG_DEBUG_BUGVERBOSE
	.pushsection .rodata.str, "aMS", %progbits, 1
2:	.asciz	"\msg"
	.popsection
	.pushsection __bug_table, "aw"
	.align	2
	.word	1b, 2b			@ trap address, message address
	.hword	\line
	.popsection
#endif
	.endm
#ifdef CONFIG_KPROBES
/* Exclude entry from kprobing by listing it in _kprobe_blacklist. */
#define _ASM_NOKPROBE(entry)				\
	.pushsection "_kprobe_blacklist", "aw" ;	\
	.balign 4 ;					\
	.long entry;					\
	.popsection
#else
#define _ASM_NOKPROBE(entry)
#endif

#endif /* __ASM_ASSEMBLER_H__ */