/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * (C) Copyright 2013
 * David Feng <fenghua@phytium.com.cn>
 *
 * This file is based on sample code from ARMv8 ARM.
 */

#include <asm-offsets.h>
#include <config.h>
#include <asm/macro.h>
#include <asm/system.h>
#include <linux/linkage.h>

#ifndef CONFIG_CMO_BY_VA_ONLY
/*
 * void __asm_dcache_level(level)
 *
 * flush or invalidate one level cache.
 *
 * x0: cache level
 * x1: 0 clean & invalidate, 1 invalidate only
 * x16: FEAT_CCIDX (non-zero selects the extended CCSIDR_EL1 layout)
 * x2~x9: clobbered
 */
.pushsection .text.__asm_dcache_level, "ax"
ENTRY(__asm_dcache_level)
	lsl	x12, x0, #1
	msr	csselr_el1, x12		/* select cache level */
	isb				/* sync change of cssidr_el1 */
	mrs	x6, ccsidr_el1		/* read the new cssidr_el1 */
	ubfx	x2, x6, #0, #3		/* x2 <- log2(cache line size)-4 */
	cbz	x16, 3f			/* check for FEAT_CCIDX */
	/* FEAT_CCIDX: wider set/way fields in CCSIDR_EL1 */
	ubfx	x3, x6, #3, #21		/* x3 <- number of cache ways - 1 */
	ubfx	x4, x6, #32, #24	/* x4 <- number of cache sets - 1 */
	b	4f
3:
	/* legacy CCSIDR_EL1 layout */
	ubfx	x3, x6, #3, #10		/* x3 <- number of cache ways - 1 */
	ubfx	x4, x6, #13, #15	/* x4 <- number of cache sets - 1 */
4:
	add	x2, x2, #4		/* x2 <- log2(cache line size) */
	clz	w5, w3			/* bit position of #ways */
	/* x12 <- cache level << 1 */
	/* x2 <- line length offset */
	/* x3 <- number of cache ways - 1 */
	/* x4 <- number of cache sets - 1 */
	/* x5 <- bit position of #ways */

	/*
	 * Walk every set, and within each set every way, building the
	 * DC {C}ISW operand: way in the top bits, set shifted by the
	 * line-length offset, level in bits [3:1].
	 */
loop_set:
	mov	x6, x3			/* x6 <- working copy of #ways */
loop_way:
	lsl	x7, x6, x5
	orr	x9, x12, x7		/* map way and level to cisw value */
	lsl	x7, x4, x2
	orr	x9, x9, x7		/* map set number to cisw value */
	tbz	w1, #0, 1f		/* x1 bit0 clear -> clean+invalidate */
	dc	isw, x9			/* invalidate only */
	b	2f
1:	dc	cisw, x9		/* clean & invalidate by set/way */
2:	subs	x6, x6, #1		/* decrement the way */
	b.ge	loop_way
	subs	x4, x4, #1		/* decrement the set */
	b.ge	loop_set

	ret
ENDPROC(__asm_dcache_level)
.popsection

/*
 * void __asm_dcache_all(int invalidate_only)
 *
 * x0: 0 clean & invalidate, 1 invalidate only
 *
 * flush or invalidate all data cache by SET/WAY.
 * Worker shared by __asm_flush_dcache_all and __asm_invalidate_dcache_all;
 * iterates from level 0 up to LoC, calling __asm_dcache_level per level.
 */
.pushsection .text.__asm_dcache_all, "ax"
ENTRY(__asm_dcache_all)
	mov	x1, x0			/* x1 <- mode for __asm_dcache_level */
	dsb	sy			/* complete prior stores before walk */
	mrs	x10, clidr_el1		/* read clidr_el1 */
	ubfx	x11, x10, #24, #3	/* x11 <- loc (Level of Coherence) */
	cbz	x11, finished		/* if loc is 0, exit */
	mov	x15, lr			/* save lr: bl below clobbers it */
	mrs	x16, s3_0_c0_c7_2	/* read value of id_aa64mmfr2_el1 */
	ubfx	x16, x16, #20, #4	/* save FEAT_CCIDX identifier in x16 */
	mov	x0, #0			/* start flush at cache level 0 */
	/* x0  <- cache level */
	/* x10 <- clidr_el1 */
	/* x11 <- loc */
	/* x15 <- return address */

loop_level:
	add	x12, x0, x0, lsl #1	/* x12 <- tripled cache level */
	lsr	x12, x10, x12
	and	x12, x12, #7		/* x12 <- cache type */
	cmp	x12, #2			/* type < 2: no cache or icache only */
	b.lt	skip			/* skip if no cache or icache */
	bl	__asm_dcache_level	/* x1 = 0 flush, 1 invalidate */
skip:
	add	x0, x0, #1		/* increment cache level */
	cmp	x11, x0
	b.gt	loop_level

	mov	x0, #0
	msr	csselr_el1, x0		/* restore csselr_el1 */
	dsb	sy
	isb
	mov	lr, x15

finished:
	ret
ENDPROC(__asm_dcache_all)
.popsection

/*
 * void __asm_flush_dcache_all(void)
 *
 * Clean & invalidate the entire data cache by set/way.
 */
.pushsection .text.__asm_flush_dcache_all, "ax"
ENTRY(__asm_flush_dcache_all)
	mov	x0, xzr			/* 0 = clean & invalidate */
	b	__asm_dcache_all	/* tail call; returns to our caller */
ENDPROC(__asm_flush_dcache_all)
.popsection

/*
 * void __asm_invalidate_dcache_all(void)
 *
 * Invalidate (without cleaning) the entire data cache by set/way.
 */
.pushsection .text.__asm_invalidate_dcache_all, "ax"
ENTRY(__asm_invalidate_dcache_all)
	mov	x0, #1			/* 1 = invalidate only */
	b	__asm_dcache_all	/* tail call; returns to our caller */
ENDPROC(__asm_invalidate_dcache_all)
.popsection

/*
 * Weak no-op default; platforms with an external L3 cache override it.
 */
.pushsection .text.__asm_flush_l3_dcache, "ax"
WEAK(__asm_flush_l3_dcache)
	mov	x0, xzr			/* return status as success */
	ret
ENDPROC(__asm_flush_l3_dcache)
.popsection

/*
 * Weak no-op default; platforms with an external L3 cache override it.
 */
.pushsection .text.__asm_invalidate_l3_icache, "ax"
WEAK(__asm_invalidate_l3_icache)
	mov	x0, xzr			/* return status as success */
	ret
ENDPROC(__asm_invalidate_l3_icache)
.popsection

#else	/* CONFIG_CMO_BY_VA */

/*
 * Define these so that they actively clash with an implementation
 * accidentally selecting CONFIG_CMO_BY_VA
 */

/* Strong (non-weak) stub: clashes with any per-board override. */
.pushsection .text.__asm_invalidate_l3_icache, "ax"
ENTRY(__asm_invalidate_l3_icache)
	mov	x0, #0			/* report success */
	ret
ENDPROC(__asm_invalidate_l3_icache)
.popsection
/* Strong (non-weak) stub: clashes with any per-board override. */
.pushsection .text.__asm_flush_l3_dcache, "ax"
ENTRY(__asm_flush_l3_dcache)
	mov	x0, #0			/* report success */
	ret
ENDPROC(__asm_flush_l3_dcache)
.popsection
#endif	/* CONFIG_CMO_BY_VA */

/*
 * void __asm_flush_dcache_range(start, end)
 *
 * clean & invalidate data cache in the range
 *
 * x0: start address
 * x1: end address
 */
.pushsection .text.__asm_flush_dcache_range, "ax"
ENTRY(__asm_flush_dcache_range)
	mrs	x3, ctr_el0
	ubfx	x3, x3, #16, #4		/* DminLine: log2(words per d-line) */
	mov	x2, #4
	lsl	x2, x2, x3		/* x2 <- minimal cache line size, bytes */

	sub	x3, x2, #1		/* line-size mask */
	bic	x0, x0, x3		/* align start down to line boundary */
.Lflush_line:
	dc	civac, x0		/* clean & invalidate data or unified cache */
	add	x0, x0, x2		/* advance one cache line */
	cmp	x0, x1
	b.lo	.Lflush_line
	dsb	sy			/* complete maintenance before return */
	ret
ENDPROC(__asm_flush_dcache_range)
.popsection
/*
 * void __asm_invalidate_dcache_range(start, end)
 *
 * invalidate data cache in the range
 *
 * x0: start address
 * x1: end address
 */
.pushsection .text.__asm_invalidate_dcache_range, "ax"
ENTRY(__asm_invalidate_dcache_range)
	mrs	x3, ctr_el0
	ubfx	x3, x3, #16, #4		/* DminLine: log2(words per d-line) */
	mov	x2, #4
	lsl	x2, x2, x3		/* x2 <- minimal cache line size, bytes */

	sub	x3, x2, #1		/* line-size mask */
	bic	x0, x0, x3		/* align start down to line boundary */
.Linval_line:
	dc	ivac, x0		/* invalidate data or unified cache */
	add	x0, x0, x2		/* advance one cache line */
	cmp	x0, x1
	b.lo	.Linval_line
	dsb	sy			/* complete maintenance before return */
	ret
ENDPROC(__asm_invalidate_dcache_range)
.popsection

/*
 * void __asm_invalidate_icache_all(void)
 *
 * invalidate all icache entries, Inner Shareable domain, to PoU.
 * (NOTE: original comment said "tlb entries" — the code below is
 * icache maintenance, not TLB maintenance.)
 */
.pushsection .text.__asm_invalidate_icache_all, "ax"
ENTRY(__asm_invalidate_icache_all)
	ic	ialluis			/* invalidate all i-caches, inner shareable */
	isb	sy			/* ensure subsequent fetches see it */
	ret
ENDPROC(__asm_invalidate_icache_all)
.popsection

/*
 * Weak no-op default; platforms with an external L3 cache override it.
 */
.pushsection .text.__asm_invalidate_l3_dcache, "ax"
WEAK(__asm_invalidate_l3_dcache)
	mov	x0, xzr			/* return status as success */
	ret
ENDPROC(__asm_invalidate_l3_dcache)
.popsection

/*
 * void __asm_switch_ttbr(ulong new_ttbr)
 *
 * Safely switches to a new page table: caches/MMU are disabled, the
 * TLB is invalidated, TTBR0 is swapped, then the original SCTLR is
 * restored.  Each step is EL-dispatched via the switch_el macro.
 */
.pushsection .text.__asm_switch_ttbr, "ax"
ENTRY(__asm_switch_ttbr)
	/* x2 = SCTLR (alive throughout the function) */
	switch_el x4, 3f, 2f, 1f
3:	mrs	x2, sctlr_el3
	b	0f
2:	mrs	x2, sctlr_el2
	b	0f
1:	mrs	x2, sctlr_el1
0:

	/* Unset CR_M | CR_C | CR_I from SCTLR to disable all caches */
	movn	x1, #(CR_M | CR_C | CR_I)
	and	x1, x2, x1
	switch_el x4, 3f, 2f, 1f
3:	msr	sctlr_el3, x1
	b	0f
2:	msr	sctlr_el2, x1
	b	0f
1:	msr	sctlr_el1, x1
0:	isb				/* SCTLR change takes effect here */

	/* This call only clobbers x30 (lr) and x9 (unused) */
	mov	x3, x30			/* preserve our return address */
	bl	__asm_invalidate_tlb_all

	/* From here on we're running safely with caches disabled */

	/* Set TTBR to our first argument */
	switch_el x4, 3f, 2f, 1f
3:	msr	ttbr0_el3, x0
	b	0f
2:	msr	ttbr0_el2, x0
	b	0f
1:	msr	ttbr0_el1, x0
0:	isb				/* TTBR change takes effect here */

	/* Restore original SCTLR and thus enable caches again */
	switch_el x4, 3f, 2f, 1f
3:	msr	sctlr_el3, x2
	b	0f
2:	msr	sctlr_el2, x2
	b	0f
1:	msr	sctlr_el1, x2
0:	isb

	ret	x3			/* return via the saved lr */
ENDPROC(__asm_switch_ttbr)
.popsection