/*
 * (C) Copyright 2010
 * Texas Instruments, <www.ti.com>
 * Aneesh V <aneesh@ti.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */
#include <linux/types.h>
#include <common.h>
#include <asm/armv7.h>
#include <asm/utils.h>
12 | ||
/* Internal operation selectors for the set/way and range helpers below */
#define ARMV7_DCACHE_INVAL_ALL		1
#define ARMV7_DCACHE_CLEAN_INVAL_ALL	2
#define ARMV7_DCACHE_INVAL_RANGE	3
#define ARMV7_DCACHE_CLEAN_INVAL_RANGE	4

#ifndef CONFIG_SYS_DCACHE_OFF
19 | /* | |
20 | * Write the level and type you want to Cache Size Selection Register(CSSELR) | |
21 | * to get size details from Current Cache Size ID Register(CCSIDR) | |
22 | */ | |
23 | static void set_csselr(u32 level, u32 type) | |
24 | { u32 csselr = level << 1 | type; | |
25 | ||
26 | /* Write to Cache Size Selection Register(CSSELR) */ | |
27 | asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (csselr)); | |
28 | } | |
29 | ||
30 | static u32 get_ccsidr(void) | |
31 | { | |
32 | u32 ccsidr; | |
33 | ||
34 | /* Read current CP15 Cache Size ID Register */ | |
35 | asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr)); | |
36 | return ccsidr; | |
37 | } | |
38 | ||
39 | static u32 get_clidr(void) | |
40 | { | |
41 | u32 clidr; | |
42 | ||
43 | /* Read current CP15 Cache Level ID Register */ | |
44 | asm volatile ("mrc p15,1,%0,c0,c0,1" : "=r" (clidr)); | |
45 | return clidr; | |
46 | } | |
47 | ||
48 | static void v7_inval_dcache_level_setway(u32 level, u32 num_sets, | |
49 | u32 num_ways, u32 way_shift, | |
50 | u32 log2_line_len) | |
51 | { | |
52 | int way, set, setway; | |
53 | ||
54 | /* | |
55 | * For optimal assembly code: | |
56 | * a. count down | |
57 | * b. have bigger loop inside | |
58 | */ | |
59 | for (way = num_ways - 1; way >= 0 ; way--) { | |
60 | for (set = num_sets - 1; set >= 0; set--) { | |
61 | setway = (level << 1) | (set << log2_line_len) | | |
62 | (way << way_shift); | |
63 | /* Invalidate data/unified cache line by set/way */ | |
64 | asm volatile (" mcr p15, 0, %0, c7, c6, 2" | |
65 | : : "r" (setway)); | |
66 | } | |
67 | } | |
882f80b9 A |
68 | /* DSB to make sure the operation is complete */ |
69 | CP15DSB; | |
2c451f78 A |
70 | } |
71 | ||
72 | static void v7_clean_inval_dcache_level_setway(u32 level, u32 num_sets, | |
73 | u32 num_ways, u32 way_shift, | |
74 | u32 log2_line_len) | |
75 | { | |
76 | int way, set, setway; | |
77 | ||
78 | /* | |
79 | * For optimal assembly code: | |
80 | * a. count down | |
81 | * b. have bigger loop inside | |
82 | */ | |
83 | for (way = num_ways - 1; way >= 0 ; way--) { | |
84 | for (set = num_sets - 1; set >= 0; set--) { | |
85 | setway = (level << 1) | (set << log2_line_len) | | |
86 | (way << way_shift); | |
87 | /* | |
88 | * Clean & Invalidate data/unified | |
89 | * cache line by set/way | |
90 | */ | |
91 | asm volatile (" mcr p15, 0, %0, c7, c14, 2" | |
92 | : : "r" (setway)); | |
93 | } | |
94 | } | |
882f80b9 A |
95 | /* DSB to make sure the operation is complete */ |
96 | CP15DSB; | |
2c451f78 A |
97 | } |
98 | ||
99 | static void v7_maint_dcache_level_setway(u32 level, u32 operation) | |
100 | { | |
101 | u32 ccsidr; | |
102 | u32 num_sets, num_ways, log2_line_len, log2_num_ways; | |
103 | u32 way_shift; | |
104 | ||
105 | set_csselr(level, ARMV7_CSSELR_IND_DATA_UNIFIED); | |
106 | ||
107 | ccsidr = get_ccsidr(); | |
108 | ||
109 | log2_line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >> | |
110 | CCSIDR_LINE_SIZE_OFFSET) + 2; | |
111 | /* Converting from words to bytes */ | |
112 | log2_line_len += 2; | |
113 | ||
114 | num_ways = ((ccsidr & CCSIDR_ASSOCIATIVITY_MASK) >> | |
115 | CCSIDR_ASSOCIATIVITY_OFFSET) + 1; | |
116 | num_sets = ((ccsidr & CCSIDR_NUM_SETS_MASK) >> | |
117 | CCSIDR_NUM_SETS_OFFSET) + 1; | |
118 | /* | |
119 | * According to ARMv7 ARM number of sets and number of ways need | |
120 | * not be a power of 2 | |
121 | */ | |
122 | log2_num_ways = log_2_n_round_up(num_ways); | |
123 | ||
124 | way_shift = (32 - log2_num_ways); | |
125 | if (operation == ARMV7_DCACHE_INVAL_ALL) { | |
126 | v7_inval_dcache_level_setway(level, num_sets, num_ways, | |
127 | way_shift, log2_line_len); | |
128 | } else if (operation == ARMV7_DCACHE_CLEAN_INVAL_ALL) { | |
129 | v7_clean_inval_dcache_level_setway(level, num_sets, num_ways, | |
130 | way_shift, log2_line_len); | |
131 | } | |
132 | } | |
133 | ||
134 | static void v7_maint_dcache_all(u32 operation) | |
135 | { | |
136 | u32 level, cache_type, level_start_bit = 0; | |
137 | ||
138 | u32 clidr = get_clidr(); | |
139 | ||
140 | for (level = 0; level < 7; level++) { | |
141 | cache_type = (clidr >> level_start_bit) & 0x7; | |
142 | if ((cache_type == ARMV7_CLIDR_CTYPE_DATA_ONLY) || | |
143 | (cache_type == ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA) || | |
144 | (cache_type == ARMV7_CLIDR_CTYPE_UNIFIED)) | |
145 | v7_maint_dcache_level_setway(level, operation); | |
146 | level_start_bit += 3; | |
147 | } | |
148 | } | |
149 | ||
150 | static void v7_dcache_clean_inval_range(u32 start, | |
151 | u32 stop, u32 line_len) | |
152 | { | |
153 | u32 mva; | |
154 | ||
155 | /* Align start to cache line boundary */ | |
156 | start &= ~(line_len - 1); | |
157 | for (mva = start; mva < stop; mva = mva + line_len) { | |
158 | /* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */ | |
159 | asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva)); | |
160 | } | |
161 | } | |
162 | ||
163 | static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len) | |
164 | { | |
165 | u32 mva; | |
166 | ||
167 | /* | |
cabe2878 A |
168 | * If start address is not aligned to cache-line do not |
169 | * invalidate the first cache-line | |
2c451f78 A |
170 | */ |
171 | if (start & (line_len - 1)) { | |
cabe2878 A |
172 | printf("ERROR: %s - start address is not aligned - 0x%08x\n", |
173 | __func__, start); | |
2c451f78 A |
174 | /* move to next cache line */ |
175 | start = (start + line_len - 1) & ~(line_len - 1); | |
176 | } | |
177 | ||
178 | /* | |
cabe2878 A |
179 | * If stop address is not aligned to cache-line do not |
180 | * invalidate the last cache-line | |
2c451f78 A |
181 | */ |
182 | if (stop & (line_len - 1)) { | |
cabe2878 A |
183 | printf("ERROR: %s - stop address is not aligned - 0x%08x\n", |
184 | __func__, stop); | |
2c451f78 A |
185 | /* align to the beginning of this cache line */ |
186 | stop &= ~(line_len - 1); | |
187 | } | |
188 | ||
189 | for (mva = start; mva < stop; mva = mva + line_len) { | |
190 | /* DCIMVAC - Invalidate data cache by MVA to PoC */ | |
191 | asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva)); | |
192 | } | |
193 | } | |
194 | ||
195 | static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op) | |
196 | { | |
197 | u32 line_len, ccsidr; | |
198 | ||
199 | ccsidr = get_ccsidr(); | |
200 | line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >> | |
201 | CCSIDR_LINE_SIZE_OFFSET) + 2; | |
202 | /* Converting from words to bytes */ | |
203 | line_len += 2; | |
204 | /* converting from log2(linelen) to linelen */ | |
205 | line_len = 1 << line_len; | |
206 | ||
207 | switch (range_op) { | |
208 | case ARMV7_DCACHE_CLEAN_INVAL_RANGE: | |
209 | v7_dcache_clean_inval_range(start, stop, line_len); | |
210 | break; | |
211 | case ARMV7_DCACHE_INVAL_RANGE: | |
212 | v7_dcache_inval_range(start, stop, line_len); | |
213 | break; | |
214 | } | |
215 | ||
882f80b9 A |
216 | /* DSB to make sure the operation is complete */ |
217 | CP15DSB; | |
2c451f78 A |
218 | } |
219 | ||
/*
 * Invalidate TLB.
 *
 * Invalidates the unified, data and instruction TLBs in turn, then
 * synchronizes with DSB/ISB.  The barrier order matters: DSB first so
 * the invalidations complete, ISB so later instructions see them.
 */
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	CP15DSB;
	/* Full system ISB - make sure the instruction stream sees it */
	CP15ISB;
}
234 | ||
/* Invalidate the entire data cache: all architected levels, then outer */
void invalidate_dcache_all(void)
{
	/* Set/way invalidate every data/unified level found in CLIDR */
	v7_maint_dcache_all(ARMV7_DCACHE_INVAL_ALL);

	/* Outer cache (weak no-op unless the platform provides one) */
	v7_outer_cache_inval_all();
}
241 | ||
/*
 * Performs a clean & invalidation of the entire data cache
 * at all levels
 */
void flush_dcache_all(void)
{
	/* Inner (architected) levels first, then the outer cache */
	v7_maint_dcache_all(ARMV7_DCACHE_CLEAN_INVAL_ALL);

	v7_outer_cache_flush_all();
}
252 | ||
/*
 * Invalidates range in all levels of D-cache/unified cache used:
 * Affects the range [start, stop - 1]
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{

	/* Architected levels (line length taken from CCSIDR) */
	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

	/* Outer cache, if the platform overrides the weak stub */
	v7_outer_cache_inval_range(start, stop);
}
264 | ||
/*
 * Flush range(clean & invalidate) from all levels of D-cache/unified
 * cache used:
 * Affects the range [start, stop - 1]
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	/* Architected levels first so data reaches the outer cache/RAM */
	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

	v7_outer_cache_flush_range(start, stop);
}
276 | ||
/*
 * Prepare caches/TLB before the MMU is enabled: enable the outer
 * cache, invalidate all data caches, then invalidate the TLBs.
 * The sequence order is deliberate — do not reorder.
 */
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}
283 | ||
0dde7f53 SG |
284 | void mmu_page_table_flush(unsigned long start, unsigned long stop) |
285 | { | |
286 | flush_dcache_range(start, stop); | |
287 | v7_inval_tlb(); | |
288 | } | |
289 | ||
2c451f78 A |
290 | /* |
291 | * Flush range from all levels of d-cache/unified-cache used: | |
292 | * Affects the range [start, start + size - 1] | |
293 | */ | |
294 | void flush_cache(unsigned long start, unsigned long size) | |
295 | { | |
296 | flush_dcache_range(start, start + size); | |
297 | } | |
#else /* #ifndef CONFIG_SYS_DCACHE_OFF */
/*
 * Empty stubs: keep the public API link-compatible when D-cache
 * support is compiled out.
 */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void flush_cache(unsigned long start, unsigned long size)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
#endif /* #ifndef CONFIG_SYS_DCACHE_OFF */
331 | ||
#ifndef CONFIG_SYS_ICACHE_OFF
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
	/*
	 * Invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

	/* Invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

	/* Full system DSB - make sure that the invalidation is complete */
	CP15DSB;

	/* ISB - make sure the instruction stream sees it */
	CP15ISB;
}
#else
/* Empty stub: keep the API link-compatible with I-cache support off */
void invalidate_icache_all(void)
{
}
#endif
356 | ||
/*
 * Stub implementations for outer cache operations.
 *
 * Each v7_outer_cache_* symbol is a weak alias to an empty __-prefixed
 * body, so platforms with an outer cache can provide a strong
 * definition that overrides the no-op at link time.
 */
void __v7_outer_cache_enable(void)
{
}
void v7_outer_cache_enable(void)
	__attribute__((weak, alias("__v7_outer_cache_enable")));

void __v7_outer_cache_disable(void)
{
}
void v7_outer_cache_disable(void)
	__attribute__((weak, alias("__v7_outer_cache_disable")));

void __v7_outer_cache_flush_all(void)
{
}
void v7_outer_cache_flush_all(void)
	__attribute__((weak, alias("__v7_outer_cache_flush_all")));

void __v7_outer_cache_inval_all(void)
{
}
void v7_outer_cache_inval_all(void)
	__attribute__((weak, alias("__v7_outer_cache_inval_all")));

void __v7_outer_cache_flush_range(u32 start, u32 end)
{
}
void v7_outer_cache_flush_range(u32 start, u32 end)
	__attribute__((weak, alias("__v7_outer_cache_flush_range")));

void __v7_outer_cache_inval_range(u32 start, u32 end)
{
}
void v7_outer_cache_inval_range(u32 start, u32 end)
	__attribute__((weak, alias("__v7_outer_cache_inval_range")));