#include <config.h>
#include <74xx_7xx.h>
#include <version.h>

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>

#ifndef CACHE_LINE_SIZE
# define CACHE_LINE_SIZE	L1_CACHE_BYTES
#endif

#if CACHE_LINE_SIZE == 128
#define LG_CACHE_LINE_SIZE 7
#elif CACHE_LINE_SIZE == 32
#define LG_CACHE_LINE_SIZE 5
#elif CACHE_LINE_SIZE == 16
#define LG_CACHE_LINE_SIZE 4
#elif CACHE_LINE_SIZE == 8
#define LG_CACHE_LINE_SIZE 3
#else
# error "Invalid cache line size!"
#endif
/*
 * Invalidate L1 instruction cache.
 */
_GLOBAL(invalidate_l1_instruction_cache)
	mfspr	r3,PVR
	rlwinm	r3,r3,16,16,31
	cmpi	0,r3,1
	beqlr				/* for 601, do nothing */
	/* 603/604 processor - use invalidate-all bit in HID0 */
	mfspr	r3,HID0
	ori	r3,r3,HID0_ICFI
	mtspr	HID0,r3
	isync
	blr

/*
 * Invalidate L1 data cache.
 */
_GLOBAL(invalidate_l1_data_cache)
	mfspr	r3,HID0
	ori	r3,r3,HID0_DCFI
	mtspr	HID0,r3
	isync
	blr

/*
 * Flush data cache.
 *
 * Flushes by displacement: reads every word from address 0 up to
 * CACHE_LINE_SIZE << 16, so any modified lines get cast out and
 * written back.  Relies on low memory being cacheable and the region
 * being larger than the data cache.
 */
_GLOBAL(flush_dcache)
	lis	r3,0
	lis	r5,CACHE_LINE_SIZE	/* upper bound = CACHE_LINE_SIZE << 16 */
flush:
	cmp	0,1,r3,r5
	bge	done
	lwz	r5,0(r3)		/* touch the word; displaces a line if needed */
	lis	r5,CACHE_LINE_SIZE	/* reload the bound clobbered by the lwz */
	addi	r3,r3,0x4
	b	flush
done:
	blr
/*
 * Write any modified data cache blocks out to memory
 * and invalidate the corresponding instruction cache blocks.
 * This is a no-op on the 601.
 *
 * flush_icache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(flush_icache_range)
	mfspr	r5,PVR
	rlwinm	r5,r5,16,16,31
	cmpi	0,r5,1
	beqlr				/* for 601, do nothing */
	li	r5,CACHE_LINE_SIZE-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
	srwi.	r4,r4,LG_CACHE_LINE_SIZE
	beqlr
	mtctr	r4
	mr	r6,r3
1:	dcbst	0,r3
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	mtctr	r4
2:	icbi	0,r6
	addi	r6,r6,CACHE_LINE_SIZE
	bdnz	2b
	sync				/* additional sync needed on g4 */
	isync
	blr
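
/*
 * Usage sketch (illustrative, not part of the original code): a caller
 * that has just copied executable code into RAM passes the buffer bounds
 * in r3/r4, matching the C prototype above.  "dest" and "dest_end" are
 * hypothetical symbols.
 *
 *	lis	r3, dest@ha
 *	addi	r3, r3, dest@l		- start of the copied code
 *	lis	r4, dest_end@ha
 *	addi	r4, r4, dest_end@l	- one byte past the end
 *	bl	flush_icache_range
 */
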
/*
 * Write any modified data cache blocks out to memory.
 * Does not invalidate the corresponding cache lines (especially for
 * any corresponding instruction cache).
 *
 * clean_dcache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(clean_dcache_range)
	li	r5,CACHE_LINE_SIZE-1
	andc	r3,r3,r5		/* align r3 down to cache line */
	subf	r4,r3,r4		/* r4 = offset of stop from start of cache line */
	add	r4,r4,r5		/* r4 += cache_line_size-1 */
	srwi.	r4,r4,LG_CACHE_LINE_SIZE /* r4 = number of cache lines to flush */
	beqlr				/* if r4 == 0 return */
	mtctr	r4			/* ctr = r4 */

	sync
1:	dcbst	0,r3
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	blr

/*
 * Write any modified data cache blocks out to memory and invalidate
 * them.  Does not touch the corresponding instruction cache blocks.
 *
 * flush_dcache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(flush_dcache_range)
	li	r5,CACHE_LINE_SIZE-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
	srwi.	r4,r4,LG_CACHE_LINE_SIZE
	beqlr
	mtctr	r4

	sync
1:	dcbf	0,r3
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	sync				/* wait for dcbf's to get to ram */
	blr

/*
 * Like above, but invalidate the D-cache without writing it back.
 * Used so the core doesn't see stale data after some other agent
 * (e.g. a DMA engine) has written memory behind the cache's back
 * (no cache snooping assumed here).
 *
 * invalidate_dcache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(invalidate_dcache_range)
	li	r5,CACHE_LINE_SIZE-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
	srwi.	r4,r4,LG_CACHE_LINE_SIZE
	beqlr
	mtctr	r4

	sync
1:	dcbi	0,r3
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	sync				/* wait for dcbi's to take effect */
	blr
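
/*
 * Usage sketch (illustrative, not part of the original code): the three
 * range routines differ only in the cache op they issue - dcbst (clean),
 * dcbf (flush) and dcbi (invalidate).  A hypothetical driver doing DMA to
 * and from a buffer might use them like this, with r3/r4 holding the
 * start/stop addresses as above:
 *
 *	bl	flush_dcache_range	- before the device reads the buffer
 *	...start the outbound DMA...
 *
 *	bl	invalidate_dcache_range	- before the CPU reads data the
 *					  device has written into the buffer
 */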

/*
 * Flush a particular page from the data cache to RAM.
 * Note: this is necessary because the instruction cache does *not*
 * snoop from the data cache.
 * This is a no-op on the 601 which has a unified cache.
 *
 * void __flush_page_to_ram(void *page)
 */
_GLOBAL(__flush_page_to_ram)
	mfspr	r5,PVR
	rlwinm	r5,r5,16,16,31
	cmpi	0,r5,1
	beqlr				/* for 601, do nothing */
	rlwinm	r3,r3,0,0,19		/* Get page base address */
	li	r4,4096/CACHE_LINE_SIZE	/* Number of lines in a page */
	mtctr	r4
	mr	r6,r3
0:	dcbst	0,r3			/* Write line to ram */
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	0b
	sync
	mtctr	r4
1:	icbi	0,r6
	addi	r6,r6,CACHE_LINE_SIZE
	bdnz	1b
	sync
	isync
	blr

/*
 * Flush a particular page from the instruction cache.
 * Note: this is necessary because the instruction cache does *not*
 * snoop from the data cache.
 * This is a no-op on the 601 which has a unified cache.
 *
 * void __flush_icache_page(void *page)
 */
_GLOBAL(__flush_icache_page)
	mfspr	r5,PVR
	rlwinm	r5,r5,16,16,31
	cmpi	0,r5,1
	beqlr				/* for 601, do nothing */
	li	r4,4096/CACHE_LINE_SIZE	/* Number of lines in a page */
	mtctr	r4
1:	icbi	0,r3
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	sync
	isync
	blr

/*
 * Clear a page using the dcbz instruction, which doesn't cause any
 * memory traffic (except to write out any cache lines which get
 * displaced).  This only works on cacheable memory.
 */
_GLOBAL(clear_page)
	li	r0,4096/CACHE_LINE_SIZE
	mtctr	r0
1:	dcbz	0,r3
	addi	r3,r3,CACHE_LINE_SIZE
	bdnz	1b
	blr
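
/*
 * Usage sketch (illustrative, not part of the original code): "page" is a
 * hypothetical page-aligned, cacheable 4 KB buffer.  Because dcbz only
 * zeroes cache lines, a caller that needs the zeros to reach RAM (e.g.
 * before a non-snooping device reads the page) would follow up with
 * __flush_page_to_ram or flush_dcache_range on the same address.
 *
 *	lis	r3, page@ha
 *	addi	r3, r3, page@l
 *	bl	clear_page
 */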

/*
 * Enable L1 instruction cache
 */
_GLOBAL(icache_enable)
	mfspr	r3, HID0
	li	r5, HID0_ICFI|HID0_ILOCK
	andc	r3, r3, r5		/* clear invalidate and lock bits */
	ori	r3, r3, HID0_ICE
	ori	r5, r3, HID0_ICFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
	isync
	blr

/*
 * Disable L1 instruction cache
 */
_GLOBAL(icache_disable)
	mflr	r4
	bl	invalidate_l1_instruction_cache	/* uses r3 */
	sync
	mtlr	r4
	mfspr	r3, HID0
	li	r5, 0
	ori	r5, r5, HID0_ICE
	andc	r3, r3, r5
	mtspr	HID0, r3
	isync
	blr

/*
 * Is instruction cache enabled?
 */
_GLOBAL(icache_status)
	mfspr	r3, HID0
	andi.	r3, r3, HID0_ICE
	blr
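
/*
 * Usage sketch (illustrative, not part of the original code): the status
 * routines return the masked HID0 bit in r3, so a caller only needs to
 * test for zero.  "cache_is_off" is a hypothetical label.
 *
 *	bl	icache_status
 *	cmpwi	r3, 0
 *	beq	cache_is_off
 */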

/*
 * Enable the L1 data cache.
 */
_GLOBAL(l1dcache_enable)
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r3, r3, HID0_DCE
	ori	r5, r3, HID0_DCFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
	sync
	blr

/*
 * Enable data cache(s) - L1 and optionally L2.
 * Calls l2cache_enable.  LR saved in r5.
 */
_GLOBAL(dcache_enable)
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r3, r3, HID0_DCE
	ori	r5, r3, HID0_DCFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
	sync
#ifdef CONFIG_SYS_L2
	mflr	r5
	bl	l2cache_enable		/* uses r3 and r4 */
	sync
	mtlr	r5
#endif
	blr
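
/*
 * Note (illustrative, not part of the original code): CONFIG_SYS_L2 is
 * expected to come from the board configuration.  A board with a backside
 * L2 would typically carry something like the following in its config
 * header so that dcache_enable/dcache_disable also chain to the L2
 * routines below:
 *
 *	#define CONFIG_SYS_L2
 */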

/*
 * Disable data cache(s) - L1 and optionally L2.
 * Calls flush_dcache and l2cache_disable_no_flush.
 * LR saved in r4.
 */
_GLOBAL(dcache_disable)
	mflr	r4			/* save link register */
	bl	flush_dcache		/* uses r3 and r5 */
	sync
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5
	mtspr	HID0, r3		/* no invalidate, unlock */
	li	r5, HID0_DCE|HID0_DCFI
	andc	r3, r3, r5		/* no enable, no invalidate */
	mtspr	HID0, r3
	sync
#ifdef CONFIG_SYS_L2
	bl	l2cache_disable_no_flush /* uses r3 */
#endif
	mtlr	r4			/* restore link register */
	blr

/*
 * Is data cache enabled?
 */
_GLOBAL(dcache_status)
	mfspr	r3, HID0
	andi.	r3, r3, HID0_DCE
	blr

/*
 * Invalidate L2 cache using L2I, polling L2IP (older parts) or L2I
 * (7450 family, where the bit clears itself when the invalidate is done).
 */
_GLOBAL(l2cache_invalidate)
	sync
	mfspr	r3, l2cr
	oris	r3, r3, L2CR_L2I@h
	sync
	mtspr	l2cr, r3
	sync
	mfspr	r3, PVR
	sync
	rlwinm	r3, r3, 16,16,31
	cmpli	0,r3,0x8000		/* 7451, 7441 */
	beq	0,inv_7450
	cmpli	0,r3,0x8001		/* 7455, 7445 */
	beq	0,inv_7450
	cmpli	0,r3,0x8002		/* 7457, 7447 */
	beq	0,inv_7450
	cmpli	0,r3,0x8003		/* 7447A */
	beq	0,inv_7450
	cmpli	0,r3,0x8004		/* 7448 */
	beq	0,inv_7450
invl2:
	/* older 74xx/750 parts: poll L2IP until the invalidate completes */
	mfspr	r3, l2cr
	andi.	r3, r3, L2CR_L2IP
	bne	invl2
	/* turn off the global invalidate bit */
	mfspr	r3, l2cr
	rlwinm	r3, r3, 0, 11, 9
	sync
	mtspr	l2cr, r3
	sync
	blr
inv_7450:
	/* 7450 family: L2I clears itself when the invalidate completes */
	mfspr	r3, l2cr
	andis.	r3, r3, L2CR_L2I@h
	bne	inv_7450
	blr

/*
 * Enable L2 cache.
 * Calls l2cache_invalidate.  LR is saved in r4.
 */
_GLOBAL(l2cache_enable)
	mflr	r4			/* save link register */
	bl	l2cache_invalidate	/* uses r3 */
	sync
	lis	r3, L2_ENABLE@h
	ori	r3, r3, L2_ENABLE@l
	mtspr	l2cr, r3
	isync
	mtlr	r4			/* restore link register */
	blr

/*
 * Disable L2 cache.
 * Calls flush_dcache.  LR is saved in r4.
 */
_GLOBAL(l2cache_disable)
	mflr	r4			/* save link register */
	bl	flush_dcache		/* uses r3 and r5 */
	sync
	mtlr	r4			/* restore link register */
l2cache_disable_no_flush:		/* provide way to disable L2 w/o flushing */
	lis	r3, L2_INIT@h
	ori	r3, r3, L2_INIT@l
	mtspr	l2cr, r3
	isync
	blr
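
/*
 * Note (illustrative, not part of the original code): L2_INIT and
 * L2_ENABLE are l2cr images expected to come from the board
 * configuration.  A hypothetical board header could provide, for
 * example:
 *
 *	#define L2_INIT		0x09000000		- hypothetical l2cr setup value
 *	#define L2_ENABLE	(L2_INIT | 0x80000000)	- same image with the L2E (enable) bit set
 *
 * so that l2cache_enable programs the full configuration with the enable
 * bit on, and l2cache_disable_no_flush writes the same image back with it
 * off.
 */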