/*
 * Copyright (C) 1998	Dan Malek <dmalek@jlc.net>
 * Copyright (C) 1999	Magnus Damm <kieraypc01.p.y.kie.era.ericsson.se>
 * Copyright (C) 2000 - 2003 Wolfgang Denk <wd@denx.de>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
24 | ||
25 | /* | |
26 | * U-Boot - Startup Code for MPC5xxx CPUs | |
27 | */ | |
28 | #include <config.h> | |
29 | #include <mpc5xxx.h> | |
561858ee | 30 | #include <timestamp.h> |
945af8d7 WD |
31 | #include <version.h> |
32 | ||
cbd8a35c | 33 | #define CONFIG_MPC5xxx 1 /* needed for Linux kernel header files */ |
945af8d7 WD |
34 | #define _LINUX_CONFIG_H 1 /* avoid reading Linux autoconf.h file */ |
35 | ||
36 | #include <ppc_asm.tmpl> | |
37 | #include <ppc_defs.h> | |
38 | ||
39 | #include <asm/cache.h> | |
40 | #include <asm/mmu.h> | |
41 | ||
42 | #ifndef CONFIG_IDENT_STRING | |
43 | #define CONFIG_IDENT_STRING "" | |
44 | #endif | |
45 | ||
46 | /* We don't want the MMU yet. | |
47 | */ | |
48 | #undef MSR_KERNEL | |
49 | /* Floating Point enable, Machine Check and Recoverable Interr. */ | |
50 | #ifdef DEBUG | |
51 | #define MSR_KERNEL (MSR_FP|MSR_RI) | |
52 | #else | |
53 | #define MSR_KERNEL (MSR_FP|MSR_ME|MSR_RI) | |
54 | #endif | |
55 | ||
56 | /* | |
57 | * Set up GOT: Global Offset Table | |
58 | * | |
0f8aa159 | 59 | * Use r12 to access the GOT |
945af8d7 WD |
60 | */ |
61 | START_GOT | |
62 | GOT_ENTRY(_GOT2_TABLE_) | |
63 | GOT_ENTRY(_FIXUP_TABLE_) | |
64 | ||
65 | GOT_ENTRY(_start) | |
66 | GOT_ENTRY(_start_of_vectors) | |
67 | GOT_ENTRY(_end_of_vectors) | |
68 | GOT_ENTRY(transfer_to_handler) | |
69 | ||
70 | GOT_ENTRY(__init_end) | |
71 | GOT_ENTRY(_end) | |
72 | GOT_ENTRY(__bss_start) | |
73 | END_GOT | |
74 | ||
75 | /* | |
76 | * Version string | |
77 | */ | |
78 | .data | |
79 | .globl version_string | |
80 | version_string: | |
81 | .ascii U_BOOT_VERSION | |
561858ee | 82 | .ascii " (", U_BOOT_DATE, " - ", U_BOOT_TIME, ")" |
945af8d7 WD |
83 | .ascii CONFIG_IDENT_STRING, "\0" |
84 | ||
85 | /* | |
86 | * Exception vectors | |
87 | */ | |
88 | .text | |
89 | . = EXC_OFF_SYS_RESET | |
90 | .globl _start | |
91 | _start: | |
92 | li r21, BOOTFLAG_COLD /* Normal Power-On */ | |
93 | nop | |
94 | b boot_cold | |
95 | ||
96 | . = EXC_OFF_SYS_RESET + 0x10 | |
97 | ||
98 | .globl _start_warm | |
99 | _start_warm: | |
100 | li r21, BOOTFLAG_WARM /* Software reboot */ | |
101 | b boot_warm | |
102 | ||
103 | boot_cold: | |
104 | boot_warm: | |
105 | mfmsr r5 /* save msr contents */ | |
106 | ||
e35745bb WD |
107 | /* Move CSBoot and adjust instruction pointer */ |
108 | /*--------------------------------------------------------------*/ | |
109 | ||
6d0f6bcf JCPV |
110 | #if defined(CONFIG_SYS_LOWBOOT) |
111 | # if defined(CONFIG_SYS_RAMBOOT) | |
112 | # error CONFIG_SYS_LOWBOOT is incompatible with CONFIG_SYS_RAMBOOT | |
113 | # endif /* CONFIG_SYS_RAMBOOT */ | |
6d0f6bcf JCPV |
114 | lis r4, CONFIG_SYS_DEFAULT_MBAR@h |
115 | lis r3, START_REG(CONFIG_SYS_BOOTCS_START)@h | |
116 | ori r3, r3, START_REG(CONFIG_SYS_BOOTCS_START)@l | |
79d696fc | 117 | stw r3, 0x4(r4) /* CS0 start */ |
6d0f6bcf JCPV |
118 | lis r3, STOP_REG(CONFIG_SYS_BOOTCS_START, CONFIG_SYS_BOOTCS_SIZE)@h |
119 | ori r3, r3, STOP_REG(CONFIG_SYS_BOOTCS_START, CONFIG_SYS_BOOTCS_SIZE)@l | |
79d696fc | 120 | stw r3, 0x8(r4) /* CS0 stop */ |
5cf9da48 WD |
121 | lis r3, 0x02010000@h |
122 | ori r3, r3, 0x02010000@l | |
e35745bb | 123 | stw r3, 0x54(r4) /* CS0 and Boot enable */ |
5cf9da48 | 124 | |
e35745bb WD |
125 | lis r3, lowboot_reentry@h /* jump from bootlow address space (0x0000xxxx) */ |
126 | ori r3, r3, lowboot_reentry@l /* to the address space the linker used */ | |
5cf9da48 | 127 | mtlr r3 |
e35745bb | 128 | blr |
d4ca31c4 WD |
129 | |
130 | lowboot_reentry: | |
6d0f6bcf JCPV |
131 | lis r3, START_REG(CONFIG_SYS_BOOTCS_START)@h |
132 | ori r3, r3, START_REG(CONFIG_SYS_BOOTCS_START)@l | |
79d696fc | 133 | stw r3, 0x4c(r4) /* Boot start */ |
6d0f6bcf JCPV |
134 | lis r3, STOP_REG(CONFIG_SYS_BOOTCS_START, CONFIG_SYS_BOOTCS_SIZE)@h |
135 | ori r3, r3, STOP_REG(CONFIG_SYS_BOOTCS_START, CONFIG_SYS_BOOTCS_SIZE)@l | |
79d696fc | 136 | stw r3, 0x50(r4) /* Boot stop */ |
5cf9da48 WD |
137 | lis r3, 0x02000001@h |
138 | ori r3, r3, 0x02000001@l | |
e35745bb | 139 | stw r3, 0x54(r4) /* Boot enable, CS0 disable */ |
6d0f6bcf | 140 | #endif /* CONFIG_SYS_LOWBOOT */ |
d4ca31c4 | 141 | |
6d0f6bcf JCPV |
142 | #if defined(CONFIG_SYS_DEFAULT_MBAR) && !defined(CONFIG_SYS_RAMBOOT) |
143 | lis r3, CONFIG_SYS_MBAR@h | |
144 | ori r3, r3, CONFIG_SYS_MBAR@l | |
3c74e32a WD |
145 | /* MBAR is mirrored into the MBAR SPR */ |
146 | mtspr MBAR,r3 | |
945af8d7 | 147 | rlwinm r3, r3, 16, 16, 31 |
6d0f6bcf | 148 | lis r4, CONFIG_SYS_DEFAULT_MBAR@h |
945af8d7 | 149 | stw r3, 0(r4) |
6d0f6bcf | 150 | #endif /* CONFIG_SYS_DEFAULT_MBAR */ |
945af8d7 WD |
151 | |
152 | /* Initialise the MPC5xxx processor core */ | |
153 | /*--------------------------------------------------------------*/ | |
154 | ||
155 | bl init_5xxx_core | |
156 | ||
157 | /* initialize some things that are hard to access from C */ | |
158 | /*--------------------------------------------------------------*/ | |
159 | ||
160 | /* set up stack in on-chip SRAM */ | |
6d0f6bcf JCPV |
161 | lis r3, CONFIG_SYS_INIT_RAM_ADDR@h |
162 | ori r3, r3, CONFIG_SYS_INIT_RAM_ADDR@l | |
163 | ori r1, r3, CONFIG_SYS_INIT_SP_OFFSET | |
945af8d7 WD |
164 | li r0, 0 /* Make room for stack frame header and */ |
165 | stwu r0, -4(r1) /* clear final stack frame so that */ | |
166 | stwu r0, -4(r1) /* stack backtraces terminate cleanly */ | |
167 | ||
168 | /* let the C-code set up the rest */ | |
169 | /* */ | |
170 | /* Be careful to keep code relocatable ! */ | |
171 | /*--------------------------------------------------------------*/ | |
172 | ||
173 | GET_GOT /* initialize GOT access */ | |
174 | ||
175 | /* r3: IMMR */ | |
176 | bl cpu_init_f /* run low-level CPU init code (in Flash)*/ | |
177 | ||
178 | mr r3, r21 | |
179 | /* r3: BOOTFLAG */ | |
180 | bl board_init_f /* run 1st part of board init code (in Flash)*/ | |
181 | ||
182 | /* | |
183 | * Vector Table | |
184 | */ | |
185 | ||
186 | .globl _start_of_vectors | |
187 | _start_of_vectors: | |
188 | ||
189 | /* Machine check */ | |
190 | STD_EXCEPTION(0x200, MachineCheck, MachineCheckException) | |
191 | ||
192 | /* Data Storage exception. */ | |
193 | STD_EXCEPTION(0x300, DataStorage, UnknownException) | |
194 | ||
195 | /* Instruction Storage exception. */ | |
196 | STD_EXCEPTION(0x400, InstStorage, UnknownException) | |
197 | ||
198 | /* External Interrupt exception. */ | |
199 | STD_EXCEPTION(0x500, ExtInterrupt, external_interrupt) | |
200 | ||
201 | /* Alignment exception. */ | |
202 | . = 0x600 | |
203 | Alignment: | |
02032e8f | 204 | EXCEPTION_PROLOG(SRR0, SRR1) |
945af8d7 WD |
205 | mfspr r4,DAR |
206 | stw r4,_DAR(r21) | |
207 | mfspr r5,DSISR | |
208 | stw r5,_DSISR(r21) | |
209 | addi r3,r1,STACK_FRAME_OVERHEAD | |
fc4e1887 | 210 | EXC_XFER_TEMPLATE(Alignment, AlignmentException, MSR_KERNEL, COPY_EE) |
945af8d7 WD |
211 | |
212 | /* Program check exception */ | |
213 | . = 0x700 | |
214 | ProgramCheck: | |
02032e8f | 215 | EXCEPTION_PROLOG(SRR0, SRR1) |
945af8d7 | 216 | addi r3,r1,STACK_FRAME_OVERHEAD |
fc4e1887 JT |
217 | EXC_XFER_TEMPLATE(ProgramCheck, ProgramCheckException, |
218 | MSR_KERNEL, COPY_EE) | |
945af8d7 WD |
219 | |
220 | STD_EXCEPTION(0x800, FPUnavailable, UnknownException) | |
221 | ||
222 | /* I guess we could implement decrementer, and may have | |
223 | * to someday for timekeeping. | |
224 | */ | |
225 | STD_EXCEPTION(0x900, Decrementer, timer_interrupt) | |
226 | ||
227 | STD_EXCEPTION(0xa00, Trap_0a, UnknownException) | |
228 | STD_EXCEPTION(0xb00, Trap_0b, UnknownException) | |
27b207fd | 229 | STD_EXCEPTION(0xc00, SystemCall, UnknownException) |
945af8d7 WD |
230 | STD_EXCEPTION(0xd00, SingleStep, UnknownException) |
231 | ||
232 | STD_EXCEPTION(0xe00, Trap_0e, UnknownException) | |
233 | STD_EXCEPTION(0xf00, Trap_0f, UnknownException) | |
234 | ||
235 | STD_EXCEPTION(0x1000, InstructionTLBMiss, UnknownException) | |
236 | STD_EXCEPTION(0x1100, DataLoadTLBMiss, UnknownException) | |
237 | STD_EXCEPTION(0x1200, DataStoreTLBMiss, UnknownException) | |
238 | #ifdef DEBUG | |
239 | . = 0x1300 | |
240 | /* | |
241 | * This exception occurs when the program counter matches the | |
242 | * Instruction Address Breakpoint Register (IABR). | |
243 | * | |
244 | * I want the cpu to halt if this occurs so I can hunt around | |
245 | * with the debugger and look at things. | |
246 | * | |
247 | * When DEBUG is defined, both machine check enable (in the MSR) | |
248 | * and checkstop reset enable (in the reset mode register) are | |
249 | * turned off and so a checkstop condition will result in the cpu | |
250 | * halting. | |
251 | * | |
252 | * I force the cpu into a checkstop condition by putting an illegal | |
253 | * instruction here (at least this is the theory). | |
254 | * | |
255 | * well - that didnt work, so just do an infinite loop! | |
256 | */ | |
257 | 1: b 1b | |
258 | #else | |
259 | STD_EXCEPTION(0x1300, InstructionBreakpoint, DebugException) | |
260 | #endif | |
261 | STD_EXCEPTION(0x1400, SMI, UnknownException) | |
262 | ||
263 | STD_EXCEPTION(0x1500, Trap_15, UnknownException) | |
264 | STD_EXCEPTION(0x1600, Trap_16, UnknownException) | |
265 | STD_EXCEPTION(0x1700, Trap_17, UnknownException) | |
266 | STD_EXCEPTION(0x1800, Trap_18, UnknownException) | |
267 | STD_EXCEPTION(0x1900, Trap_19, UnknownException) | |
268 | STD_EXCEPTION(0x1a00, Trap_1a, UnknownException) | |
269 | STD_EXCEPTION(0x1b00, Trap_1b, UnknownException) | |
270 | STD_EXCEPTION(0x1c00, Trap_1c, UnknownException) | |
271 | STD_EXCEPTION(0x1d00, Trap_1d, UnknownException) | |
272 | STD_EXCEPTION(0x1e00, Trap_1e, UnknownException) | |
273 | STD_EXCEPTION(0x1f00, Trap_1f, UnknownException) | |
274 | STD_EXCEPTION(0x2000, Trap_20, UnknownException) | |
275 | STD_EXCEPTION(0x2100, Trap_21, UnknownException) | |
276 | STD_EXCEPTION(0x2200, Trap_22, UnknownException) | |
277 | STD_EXCEPTION(0x2300, Trap_23, UnknownException) | |
278 | STD_EXCEPTION(0x2400, Trap_24, UnknownException) | |
279 | STD_EXCEPTION(0x2500, Trap_25, UnknownException) | |
280 | STD_EXCEPTION(0x2600, Trap_26, UnknownException) | |
281 | STD_EXCEPTION(0x2700, Trap_27, UnknownException) | |
282 | STD_EXCEPTION(0x2800, Trap_28, UnknownException) | |
283 | STD_EXCEPTION(0x2900, Trap_29, UnknownException) | |
284 | STD_EXCEPTION(0x2a00, Trap_2a, UnknownException) | |
285 | STD_EXCEPTION(0x2b00, Trap_2b, UnknownException) | |
286 | STD_EXCEPTION(0x2c00, Trap_2c, UnknownException) | |
287 | STD_EXCEPTION(0x2d00, Trap_2d, UnknownException) | |
288 | STD_EXCEPTION(0x2e00, Trap_2e, UnknownException) | |
289 | STD_EXCEPTION(0x2f00, Trap_2f, UnknownException) | |
290 | ||
291 | ||
292 | .globl _end_of_vectors | |
293 | _end_of_vectors: | |
294 | ||
295 | . = 0x3000 | |
296 | ||
297 | /* | |
298 | * This code finishes saving the registers to the exception frame | |
299 | * and jumps to the appropriate handler for the exception. | |
300 | * Register r21 is pointer into trap frame, r1 has new stack pointer. | |
301 | */ | |
302 | .globl transfer_to_handler | |
303 | transfer_to_handler: | |
304 | stw r22,_NIP(r21) | |
305 | lis r22,MSR_POW@h | |
306 | andc r23,r23,r22 | |
307 | stw r23,_MSR(r21) | |
308 | SAVE_GPR(7, r21) | |
309 | SAVE_4GPRS(8, r21) | |
310 | SAVE_8GPRS(12, r21) | |
311 | SAVE_8GPRS(24, r21) | |
312 | mflr r23 | |
313 | andi. r24,r23,0x3f00 /* get vector offset */ | |
314 | stw r24,TRAP(r21) | |
315 | li r22,0 | |
316 | stw r22,RESULT(r21) | |
317 | lwz r24,0(r23) /* virtual address of handler */ | |
318 | lwz r23,4(r23) /* where to go when done */ | |
319 | mtspr SRR0,r24 | |
320 | mtspr SRR1,r20 | |
321 | mtlr r23 | |
322 | SYNC | |
323 | rfi /* jump to handler, enable MMU */ | |
324 | ||
325 | int_return: | |
326 | mfmsr r28 /* Disable interrupts */ | |
327 | li r4,0 | |
328 | ori r4,r4,MSR_EE | |
329 | andc r28,r28,r4 | |
330 | SYNC /* Some chip revs need this... */ | |
331 | mtmsr r28 | |
332 | SYNC | |
333 | lwz r2,_CTR(r1) | |
334 | lwz r0,_LINK(r1) | |
335 | mtctr r2 | |
336 | mtlr r0 | |
337 | lwz r2,_XER(r1) | |
338 | lwz r0,_CCR(r1) | |
339 | mtspr XER,r2 | |
340 | mtcrf 0xFF,r0 | |
341 | REST_10GPRS(3, r1) | |
342 | REST_10GPRS(13, r1) | |
343 | REST_8GPRS(23, r1) | |
344 | REST_GPR(31, r1) | |
345 | lwz r2,_NIP(r1) /* Restore environment */ | |
346 | lwz r0,_MSR(r1) | |
347 | mtspr SRR0,r2 | |
348 | mtspr SRR1,r0 | |
349 | lwz r0,GPR0(r1) | |
350 | lwz r2,GPR2(r1) | |
351 | lwz r1,GPR1(r1) | |
352 | SYNC | |
353 | rfi | |
354 | ||
355 | /* | |
356 | * This code initialises the MPC5xxx processor core | |
357 | * (conforms to PowerPC 603e spec) | |
358 | * Note: expects original MSR contents to be in r5. | |
359 | */ | |
360 | ||
361 | .globl init_5xx_core | |
362 | init_5xxx_core: | |
363 | ||
364 | /* Initialize machine status; enable machine check interrupt */ | |
365 | /*--------------------------------------------------------------*/ | |
366 | ||
367 | li r3, MSR_KERNEL /* Set ME and RI flags */ | |
368 | rlwimi r3, r5, 0, 25, 25 /* preserve IP bit set by HRCW */ | |
369 | #ifdef DEBUG | |
370 | rlwimi r3, r5, 0, 21, 22 /* debugger might set SE & BE bits */ | |
371 | #endif | |
372 | SYNC /* Some chip revs need this... */ | |
373 | mtmsr r3 | |
374 | SYNC | |
375 | mtspr SRR1, r3 /* Make SRR1 match MSR */ | |
376 | ||
377 | /* Initialize the Hardware Implementation-dependent Registers */ | |
378 | /* HID0 also contains cache control */ | |
379 | /*--------------------------------------------------------------*/ | |
380 | ||
6d0f6bcf JCPV |
381 | lis r3, CONFIG_SYS_HID0_INIT@h |
382 | ori r3, r3, CONFIG_SYS_HID0_INIT@l | |
945af8d7 WD |
383 | SYNC |
384 | mtspr HID0, r3 | |
385 | ||
6d0f6bcf JCPV |
386 | lis r3, CONFIG_SYS_HID0_FINAL@h |
387 | ori r3, r3, CONFIG_SYS_HID0_FINAL@l | |
945af8d7 WD |
388 | SYNC |
389 | mtspr HID0, r3 | |
390 | ||
391 | /* clear all BAT's */ | |
392 | /*--------------------------------------------------------------*/ | |
393 | ||
394 | li r0, 0 | |
395 | mtspr DBAT0U, r0 | |
396 | mtspr DBAT0L, r0 | |
397 | mtspr DBAT1U, r0 | |
398 | mtspr DBAT1L, r0 | |
399 | mtspr DBAT2U, r0 | |
400 | mtspr DBAT2L, r0 | |
401 | mtspr DBAT3U, r0 | |
402 | mtspr DBAT3L, r0 | |
35656de7 WD |
403 | mtspr DBAT4U, r0 |
404 | mtspr DBAT4L, r0 | |
405 | mtspr DBAT5U, r0 | |
406 | mtspr DBAT5L, r0 | |
407 | mtspr DBAT6U, r0 | |
408 | mtspr DBAT6L, r0 | |
409 | mtspr DBAT7U, r0 | |
410 | mtspr DBAT7L, r0 | |
945af8d7 WD |
411 | mtspr IBAT0U, r0 |
412 | mtspr IBAT0L, r0 | |
413 | mtspr IBAT1U, r0 | |
414 | mtspr IBAT1L, r0 | |
415 | mtspr IBAT2U, r0 | |
416 | mtspr IBAT2L, r0 | |
417 | mtspr IBAT3U, r0 | |
418 | mtspr IBAT3L, r0 | |
35656de7 WD |
419 | mtspr IBAT4U, r0 |
420 | mtspr IBAT4L, r0 | |
421 | mtspr IBAT5U, r0 | |
422 | mtspr IBAT5L, r0 | |
423 | mtspr IBAT6U, r0 | |
424 | mtspr IBAT6L, r0 | |
425 | mtspr IBAT7U, r0 | |
426 | mtspr IBAT7L, r0 | |
945af8d7 WD |
427 | SYNC |
428 | ||
429 | /* invalidate all tlb's */ | |
430 | /* */ | |
431 | /* From the 603e User Manual: "The 603e provides the ability to */ | |
432 | /* invalidate a TLB entry. The TLB Invalidate Entry (tlbie) */ | |
433 | /* instruction invalidates the TLB entry indexed by the EA, and */ | |
434 | /* operates on both the instruction and data TLBs simultaneously*/ | |
435 | /* invalidating four TLB entries (both sets in each TLB). The */ | |
436 | /* index corresponds to bits 15-19 of the EA. To invalidate all */ | |
437 | /* entries within both TLBs, 32 tlbie instructions should be */ | |
438 | /* issued, incrementing this field by one each time." */ | |
439 | /* */ | |
440 | /* "Note that the tlbia instruction is not implemented on the */ | |
441 | /* 603e." */ | |
442 | /* */ | |
443 | /* bits 15-19 correspond to addresses 0x00000000 to 0x0001F000 */ | |
444 | /* incrementing by 0x1000 each time. The code below is sort of */ | |
a47a12be | 445 | /* based on code in "flush_tlbs" from arch/powerpc/kernel/head.S */ |
945af8d7 WD |
446 | /* */ |
447 | /*--------------------------------------------------------------*/ | |
448 | ||
449 | li r3, 32 | |
450 | mtctr r3 | |
451 | li r3, 0 | |
452 | 1: tlbie r3 | |
453 | addi r3, r3, 0x1000 | |
454 | bdnz 1b | |
455 | SYNC | |
456 | ||
457 | /* Done! */ | |
458 | /*--------------------------------------------------------------*/ | |
459 | ||
460 | blr | |
461 | ||
462 | /* Cache functions. | |
463 | * | |
464 | * Note: requires that all cache bits in | |
465 | * HID0 are in the low half word. | |
466 | */ | |
467 | .globl icache_enable | |
468 | icache_enable: | |
469 | mfspr r3, HID0 | |
470 | ori r3, r3, HID0_ICE | |
471 | lis r4, 0 | |
472 | ori r4, r4, HID0_ILOCK | |
473 | andc r3, r3, r4 | |
474 | ori r4, r3, HID0_ICFI | |
475 | isync | |
476 | mtspr HID0, r4 /* sets enable and invalidate, clears lock */ | |
477 | isync | |
478 | mtspr HID0, r3 /* clears invalidate */ | |
479 | blr | |
480 | ||
481 | .globl icache_disable | |
482 | icache_disable: | |
483 | mfspr r3, HID0 | |
484 | lis r4, 0 | |
485 | ori r4, r4, HID0_ICE|HID0_ILOCK | |
486 | andc r3, r3, r4 | |
487 | ori r4, r3, HID0_ICFI | |
488 | isync | |
489 | mtspr HID0, r4 /* sets invalidate, clears enable and lock */ | |
490 | isync | |
491 | mtspr HID0, r3 /* clears invalidate */ | |
492 | blr | |
493 | ||
494 | .globl icache_status | |
495 | icache_status: | |
496 | mfspr r3, HID0 | |
497 | rlwinm r3, r3, HID0_ICE_BITPOS + 1, 31, 31 | |
498 | blr | |
499 | ||
500 | .globl dcache_enable | |
501 | dcache_enable: | |
502 | mfspr r3, HID0 | |
503 | ori r3, r3, HID0_DCE | |
504 | lis r4, 0 | |
505 | ori r4, r4, HID0_DLOCK | |
506 | andc r3, r3, r4 | |
507 | ori r4, r3, HID0_DCI | |
508 | sync | |
509 | mtspr HID0, r4 /* sets enable and invalidate, clears lock */ | |
510 | sync | |
511 | mtspr HID0, r3 /* clears invalidate */ | |
512 | blr | |
513 | ||
514 | .globl dcache_disable | |
515 | dcache_disable: | |
516 | mfspr r3, HID0 | |
517 | lis r4, 0 | |
518 | ori r4, r4, HID0_DCE|HID0_DLOCK | |
519 | andc r3, r3, r4 | |
520 | ori r4, r3, HID0_DCI | |
521 | sync | |
522 | mtspr HID0, r4 /* sets invalidate, clears enable and lock */ | |
523 | sync | |
524 | mtspr HID0, r3 /* clears invalidate */ | |
525 | blr | |
526 | ||
527 | .globl dcache_status | |
528 | dcache_status: | |
529 | mfspr r3, HID0 | |
530 | rlwinm r3, r3, HID0_DCE_BITPOS + 1, 31, 31 | |
531 | blr | |
532 | ||
36c72877 WD |
533 | .globl get_svr |
534 | get_svr: | |
535 | mfspr r3, SVR | |
536 | blr | |
537 | ||
945af8d7 WD |
538 | .globl get_pvr |
539 | get_pvr: | |
540 | mfspr r3, PVR | |
541 | blr | |
542 | ||
543 | /*------------------------------------------------------------------------------*/ | |
544 | ||
545 | /* | |
546 | * void relocate_code (addr_sp, gd, addr_moni) | |
547 | * | |
548 | * This "function" does not return, instead it continues in RAM | |
549 | * after relocating the monitor code. | |
550 | * | |
551 | * r3 = dest | |
552 | * r4 = src | |
553 | * r5 = length in bytes | |
554 | * r6 = cachelinesize | |
555 | */ | |
556 | .globl relocate_code | |
557 | relocate_code: | |
558 | mr r1, r3 /* Set new stack pointer */ | |
559 | mr r9, r4 /* Save copy of Global Data pointer */ | |
560 | mr r10, r5 /* Save copy of Destination Address */ | |
561 | ||
0f8aa159 | 562 | GET_GOT |
945af8d7 | 563 | mr r3, r5 /* Destination Address */ |
6d0f6bcf JCPV |
564 | lis r4, CONFIG_SYS_MONITOR_BASE@h /* Source Address */ |
565 | ori r4, r4, CONFIG_SYS_MONITOR_BASE@l | |
945af8d7 WD |
566 | lwz r5, GOT(__init_end) |
567 | sub r5, r5, r4 | |
6d0f6bcf | 568 | li r6, CONFIG_SYS_CACHELINE_SIZE /* Cache Line Size */ |
945af8d7 WD |
569 | |
570 | /* | |
571 | * Fix GOT pointer: | |
572 | * | |
6d0f6bcf | 573 | * New GOT-PTR = (old GOT-PTR - CONFIG_SYS_MONITOR_BASE) + Destination Address |
945af8d7 WD |
574 | * |
575 | * Offset: | |
576 | */ | |
577 | sub r15, r10, r4 | |
578 | ||
579 | /* First our own GOT */ | |
0f8aa159 | 580 | add r12, r12, r15 |
945af8d7 WD |
581 | /* then the one used by the C code */ |
582 | add r30, r30, r15 | |
583 | ||
584 | /* | |
585 | * Now relocate code | |
586 | */ | |
587 | ||
588 | cmplw cr1,r3,r4 | |
589 | addi r0,r5,3 | |
590 | srwi. r0,r0,2 | |
591 | beq cr1,4f /* In place copy is not necessary */ | |
592 | beq 7f /* Protect against 0 count */ | |
593 | mtctr r0 | |
594 | bge cr1,2f | |
595 | ||
596 | la r8,-4(r4) | |
597 | la r7,-4(r3) | |
598 | 1: lwzu r0,4(r8) | |
599 | stwu r0,4(r7) | |
600 | bdnz 1b | |
601 | b 4f | |
602 | ||
603 | 2: slwi r0,r0,2 | |
604 | add r8,r4,r0 | |
605 | add r7,r3,r0 | |
606 | 3: lwzu r0,-4(r8) | |
607 | stwu r0,-4(r7) | |
608 | bdnz 3b | |
609 | ||
610 | /* | |
611 | * Now flush the cache: note that we must start from a cache aligned | |
612 | * address. Otherwise we might miss one cache line. | |
613 | */ | |
614 | 4: cmpwi r6,0 | |
615 | add r5,r3,r5 | |
616 | beq 7f /* Always flush prefetch queue in any case */ | |
617 | subi r0,r6,1 | |
618 | andc r3,r3,r0 | |
619 | mfspr r7,HID0 /* don't do dcbst if dcache is disabled */ | |
620 | rlwinm r7,r7,HID0_DCE_BITPOS+1,31,31 | |
621 | cmpwi r7,0 | |
622 | beq 9f | |
623 | mr r4,r3 | |
624 | 5: dcbst 0,r4 | |
625 | add r4,r4,r6 | |
626 | cmplw r4,r5 | |
627 | blt 5b | |
628 | sync /* Wait for all dcbst to complete on bus */ | |
629 | 9: mfspr r7,HID0 /* don't do icbi if icache is disabled */ | |
630 | rlwinm r7,r7,HID0_ICE_BITPOS+1,31,31 | |
631 | cmpwi r7,0 | |
632 | beq 7f | |
633 | mr r4,r3 | |
634 | 6: icbi 0,r4 | |
635 | add r4,r4,r6 | |
636 | cmplw r4,r5 | |
637 | blt 6b | |
638 | 7: sync /* Wait for all icbi to complete on bus */ | |
639 | isync | |
640 | ||
641 | /* | |
642 | * We are done. Do not return, instead branch to second part of board | |
643 | * initialization, now running from RAM. | |
644 | */ | |
645 | ||
646 | addi r0, r10, in_ram - _start + EXC_OFF_SYS_RESET | |
647 | mtlr r0 | |
648 | blr | |
649 | ||
650 | in_ram: | |
651 | ||
652 | /* | |
0f8aa159 | 653 | * Relocation Function, r12 point to got2+0x8000 |
945af8d7 WD |
654 | * |
655 | * Adjust got2 pointers, no need to check for 0, this code | |
656 | * already puts a few entries in the table. | |
657 | */ | |
658 | li r0,__got2_entries@sectoff@l | |
659 | la r3,GOT(_GOT2_TABLE_) | |
660 | lwz r11,GOT(_GOT2_TABLE_) | |
661 | mtctr r0 | |
662 | sub r11,r3,r11 | |
663 | addi r3,r3,-4 | |
664 | 1: lwzu r0,4(r3) | |
afc3ba0f JT |
665 | cmpwi r0,0 |
666 | beq- 2f | |
945af8d7 WD |
667 | add r0,r0,r11 |
668 | stw r0,0(r3) | |
afc3ba0f | 669 | 2: bdnz 1b |
945af8d7 WD |
670 | |
671 | /* | |
672 | * Now adjust the fixups and the pointers to the fixups | |
673 | * in case we need to move ourselves again. | |
674 | */ | |
afc3ba0f | 675 | li r0,__fixup_entries@sectoff@l |
945af8d7 WD |
676 | lwz r3,GOT(_FIXUP_TABLE_) |
677 | cmpwi r0,0 | |
678 | mtctr r0 | |
679 | addi r3,r3,-4 | |
680 | beq 4f | |
681 | 3: lwzu r4,4(r3) | |
682 | lwzux r0,r4,r11 | |
683 | add r0,r0,r11 | |
684 | stw r10,0(r3) | |
685 | stw r0,0(r4) | |
686 | bdnz 3b | |
687 | 4: | |
688 | clear_bss: | |
689 | /* | |
690 | * Now clear BSS segment | |
691 | */ | |
692 | lwz r3,GOT(__bss_start) | |
693 | lwz r4,GOT(_end) | |
694 | ||
695 | cmplw 0, r3, r4 | |
696 | beq 6f | |
697 | ||
698 | li r0, 0 | |
699 | 5: | |
700 | stw r0, 0(r3) | |
701 | addi r3, r3, 4 | |
702 | cmplw 0, r3, r4 | |
703 | bne 5b | |
704 | 6: | |
705 | ||
706 | mr r3, r9 /* Global Data pointer */ | |
707 | mr r4, r10 /* Destination Address */ | |
708 | bl board_init_r | |
709 | ||
710 | /* | |
711 | * Copy exception vector code to low memory | |
712 | * | |
713 | * r3: dest_addr | |
714 | * r7: source address, r8: end address, r9: target address | |
715 | */ | |
716 | .globl trap_init | |
717 | trap_init: | |
0f8aa159 JT |
718 | mflr r4 /* save link register */ |
719 | GET_GOT | |
945af8d7 WD |
720 | lwz r7, GOT(_start) |
721 | lwz r8, GOT(_end_of_vectors) | |
722 | ||
723 | li r9, 0x100 /* reset vector always at 0x100 */ | |
724 | ||
725 | cmplw 0, r7, r8 | |
726 | bgelr /* return if r7>=r8 - just in case */ | |
945af8d7 WD |
727 | 1: |
728 | lwz r0, 0(r7) | |
729 | stw r0, 0(r9) | |
730 | addi r7, r7, 4 | |
731 | addi r9, r9, 4 | |
732 | cmplw 0, r7, r8 | |
733 | bne 1b | |
734 | ||
735 | /* | |
736 | * relocate `hdlr' and `int_return' entries | |
737 | */ | |
738 | li r7, .L_MachineCheck - _start + EXC_OFF_SYS_RESET | |
739 | li r8, Alignment - _start + EXC_OFF_SYS_RESET | |
740 | 2: | |
741 | bl trap_reloc | |
742 | addi r7, r7, 0x100 /* next exception vector */ | |
743 | cmplw 0, r7, r8 | |
744 | blt 2b | |
745 | ||
746 | li r7, .L_Alignment - _start + EXC_OFF_SYS_RESET | |
747 | bl trap_reloc | |
748 | ||
749 | li r7, .L_ProgramCheck - _start + EXC_OFF_SYS_RESET | |
750 | bl trap_reloc | |
751 | ||
752 | li r7, .L_FPUnavailable - _start + EXC_OFF_SYS_RESET | |
753 | li r8, SystemCall - _start + EXC_OFF_SYS_RESET | |
754 | 3: | |
755 | bl trap_reloc | |
756 | addi r7, r7, 0x100 /* next exception vector */ | |
757 | cmplw 0, r7, r8 | |
758 | blt 3b | |
759 | ||
760 | li r7, .L_SingleStep - _start + EXC_OFF_SYS_RESET | |
761 | li r8, _end_of_vectors - _start + EXC_OFF_SYS_RESET | |
762 | 4: | |
763 | bl trap_reloc | |
764 | addi r7, r7, 0x100 /* next exception vector */ | |
765 | cmplw 0, r7, r8 | |
766 | blt 4b | |
767 | ||
768 | mfmsr r3 /* now that the vectors have */ | |
769 | lis r7, MSR_IP@h /* relocated into low memory */ | |
770 | ori r7, r7, MSR_IP@l /* MSR[IP] can be turned off */ | |
771 | andc r3, r3, r7 /* (if it was on) */ | |
772 | SYNC /* Some chip revs need this... */ | |
773 | mtmsr r3 | |
774 | SYNC | |
775 | ||
776 | mtlr r4 /* restore link register */ | |
777 | blr |