/*
 * Copyright 2004, 2007 Freescale Semiconductor.
 * Srikanth Srinivasan <srikanth.srinivasan@freescale.com>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
23 | ||
/* U-Boot - Startup Code for 86xx PowerPC based Embedded Boards
 *
 * The processor starts at 0xfff00100 and the code is executed
 * from flash.  The code is linked to run at another address in
 * memory, which is fine as long as we don't jump around before
 * relocating.  board_init lies at a quite high address and when
 * the cpu has jumped there, everything is ok.
 */
33 | #include <config.h> | |
34 | #include <mpc86xx.h> | |
35 | #include <version.h> | |
36 | ||
37 | #include <ppc_asm.tmpl> | |
38 | #include <ppc_defs.h> | |
39 | ||
40 | #include <asm/cache.h> | |
41 | #include <asm/mmu.h> | |
42 | ||
43 | #ifndef CONFIG_IDENT_STRING | |
44 | #define CONFIG_IDENT_STRING "" | |
45 | #endif | |
46 | ||
47 | /* | |
48 | * Need MSR_DR | MSR_IR enabled to access I/O (printf) in exceptions | |
49 | */ | |
50 | ||
/*
 * Set up GOT: Global Offset Table
 *
 * Use r14 to access the GOT.  Symbols listed here are resolvable
 * via the GOT() macro before and after relocation to RAM.
 */
	START_GOT
	GOT_ENTRY(_GOT2_TABLE_)
	GOT_ENTRY(_FIXUP_TABLE_)

	GOT_ENTRY(_start)
	GOT_ENTRY(_start_of_vectors)
	GOT_ENTRY(_end_of_vectors)
	GOT_ENTRY(transfer_to_handler)

	GOT_ENTRY(__init_end)
	GOT_ENTRY(_end)
	GOT_ENTRY(__bss_start)
	END_GOT
69 | ||
/*
 * r3 - 1st arg to board_init(): IMMP pointer
 * r4 - 2nd arg to board_init(): boot flag
 */
	.text
	.long	0x27051956		/* U-Boot Magic Number		*/
	.globl	version_string
version_string:
	.ascii U_BOOT_VERSION
	.ascii " (", __DATE__, " - ", __TIME__, ")"
	.ascii CONFIG_IDENT_STRING, "\0"

	/* Cold-reset entry: placed at the system reset exception vector.
	 * r21 carries the boot flag until board_init_f() is called. */
	. = EXC_OFF_SYS_RESET
	.globl	_start
_start:
	li	r21, BOOTFLAG_COLD	/* Normal Power-On: Boot from FLASH */
	b	boot_cold
	sync

	/* Warm-boot entry, fixed at 0x10 past the reset vector. */
	. = EXC_OFF_SYS_RESET + 0x10

	.globl	_start_warm
_start_warm:
	li	r21, BOOTFLAG_WARM	/* Software reboot		*/
	b	boot_warm
	sync
/* the boot code is located below the exception table */

	.globl	_start_of_vectors
_start_of_vectors:

/* Machine check */
	STD_EXCEPTION(0x200, MachineCheck, MachineCheckException)

/* Data Storage exception. */
	STD_EXCEPTION(0x300, DataStorage, UnknownException)

/* Instruction Storage exception. */
	STD_EXCEPTION(0x400, InstStorage, UnknownException)

/* External Interrupt exception. */
	STD_EXCEPTION(0x500, ExtInterrupt, external_interrupt)

/* Alignment exception.  Hand-coded (not STD_EXCEPTION) because it
 * additionally captures DAR/DSISR into the trap frame. */
	. = 0x600
Alignment:
	EXCEPTION_PROLOG(SRR0, SRR1)
	mfspr	r4,DAR			/* faulting data address	*/
	stw	r4,_DAR(r21)
	mfspr	r5,DSISR		/* fault cause bits		*/
	stw	r5,_DSISR(r21)
	addi	r3,r1,STACK_FRAME_OVERHEAD
	li	r20,MSR_KERNEL
	rlwimi	r20,r23,0,16,16		/* copy EE bit from saved MSR */
	lwz	r6,GOT(transfer_to_handler)
	mtlr	r6
	blrl
.L_Alignment:
	/* (hdlr, int_return) pair; patched in RAM by trap_init/trap_reloc */
	.long	AlignmentException - _start + EXC_OFF_SYS_RESET
	.long	int_return - _start + EXC_OFF_SYS_RESET

/* Program check exception */
	. = 0x700
ProgramCheck:
	EXCEPTION_PROLOG(SRR0, SRR1)
	addi	r3,r1,STACK_FRAME_OVERHEAD
	li	r20,MSR_KERNEL
	rlwimi	r20,r23,0,16,16		/* copy EE bit from saved MSR */
	lwz	r6,GOT(transfer_to_handler)
	mtlr	r6
	blrl
.L_ProgramCheck:
	.long	ProgramCheckException - _start + EXC_OFF_SYS_RESET
	.long	int_return - _start + EXC_OFF_SYS_RESET

	STD_EXCEPTION(0x800, FPUnavailable, UnknownException)

	/* I guess we could implement decrementer, and may have
	 * to someday for timekeeping.
	 */
	STD_EXCEPTION(0x900, Decrementer, timer_interrupt)
	STD_EXCEPTION(0xa00, Trap_0a, UnknownException)
	STD_EXCEPTION(0xb00, Trap_0b, UnknownException)
	STD_EXCEPTION(0xc00, SystemCall, UnknownException)
	STD_EXCEPTION(0xd00, SingleStep, UnknownException)
	STD_EXCEPTION(0xe00, Trap_0e, UnknownException)
	STD_EXCEPTION(0xf00, Trap_0f, UnknownException)
	STD_EXCEPTION(0x1000, SoftEmu, SoftEmuException)
	STD_EXCEPTION(0x1100, InstructionTLBMiss, UnknownException)
	STD_EXCEPTION(0x1200, DataTLBMiss, UnknownException)
	STD_EXCEPTION(0x1300, InstructionTLBError, UnknownException)
	STD_EXCEPTION(0x1400, DataTLBError, UnknownException)
	STD_EXCEPTION(0x1500, Reserved5, UnknownException)
	STD_EXCEPTION(0x1600, Reserved6, UnknownException)
	STD_EXCEPTION(0x1700, Reserved7, UnknownException)
	STD_EXCEPTION(0x1800, Reserved8, UnknownException)
	STD_EXCEPTION(0x1900, Reserved9, UnknownException)
	STD_EXCEPTION(0x1a00, ReservedA, UnknownException)
	STD_EXCEPTION(0x1b00, ReservedB, UnknownException)
	STD_EXCEPTION(0x1c00, DataBreakpoint, UnknownException)
	STD_EXCEPTION(0x1d00, InstructionBreakpoint, UnknownException)
	STD_EXCEPTION(0x1e00, PeripheralBreakpoint, UnknownException)
	STD_EXCEPTION(0x1f00, DevPortBreakpoint, UnknownException)

	.globl	_end_of_vectors
_end_of_vectors:
177 | ||
	. = 0x2000

boot_cold:
boot_warm:

	/* if this is a multi-core system we need to check which cpu
	 * this is, if it is not cpu 0 send the cpu to the linux reset
	 * vector */
#if (CONFIG_NUM_CPUS > 1)
	mfspr	r0, MSSCR0
	andi.	r0, r0, 0x0020		/* isolate the cpu-id bit	*/
	rlwinm	r0,r0,27,31,31		/* shift it down to bit 31 (0/1) */
	mtspr	PIR, r0			/* record core number in PIR	*/
	beq	1f			/* core 0 continues normal boot	*/

	bl	secondary_cpu_setup	/* park other cores (no return)	*/
#endif

1:
#ifdef CONFIG_SYS_RAMBOOT
	/* disable everything */
	li	r0, 0
	mtspr	HID0, r0
	sync
	mtmsr	0
#endif

	bl	invalidate_bats
	sync

#ifdef CONFIG_SYS_L2
	/* init the L2 cache */
	lis	r3, L2_INIT@h
	ori	r3, r3, L2_INIT@l
	mtspr	l2cr, r3
	/* invalidate the L2 cache */
	bl	l2cache_invalidate
	sync
#endif

	/*
	 * Calculate absolute address in FLASH and jump there
	 *------------------------------------------------------*/
	lis	r3, CONFIG_SYS_MONITOR_BASE@h
	ori	r3, r3, CONFIG_SYS_MONITOR_BASE@l
	addi	r3, r3, in_flash - _start + EXC_OFF_SYS_RESET
	mtlr	r3
	blr

in_flash:
	/* let the C-code set up the rest */
	/*  */
	/* Be careful to keep code relocatable ! */
	/*------------------------------------------------------*/
	/* perform low-level init */

	/* enable extended addressing */
	bl	enable_ext_addr

	/* setup the bats */
	bl	early_bats

	/*
	 * Cache must be enabled here for stack-in-cache trick.
	 * This means we need to enable the BATS.
	 * Cache should be turned on after BATs, since by default
	 * everything is write-through.
	 */

	/* enable address translation */
	bl	enable_addr_trans
	sync

	/* enable and invalidate the data cache */
/*	bl	l1dcache_enable */
	bl	dcache_enable
	sync

#if 1
	bl	icache_enable
#endif

#ifdef CONFIG_SYS_INIT_RAM_LOCK
	bl	lock_ram_in_cache
	sync
#endif

	/* set up the stack pointer in our newly created
	 * cache-ram (r1) */
	lis	r1, (CONFIG_SYS_INIT_RAM_ADDR + CONFIG_SYS_GBL_DATA_OFFSET)@h
	ori	r1, r1, (CONFIG_SYS_INIT_RAM_ADDR + CONFIG_SYS_GBL_DATA_OFFSET)@l

	li	r0, 0		/* Make room for stack frame header and	*/
	stwu	r0, -4(r1)	/* clear final stack frame so that	*/
	stwu	r0, -4(r1)	/* stack backtraces terminate cleanly	*/

	GET_GOT			/* initialize GOT access		*/

	/* setup the rest of the bats */
	bl	setup_bats
	bl	clear_tlbs
	sync

#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
	/* setup ccsrbar */
	bl	setup_ccsrbar
#endif

	/* run low-level CPU init code (from Flash) */
	bl	cpu_init_f
	sync

#ifdef	RUN_DIAG

	/* Load PX_AUX register address in r4 */
	lis	r4, 0xf810
	ori	r4, r4, 0x6
	/* Load contents of PX_AUX in r3 bits 24 to 31*/
	lbz	r3, 0(r4)

	/* Mask and obtain the bit in r3 */
	rlwinm.	r3, r3, 0, 24, 24
	/* If not zero, jump and continue with u-boot */
	bne	diag_done

	/* Load back contents of PX_AUX in r3 bits 24 to 31 */
	lbz	r3, 0(r4)
	/* Set the MSB of the register value */
	ori	r3, r3, 0x80
	/* Write value in r3 back to PX_AUX */
	stb	r3, 0(r4)

	/* Get the address to jump to in r3*/
	lis	r3, CONFIG_SYS_DIAG_ADDR@h
	ori	r3, r3, CONFIG_SYS_DIAG_ADDR@l

	/* Load the LR with the branch address */
	mtlr	r3

	/* Branch to diagnostic */
	blr

diag_done:
#endif

/*	bl	l2cache_enable */
	mr	r3, r21		/* boot flag saved at the reset entry	*/

	/* r3: BOOTFLAG */
	/* run 1st part of board init code (from Flash) */
	bl	board_init_f
	sync

	/* NOTREACHED - board_init_f() does not return */
332 | ||
/*
 * invalidate_bats - disable all instruction and data BAT mappings.
 * Clobbers r0.  Clearing the upper BAT register clears its valid
 * bits, so the whole mapping is disabled.
 */
	.globl	invalidate_bats
invalidate_bats:

	li	r0, 0
	/* invalidate BATs */
	mtspr	IBAT0U, r0
	mtspr	IBAT1U, r0
	mtspr	IBAT2U, r0
	mtspr	IBAT3U, r0
	mtspr	IBAT4U, r0
	mtspr	IBAT5U, r0
	mtspr	IBAT6U, r0
	mtspr	IBAT7U, r0

	isync
	/* same for the data-side BATs */
	mtspr	DBAT0U, r0
	mtspr	DBAT1U, r0
	mtspr	DBAT2U, r0
	mtspr	DBAT3U, r0
	mtspr	DBAT4U, r0
	mtspr	DBAT5U, r0
	mtspr	DBAT6U, r0
	mtspr	DBAT7U, r0

	isync
	sync
	blr
360 | ||
/*
 * early_bats:
 *
 * Set up bats needed early on - this is usually the BAT for the
 * stack-in-cache and the Flash.
 * Clobbers r3, r4.  Values come from the board config (CONFIG_SYS_*BAT*).
 */
	.globl	early_bats
early_bats:
	/* IBAT 5 */
	lis	r4, CONFIG_SYS_IBAT5L@h
	ori	r4, r4, CONFIG_SYS_IBAT5L@l
	lis	r3, CONFIG_SYS_IBAT5U@h
	ori	r3, r3, CONFIG_SYS_IBAT5U@l
	mtspr	IBAT5L, r4		/* lower word first; the upper word
					 * carries the valid bits */
	mtspr	IBAT5U, r3
	isync

	/* DBAT 5 */
	lis	r4, CONFIG_SYS_DBAT5L@h
	ori	r4, r4, CONFIG_SYS_DBAT5L@l
	lis	r3, CONFIG_SYS_DBAT5U@h
	ori	r3, r3, CONFIG_SYS_DBAT5U@l
	mtspr	DBAT5L, r4
	mtspr	DBAT5U, r3
	isync

	/* IBAT 6 */
	lis	r4, CONFIG_SYS_IBAT6L@h
	ori	r4, r4, CONFIG_SYS_IBAT6L@l
	lis	r3, CONFIG_SYS_IBAT6U@h
	ori	r3, r3, CONFIG_SYS_IBAT6U@l
	mtspr	IBAT6L, r4
	mtspr	IBAT6U, r3
	isync

	/* DBAT 6 */
	lis	r4, CONFIG_SYS_DBAT6L@h
	ori	r4, r4, CONFIG_SYS_DBAT6L@l
	lis	r3, CONFIG_SYS_DBAT6U@h
	ori	r3, r3, CONFIG_SYS_DBAT6U@l
	mtspr	DBAT6L, r4
	mtspr	DBAT6U, r3
	isync
	blr
405 | ||
/*
 * clear_tlbs - invalidate TLB entries for effective addresses
 * 0x0000_0000 .. 0x0004_0000 in 4 KB steps.  Clobbers r3, r5.
 */
	.globl	clear_tlbs
clear_tlbs:
	addis	r3, 0, 0x0000		/* r3 = current EA		*/
	addis	r5, 0, 0x4		/* r5 = 0x00040000 = end	*/
	isync
tlblp:
	tlbie	r3			/* invalidate entry for this EA	*/
	sync
	addi	r3, r3, 0x1000		/* advance one 4 KB page	*/
	cmp	0, 0, r3, r5
	blt	tlblp
	blr
418 | ||
/*
 * enable_addr_trans - turn on MMU address translation.
 * Sets MSR[IR] (instruction relocate) and MSR[DR] (data relocate).
 * Clobbers r5.
 */
	.globl	enable_addr_trans
enable_addr_trans:
	/* enable address translation */
	mfmsr	r5
	ori	r5, r5, (MSR_IR | MSR_DR)
	mtmsr	r5
	isync				/* context-synchronize MSR change */
	blr
427 | ||
/*
 * disable_addr_trans - turn off MMU address translation.
 * Returns via rfi so the MSR change and the return happen atomically.
 * Clobbers r0, r3, r4.
 */
	.globl	disable_addr_trans
disable_addr_trans:
	/* disable address translation */
	mflr	r4			/* remember return address	*/
	mfmsr	r3
	andi.	r0, r3, (MSR_IR | MSR_DR)
	beqlr				/* translation already off	*/
	andc	r3, r3, r0		/* clear IR and DR		*/
	mtspr	SRR0, r4		/* resume at caller ...		*/
	mtspr	SRR1, r3		/* ... with the new MSR		*/
	rfi
439 | ||
/*
 * This code finishes saving the registers to the exception frame
 * and jumps to the appropriate handler for the exception.
 * Register r21 is pointer into trap frame, r1 has new stack pointer.
 * On entry LR points at the (hdlr, int_return) pair after the
 * vector stub; r20/r22/r23 were set up by EXCEPTION_PROLOG.
 */
	.globl	transfer_to_handler
transfer_to_handler:
	stw	r22,_NIP(r21)		/* save interrupted PC		*/
	lis	r22,MSR_POW@h
	andc	r23,r23,r22		/* clear POW in the saved MSR	*/
	stw	r23,_MSR(r21)
	SAVE_GPR(7, r21)
	SAVE_4GPRS(8, r21)
	SAVE_8GPRS(12, r21)
	SAVE_8GPRS(24, r21)
	mflr	r23
	andi.	r24,r23,0x3f00		/* get vector offset		*/
	stw	r24,TRAP(r21)
	li	r22,0
	stw	r22,RESULT(r21)
	mtspr	SPRG2,r22		/* r1 is now kernel sp		*/
	lwz	r24,0(r23)		/* virtual address of handler	*/
	lwz	r23,4(r23)		/* where to go when done	*/
	mtspr	SRR0,r24
	mtspr	SRR1,r20
	mtlr	r23
	SYNC
	rfi				/* jump to handler, enable MMU	*/
468 | ||
/*
 * int_return - common exception exit path: restore the full register
 * state from the trap frame on r1 and resume via rfi.
 */
int_return:
	mfmsr	r28			/* Disable interrupts		*/
	li	r4,0
	ori	r4,r4,MSR_EE
	andc	r28,r28,r4
	SYNC				/* Some chip revs need this...	*/
	mtmsr	r28
	SYNC
	lwz	r2,_CTR(r1)
	lwz	r0,_LINK(r1)
	mtctr	r2
	mtlr	r0
	lwz	r2,_XER(r1)
	lwz	r0,_CCR(r1)
	mtspr	XER,r2
	mtcrf	0xFF,r0
	REST_10GPRS(3, r1)
	REST_10GPRS(13, r1)
	REST_8GPRS(23, r1)
	REST_GPR(31, r1)
	lwz	r2,_NIP(r1)		/* Restore environment		*/
	lwz	r0,_MSR(r1)
	mtspr	SRR0,r2
	mtspr	SRR1,r0
	lwz	r0,GPR0(r1)
	lwz	r2,GPR2(r1)
	lwz	r1,GPR1(r1)		/* last: r1 itself		*/
	SYNC
	rfi
498 | ||
/* dc_read - empty stub on 86xx; returns immediately. */
	.globl	dc_read
dc_read:
	blr
502 | ||
/* get_pvr - return the Processor Version Register in r3. */
	.globl	get_pvr
get_pvr:
	mfspr	r3, PVR
	blr
507 | ||
/* get_svr - return the System Version Register in r3. */
	.globl	get_svr
get_svr:
	mfspr	r3, SVR
	blr
512 | ||
513 | ||
/*
 * Function:	in8
 * Description:	Input 8 bits
 *		r3 = address; returns zero-extended byte in r3
 */
	.globl	in8
in8:
	lbz	r3,0x0000(r3)
	blr
522 | ||
/*
 * Function:	out8
 * Description:	Output 8 bits
 *		r3 = address, r4 = value
 */
	.globl	out8
out8:
	stb	r4,0x0000(r3)
	blr
531 | ||
/*
 * Function:	out16
 * Description:	Output 16 bits
 *		r3 = address, r4 = value
 */
	.globl	out16
out16:
	sth	r4,0x0000(r3)
	blr
540 | ||
/*
 * Function:	out16r
 * Description:	Byte reverse and output 16 bits
 *		r3 = address, r4 = value (r0 must be 0 as the index)
 */
	.globl	out16r
out16r:
	sthbrx	r4,r0,r3
	blr
549 | ||
/*
 * Function:	out32
 * Description:	Output 32 bits
 *		r3 = address, r4 = value
 */
	.globl	out32
out32:
	stw	r4,0x0000(r3)
	blr
558 | ||
/*
 * Function:	out32r
 * Description:	Byte reverse and output 32 bits
 *		r3 = address, r4 = value
 */
	.globl	out32r
out32r:
	stwbrx	r4,r0,r3
	blr
567 | ||
/*
 * Function:	in16
 * Description:	Input 16 bits
 *		r3 = address; returns zero-extended halfword in r3
 */
	.globl	in16
in16:
	lhz	r3,0x0000(r3)
	blr
576 | ||
/*
 * Function:	in16r
 * Description:	Input 16 bits and byte reverse
 *		r3 = address; returns byte-swapped halfword in r3
 */
	.globl	in16r
in16r:
	lhbrx	r3,r0,r3
	blr
585 | ||
/*
 * Function:	in32
 * Description:	Input 32 bits
 *		r3 = address; returns the word in r3
 */
	.globl	in32
in32:
	/* use the named register form (was "lwz 3,0x0000(3)") for
	 * consistency with the other accessors; encoding is identical */
	lwz	r3,0x0000(r3)
	blr
594 | ||
/*
 * Function:	in32r
 * Description:	Input 32 bits and byte reverse
 *		r3 = address; returns byte-swapped word in r3
 */
	.globl	in32r
in32r:
	lwbrx	r3,r0,r3
	blr
603 | ||
/*
 * void relocate_code (addr_sp, gd, addr_moni)
 *
 * This "function" does not return, instead it continues in RAM
 * after relocating the monitor code.
 *
 * r3 = dest
 * r4 = src
 * r5 = length in bytes
 * r6 = cachelinesize
 */
	.globl	relocate_code
relocate_code:

	mr	r1,  r3		/* Set new stack pointer		*/
	mr	r9,  r4		/* Save copy of Global Data pointer	*/
	mr	r2,  r9		/* Save for DECLARE_GLOBAL_DATA_PTR	*/
	mr	r10, r5		/* Save copy of Destination Address	*/

	mr	r3,  r5				/* Destination Address	*/
	lis	r4, CONFIG_SYS_MONITOR_BASE@h	/* Source      Address	*/
	ori	r4, r4, CONFIG_SYS_MONITOR_BASE@l
	lwz	r5, GOT(__init_end)
	sub	r5, r5, r4			/* r5 = bytes to copy	*/
	li	r6, CONFIG_SYS_CACHELINE_SIZE	/* Cache Line Size	*/

	/*
	 * Fix GOT pointer:
	 *
	 * New GOT-PTR = (old GOT-PTR - CONFIG_SYS_MONITOR_BASE) + Destination Address
	 *
	 * Offset:
	 */
	sub	r15, r10, r4

	/* First our own GOT */
	add	r14, r14, r15
	/* then the one used by the C code */
	add	r30, r30, r15

	/*
	 * Now relocate code
	 */
#ifdef CONFIG_ECC
	bl	board_relocate_rom	/* board copy routine (ECC aware) */
	sync
	mr	r3, r10				/* Destination Address	*/
	lis	r4, CONFIG_SYS_MONITOR_BASE@h	/* Source      Address	*/
	ori	r4, r4, CONFIG_SYS_MONITOR_BASE@l
	lwz	r5, GOT(__init_end)
	sub	r5, r5, r4
	li	r6, CONFIG_SYS_CACHELINE_SIZE	/* Cache Line Size	*/
#else
	cmplw	cr1,r3,r4
	addi	r0,r5,3
	srwi.	r0,r0,2			/* word count, rounded up	*/
	beq	cr1,4f			/* In place copy is not necessary */
	beq	7f			/* Protect against 0 count	*/
	mtctr	r0
	bge	cr1,2f			/* dest >= src: copy backwards	*/

	la	r8,-4(r4)
	la	r7,-4(r3)
1:	lwzu	r0,4(r8)		/* forward word copy		*/
	stwu	r0,4(r7)
	bdnz	1b
	b	4f

2:	slwi	r0,r0,2			/* backward copy handles overlap */
	add	r8,r4,r0
	add	r7,r3,r0
3:	lwzu	r0,-4(r8)
	stwu	r0,-4(r7)
	bdnz	3b
#endif
	/*
	 * Now flush the cache: note that we must start from a cache aligned
	 * address. Otherwise we might miss one cache line.
	 */
4:	cmpwi	r6,0
	add	r5,r3,r5
	beq	7f		/* Always flush prefetch queue in any case */
	subi	r0,r6,1
	andc	r3,r3,r0	/* round start down to a cache line	*/
	mr	r4,r3
5:	dcbst	0,r4		/* push copied code out of data cache	*/
	add	r4,r4,r6
	cmplw	r4,r5
	blt	5b
	sync			/* Wait for all dcbst to complete on bus */
	mr	r4,r3
6:	icbi	0,r4		/* drop stale instruction-cache lines	*/
	add	r4,r4,r6
	cmplw	r4,r5
	blt	6b
7:	sync			/* Wait for all icbi to complete on bus	*/
	isync

	/*
	 * We are done. Do not return, instead branch to second part of board
	 * initialization, now running from RAM.
	 */
	addi	r0, r10, in_ram - _start + EXC_OFF_SYS_RESET
	mtlr	r0
	blr
709 | ||
in_ram:
#ifdef CONFIG_ECC
	bl	board_init_ecc
#endif
	/*
	 * Relocation Function, r14 point to got2+0x8000
	 *
	 * Adjust got2 pointers, no need to check for 0, this code
	 * already puts a few entries in the table.
	 * r11 = relocation offset (new GOT addr - old GOT addr).
	 */
	li	r0,__got2_entries@sectoff@l
	la	r3,GOT(_GOT2_TABLE_)
	lwz	r11,GOT(_GOT2_TABLE_)
	mtctr	r0
	sub	r11,r3,r11
	addi	r3,r3,-4
1:	lwzu	r0,4(r3)
	add	r0,r0,r11
	stw	r0,0(r3)
	bdnz	1b

	/*
	 * Now adjust the fixups and the pointers to the fixups
	 * in case we need to move ourselves again.
	 */
2:	li	r0,__fixup_entries@sectoff@l
	lwz	r3,GOT(_FIXUP_TABLE_)
	cmpwi	r0,0
	mtctr	r0
	addi	r3,r3,-4
	beq	4f
3:	lwzu	r4,4(r3)
	lwzux	r0,r4,r11
	add	r0,r0,r11
	stw	r10,0(r3)	/* NOTE(review): stores the destination
				 * address rather than the adjusted entry
				 * (r4) back into the fixup table -- looks
				 * suspicious; compare with other U-Boot
				 * ports before changing */
	stw	r0,0(r4)
	bdnz	3b
4:
/* clear_bss: */
	/*
	 * Now clear BSS segment
	 */
	lwz	r3,GOT(__bss_start)
	lwz	r4,GOT(_end)

	cmplw	0, r3, r4
	beq	6f		/* empty BSS - nothing to clear		*/

	li	r0, 0
5:
	stw	r0, 0(r3)
	addi	r3, r3, 4
	cmplw	0, r3, r4
	bne	5b
6:
	mr	r3, r9		/* Init Data pointer			*/
	mr	r4, r10		/* Destination Address			*/
	bl	board_init_r

	/* not reached - end relocate_code */
/*-----------------------------------------------------------------------*/
771 | ||
/*
 * Copy exception vector code to low memory
 *
 * r3: dest_addr
 * r7: source address, r8: end address, r9: target address
 * Also patches each vector's (hdlr, int_return) pair by dest_addr
 * via trap_reloc, then clears MSR[IP] to use the low-memory vectors.
 */
	.globl	trap_init
trap_init:
	lwz	r7, GOT(_start)
	lwz	r8, GOT(_end_of_vectors)

	li	r9, 0x100		/* reset vector always at 0x100	*/

	cmplw	0, r7, r8
	bgelr				/* return if r7>=r8 - just in case */

	mflr	r4			/* save link register		*/
1:
	lwz	r0, 0(r7)		/* word-by-word copy		*/
	stw	r0, 0(r9)
	addi	r7, r7, 4
	addi	r9, r9, 4
	cmplw	0, r7, r8
	bne	1b

	/*
	 * relocate `hdlr' and `int_return' entries
	 */
	li	r7, .L_MachineCheck - _start + EXC_OFF_SYS_RESET
	li	r8, Alignment - _start + EXC_OFF_SYS_RESET
2:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	2b

	li	r7, .L_Alignment - _start + EXC_OFF_SYS_RESET
	bl	trap_reloc

	li	r7, .L_ProgramCheck - _start + EXC_OFF_SYS_RESET
	bl	trap_reloc

	li	r7, .L_FPUnavailable - _start + EXC_OFF_SYS_RESET
	li	r8, SystemCall - _start + EXC_OFF_SYS_RESET
3:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	3b

	li	r7, .L_SingleStep - _start + EXC_OFF_SYS_RESET
	li	r8, _end_of_vectors - _start + EXC_OFF_SYS_RESET
4:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	4b

	/* enable exceptions from RAM vectors */
	mfmsr	r7
	li	r8,MSR_IP
	andc	r7,r7,r8		/* clear IP: vectors at low memory */
	ori	r7,r7,MSR_ME		/* Enable Machine Check		*/
	mtmsr	r7

	mtlr	r4			/* restore link register	*/
	blr
839 | ||
/*
 * Function: relocate entries for one exception vector
 *
 * r7 = address of the two-word (hdlr, int_return) record
 * r3 = dest_addr, added to both words.  Clobbers r0.
 */
trap_reloc:
	lwz	r0, 0(r7)		/* hdlr ...			*/
	add	r0, r0, r3		/* ... += dest_addr		*/
	stw	r0, 0(r7)

	lwz	r0, 4(r7)		/* int_return ...		*/
	add	r0, r0, r3		/* ... += dest_addr		*/
	stw	r0, 4(r7)

	sync
	isync

	blr
856 | ||
/*
 * enable_ext_addr - set HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN
 * (extended BAT block sizes / extended addressing).  Clobbers r0.
 */
	.globl	enable_ext_addr
enable_ext_addr:
	mfspr	r0, HID0	/* NOTE(review): this value is immediately
				 * overwritten by the lis below, so all other
				 * HID0 bits end up cleared -- confirm that is
				 * intended before relying on prior HID0 state */
	lis	r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@h
	ori	r0, r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@l
	mtspr	HID0, r0
	sync
	isync
	blr
866 | ||
#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
/*
 * setup_ccsrbar - move CCSRBAR from its power-on default to the
 * configured address.  Clobbers r3, r4, r5, r6.
 */
	.globl	setup_ccsrbar
setup_ccsrbar:
	/* Special sequence needed to update CCSRBAR itself */
	lis	r4, CONFIG_SYS_CCSRBAR_DEFAULT@h
	ori	r4, r4, CONFIG_SYS_CCSRBAR_DEFAULT@l

	lis	r5, CONFIG_SYS_CCSRBAR@h
	ori	r5, r5, CONFIG_SYS_CCSRBAR@l
	srwi	r6,r5,12		/* register holds base >> 12	*/
	stw	r6, 0(r4)		/* write via the old mapping	*/
	isync

	lis	r5, 0xffff
	ori	r5,r5,0xf000
	lwz	r5, 0(r5)		/* dummy read to flush the update */
	isync

	lis	r3, CONFIG_SYS_CCSRBAR@h
	lwz	r5, CONFIG_SYS_CCSRBAR@l(r3)	/* read back via new mapping */
	isync

	blr
#endif
891 | ||
#ifdef CONFIG_SYS_INIT_RAM_LOCK
/*
 * lock_ram_in_cache - establish the initial-RAM region in the data
 * cache (stack-in-cache before real RAM works) and lock the cache.
 * Clobbers r0, r3, r4.
 */
lock_ram_in_cache:
	/* Allocate Initial RAM in data cache.
	 */
	lis	r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@h
	ori	r3, r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@l
	li	r4, ((CONFIG_SYS_INIT_RAM_END & ~31) + \
		     (CONFIG_SYS_INIT_RAM_ADDR & 31) + 31) / 32
	mtctr	r4
1:
	dcbz	r0, r3		/* claim the line without reading memory */
	addi	r3, r3, 32	/* next 32-byte cache line		*/
	bdnz	1b
#if 1
	/* Lock the data cache */
	mfspr	r0, HID0
	ori	r0, r0, 0x1000	/* presumably HID0 DLOCK -- see core manual */
	sync
	mtspr	HID0, r0
	sync
	blr
#endif
#if 0
	/* Lock the first way of the data cache */
	mfspr	r0, LDSTCR
	ori	r0, r0, 0x0080
#if defined(CONFIG_ALTIVEC)
	dssall
#endif
	sync
	mtspr	LDSTCR, r0
	sync
	isync
	blr
#endif
927 | ||
/*
 * unlock_ram_in_cache - invalidate the locked initial-RAM lines and
 * unlock the data cache once real RAM is available.
 * Clobbers r0, r3, r4.
 */
	.globl	unlock_ram_in_cache
unlock_ram_in_cache:
	/* invalidate the INIT_RAM section */
	lis	r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@h
	ori	r3, r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@l
	li	r4, ((CONFIG_SYS_INIT_RAM_END & ~31) + \
		     (CONFIG_SYS_INIT_RAM_ADDR & 31) + 31) / 32
	mtctr	r4
1:	icbi	r0, r3
	addi	r3, r3, 32
	bdnz	1b
	sync			/* Wait for all icbi to complete on bus	*/
	isync
#if 1
	/* Unlock the data cache and invalidate it */
	mfspr	r0, HID0
	li	r3,0x1000	/* presumably HID0 DLOCK bit		*/
	andc	r0,r0,r3
	li	r3,0x0400	/* presumably HID0 flash-invalidate bit	*/
	or	r0,r0,r3
	sync
	mtspr	HID0, r0
	sync
	blr
#endif
#if 0
	/* Unlock the first way of the data cache */
	mfspr	r0, LDSTCR
	li	r3,0x0080
	andc	r0,r0,r3
#ifdef CONFIG_ALTIVEC
	dssall
#endif
	sync
	mtspr	LDSTCR, r0
	sync
	isync
	li	r3,0x0400
	or	r0,r0,r3
	sync
	mtspr	HID0, r0
	sync
	blr
#endif
#endif
973 | ||
/* If this is a multi-cpu system then we need to handle the
 * 2nd cpu.  The assumption is that the 2nd cpu is being
 * held in boot holdoff mode until the 1st cpu unlocks it
 * from Linux.  We'll do some basic cpu init and then pass
 * it to the Linux Reset Vector.
 * Sri:	 Much of this initialization is not required. Linux
 * rewrites the bats, and the sprs and also enables the L1 cache.
 */
#if (CONFIG_NUM_CPUS > 1)
	.globl	secondary_cpu_setup
secondary_cpu_setup:
	/* Do only core setup on all cores except cpu0 */
	bl	invalidate_bats
	sync
	bl	enable_ext_addr

#ifdef CONFIG_SYS_L2
	/* init the L2 cache */
	addis	r3, r0, L2_INIT@h
	ori	r3, r3, L2_INIT@l
	sync
	mtspr	l2cr, r3
#ifdef CONFIG_ALTIVEC
	dssall			/* stop data streams before touching L2CR */
#endif
	/* invalidate the L2 cache */
	bl	l2cache_invalidate
	sync
#endif

	/* enable and invalidate the data cache */
	bl	dcache_enable
	sync

	/* enable and invalidate the instruction cache*/
	bl	icache_enable
	sync

	/* TBEN in HID0 */
	mfspr	r4, HID0
	oris	r4, r4, 0x0400	/* presumably the TBEN bit -- see core manual */
	mtspr	HID0, r4
	sync
	isync

	/* MCP|SYNCBE|ABE in HID1 */
	mfspr	r4, HID1
	oris	r4, r4, 0x8000
	ori	r4, r4, 0x0C00
	mtspr	HID1, r4
	sync
	isync

	/* hand the core to the OS: jump to the Linux reset vector */
	lis	r3, CONFIG_LINUX_RESET_VEC@h
	ori	r3, r3, CONFIG_LINUX_RESET_VEC@l
	mtlr	r3
	blr

	/* Never Returns, Running in Linux Now */
#endif