/*
 * (C) Copyright 2013
 * David Feng <fenghua@phytium.com.cn>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>
#include <linux/linkage.h>
#include <asm/macro.h>
#include <asm/armv8/mmu.h>

/*************************************************************************
 *
 * Startup Code (reset vector)
 *
 *************************************************************************/

.globl	_start
_start:
#ifdef CONFIG_ENABLE_ARM_SOC_BOOT0_HOOK
/*
 * Various SoCs need something special and SoC-specific up front in
 * order to boot; they provide it in their boot0.h, which is spliced
 * in here in place of the plain branch to reset.
 */
#include <asm/arch/boot0.h>
#else
	b	reset
#endif

	.align 3			/* 8-byte align the .quad words below */

/* Link-time text base, readable by C code via _TEXT_BASE */
.globl	_TEXT_BASE
_TEXT_BASE:
	.quad	CONFIG_SYS_TEXT_BASE

/*
 * Image layout offsets, all relative to _start.
 * The referenced symbols are defined in the linker script.
 */
.globl	_end_ofs
_end_ofs:
	.quad	_end - _start		/* offset of image end */

.globl	_bss_start_ofs
_bss_start_ofs:
	.quad	__bss_start - _start	/* offset of BSS start */

.globl	_bss_end_ofs
_bss_end_ofs:
	.quad	__bss_end - _start	/* offset of BSS end */
53 | ||
reset:
	/* Give the board a chance to preserve bootrom-supplied registers */
	b	save_boot_params
.globl	save_boot_params_ret
save_boot_params_ret:

#ifdef CONFIG_SYS_RESET_SCTRL
	bl	reset_sctrl
#endif
	/*
	 * We may enter at EL3, EL2 or EL1. Initial state:
	 * little-endian, MMU off, I/D caches off.
	 * Set up the vector base and enable FP/SIMD for whichever
	 * exception level we are running at.
	 */
	adr	x0, vectors
	switch_el x1, 3f, 2f, 1f
3:	msr	vbar_el3, x0
	mrs	x0, scr_el3
	orr	x0, x0, #0xf			/* SCR_EL3.NS|IRQ|FIQ|EA */
	msr	scr_el3, x0
	msr	cptr_el3, xzr			/* Enable FP/SIMD */
#ifdef COUNTER_FREQUENCY
	ldr	x0, =COUNTER_FREQUENCY
	msr	cntfrq_el0, x0			/* Initialize CNTFRQ */
#endif
	b	0f
2:	msr	vbar_el2, x0
	mov	x0, #0x33ff
	msr	cptr_el2, x0			/* Enable FP/SIMD */
	b	0f
1:	msr	vbar_el1, x0
	mov	x0, #3 << 20
	msr	cpacr_el1, x0			/* Enable FP/SIMD */
0:

	/*
	 * Set the SMPEN bit for coherency. The register is
	 * IMPLEMENTATION DEFINED (not architectural), but at the moment
	 * this bit should be set for A53/A57/A72.
	 */
#ifdef CONFIG_ARMV8_SET_SMPEN
	mrs	x0, S3_1_c15_c2_1		/* cpuectlr_el1 */
	orr	x0, x0, #0x40			/* SMPEN */
	msr	S3_1_c15_c2_1, x0
#endif

	/* Apply ARM core specific erratas */
	bl	apply_core_errata

	/*
	 * Cache/BPB/TLB invalidation is deferred:
	 * i-cache is invalidated before being enabled in icache_enable(),
	 * TLB is invalidated before the MMU is enabled in dcache_enable(),
	 * d-cache is invalidated before being enabled in dcache_enable().
	 */

	/* Processor specific initialization */
	bl	lowlevel_init

#if defined(CONFIG_ARMV8_SPIN_TABLE) && !defined(CONFIG_SPL_BUILD)
	branch_if_master x0, x1, master_cpu
	b	spin_table_secondary_jump
	/* never return */
#elif defined(CONFIG_ARMV8_MULTIENTRY)
	branch_if_master x0, x1, master_cpu

	/*
	 * Secondary (slave) CPUs: park until the master publishes a
	 * jump address in the spin table, then branch to it.
	 */
slave_cpu:
	wfe
	ldr	x1, =CPU_RELEASE_ADDR
	ldr	x0, [x1]
	cbz	x0, slave_cpu			/* 0 means "not released yet" */
	br	x0				/* branch to the given address */
#endif /* CONFIG_ARMV8_MULTIENTRY */
master_cpu:
	bl	_main
131 | ||
#ifdef CONFIG_SYS_RESET_SCTRL
/*
 * reset_sctrl - force SCTLR_ELx into a known-safe baseline.
 * Mask 0xfdfffffa clears bits 0, 2 and 25 (SCTLR.M, SCTLR.C, SCTLR.EE),
 * i.e. MMU off, d-cache off, little-endian.
 * Ends by tail-branching into __asm_invalidate_tlb_all, which returns
 * to our caller through the still-live lr.
 * Clobbers: x0, x1.
 */
reset_sctrl:
	switch_el x1, 3f, 2f, 1f
3:
	mrs	x0, sctlr_el3
	b	0f
2:
	mrs	x0, sctlr_el2
	b	0f
1:
	mrs	x0, sctlr_el1

0:
	ldr	x1, =0xfdfffffa		/* ~(M | C | EE) */
	and	x0, x0, x1

	switch_el x1, 6f, 5f, 4f
6:
	msr	sctlr_el3, x0
	b	7f
5:
	msr	sctlr_el2, x0
	b	7f
4:
	msr	sctlr_el1, x0

7:
	dsb	sy
	isb
	b	__asm_invalidate_tlb_all
	ret				/* unreachable: branch above never falls through */
#endif
164 | ||
0ae76531 DF |
165 | /*-----------------------------------------------------------------------*/ |
166 | ||
/*
 * apply_core_errata - apply CPU-specific errata workarounds.
 * Weak default handles Cortex-A57 only; boards may override.
 * Each workaround toggles IMPLEMENTATION DEFINED bits in
 * CPUACTLR_EL1 (S3_1_c15_c2_0).
 * Clobbers: x0, x29.
 */
WEAK(apply_core_errata)

	mov	x29, lr			/* save LR across the errata path */
	/* For now, we support Cortex-A57 specific errata only */

	/* Check if we are running on a Cortex-A57 core */
	branch_if_a57_core x0, apply_a57_core_errata
0:
	mov	lr, x29			/* restore LR */
	ret

apply_a57_core_errata:

#ifdef CONFIG_ARM_ERRATA_828024
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable non-allocate hint of w-b-n-a memory type */
	orr	x0, x0, #1 << 49
	/* Disable write streaming no L1-allocate threshold */
	orr	x0, x0, #3 << 25
	/* Disable write streaming no-allocate threshold */
	orr	x0, x0, #3 << 27
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_826974
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable speculative load execution ahead of a DMB */
	orr	x0, x0, #1 << 59
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_833471
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/*
	 * FPSCR write flush.
	 * Note that in some cases where a flush is unnecessary this
	 * could impact performance.
	 */
	orr	x0, x0, #1 << 38
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_829520
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/*
	 * Setting the Disable Indirect Predictor bit prevents this
	 * erratum from occurring.
	 * Note that in some cases where a flush is unnecessary this
	 * could impact performance.
	 */
	orr	x0, x0, #1 << 4
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_833069
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable Enable Invalidates of BTB bit */
	/*
	 * NOTE(review): this AND clears every bit except bits 1-3,
	 * not just the BTB-invalidate bit - looks suspicious but is
	 * kept as-is; verify against the A57 errata notice.
	 */
	and	x0, x0, #0xE
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif
	b	0b			/* rejoin common exit: restore LR, ret */
ENDPROC(apply_core_errata)
225 | ||
226 | /*-----------------------------------------------------------------------*/ | |
227 | ||
/*
 * lowlevel_init - weak default for processor-specific init.
 * Initialises the GIC (when configured) and, for multi-entry
 * configurations, parks slave CPUs and drops them to EL2/EL1.
 * Clobbers: x0, x1, x4, x5, x29 (plus whatever the callees use).
 */
WEAK(lowlevel_init)
	mov	x29, lr			/* save LR: callees below clobber lr */

#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
	/* Only the master CPU initialises the shared distributor */
	branch_if_slave x0, 1f
	ldr	x0, =GICD_BASE
	bl	gic_init_secure
1:
#if defined(CONFIG_GICV3)
	ldr	x0, =GICR_BASE
	bl	gic_init_secure_percpu
#elif defined(CONFIG_GICV2)
	ldr	x0, =GICD_BASE
	ldr	x1, =GICC_BASE
	bl	gic_init_secure_percpu
#endif
#endif

#ifdef CONFIG_ARMV8_MULTIENTRY
	branch_if_master x0, x1, 2f

	/*
	 * Slaves must wait for the master to clear the spin table.
	 * This sync prevents slaves from observing a stale spin-table
	 * value and jumping to the wrong place.
	 */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
#ifdef CONFIG_GICV2
	ldr	x0, =GICC_BASE
#endif
	bl	gic_wait_for_interrupt
#endif

	/*
	 * All slaves will enter EL2 and optionally EL1.
	 */
	adr	x4, lowlevel_in_el2	/* resume point after the EL switch */
	ldr	x5, =ES_TO_AARCH64
	bl	armv8_switch_to_el2

lowlevel_in_el2:
#ifdef CONFIG_ARMV8_SWITCH_TO_EL1
	adr	x4, lowlevel_in_el1
	ldr	x5, =ES_TO_AARCH64
	bl	armv8_switch_to_el1

lowlevel_in_el1:
#endif

#endif /* CONFIG_ARMV8_MULTIENTRY */

2:
	mov	lr, x29			/* restore LR */
	ret
ENDPROC(lowlevel_init)
283 | ||
/*
 * smp_kick_all_cpus - weak default: wake secondaries via SGI 0.
 * With a GIC configured this tail-branches into
 * gic_kick_secondary_cpus (which returns through lr);
 * otherwise it is a no-op.
 */
WEAK(smp_kick_all_cpus)
	/* Kick secondary cpus up by SGI 0 interrupt */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
	ldr	x0, =GICD_BASE
	b	gic_kick_secondary_cpus
#endif
	ret				/* reached only when no GIC is configured */
ENDPROC(smp_kick_all_cpus)
292 | ||
0ae76531 DF |
293 | /*-----------------------------------------------------------------------*/ |
294 | ||
/*
 * c_runtime_cpu_setup - per-CPU setup after relocation.
 * Re-points the current EL's vector base register at the
 * (now relocated) 'vectors' table.
 * Clobbers: x0, x1.
 */
ENTRY(c_runtime_cpu_setup)
	/* Relocate vBAR */
	adr	x0, vectors
	switch_el x1, 3f, 2f, 1f
3:	msr	vbar_el3, x0
	b	0f
2:	msr	vbar_el2, x0
	b	0f
1:	msr	vbar_el1, x0
0:

	ret
ENDPROC(c_runtime_cpu_setup)
0e2b5350 SW |
308 | |
/*
 * save_boot_params - weak default hook at the very first reset step.
 * Boards override this to stash bootrom-provided registers;
 * the default saves nothing and jumps straight back.
 */
WEAK(save_boot_params)
	b	save_boot_params_ret	/* back to my caller */
ENDPROC(save_boot_params)