/*
 * (C) Copyright 2013
 * David Feng <fenghua@phytium.com.cn>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>
#include <linux/linkage.h>
#include <asm/macro.h>
#include <asm/armv8/mmu.h>

/*************************************************************************
 *
 * Startup Code (reset vector)
 *
 *************************************************************************/

/* First instruction executed after reset: jump over the data words below. */
.globl	_start
_start:
	b	reset

	.align	3

/* Link-time base address of the image (CONFIG_SYS_TEXT_BASE). */
.globl	_TEXT_BASE
_TEXT_BASE:
	.quad	CONFIG_SYS_TEXT_BASE

/*
 * These are defined in the linker script.
 * Stored as offsets from _start so the values remain valid wherever
 * the image is loaded or relocated.
 */
.globl	_end_ofs
_end_ofs:
	.quad	_end - _start			/* offset of image end */

.globl	_bss_start_ofs
_bss_start_ofs:
	.quad	__bss_start - _start		/* offset of BSS start */

.globl	_bss_end_ofs
_bss_end_ofs:
	.quad	__bss_end - _start		/* offset of BSS end */
reset:
#ifdef CONFIG_SYS_RESET_SCTRL
	bl	reset_sctrl			/* force SCTLR into a known state first */
#endif
	/*
	 * Could be EL3/EL2/EL1, Initial State:
	 * Little Endian, MMU Disabled, i/dCache Disabled
	 */
	adr	x0, vectors			/* x0 = exception vector table */
	switch_el x1, 3f, 2f, 1f		/* dispatch on current exception level */
3:	msr	vbar_el3, x0
	mrs	x0, scr_el3
	orr	x0, x0, #0xf			/* SCR_EL3.NS|IRQ|FIQ|EA */
	msr	scr_el3, x0
	msr	cptr_el3, xzr			/* Enable FP/SIMD */
#ifdef COUNTER_FREQUENCY
	ldr	x0, =COUNTER_FREQUENCY
	msr	cntfrq_el0, x0			/* Initialize CNTFRQ */
#endif
	b	0f
2:	msr	vbar_el2, x0
	mov	x0, #0x33ff
	msr	cptr_el2, x0			/* Enable FP/SIMD */
	b	0f
1:	msr	vbar_el1, x0
	mov	x0, #3 << 20			/* CPACR_EL1.FPEN = 0b11 */
	msr	cpacr_el1, x0			/* Enable FP/SIMD */
0:

	/* Apply ARM core specific erratas */
	bl	apply_core_errata

	/*
	 * Cache/BPB/TLB Invalidate
	 * i-cache is invalidated before enabled in icache_enable()
	 * tlb is invalidated before mmu is enabled in dcache_enable()
	 * d-cache is invalidated before enabled in dcache_enable()
	 */

	/* Processor specific initialization */
	bl	lowlevel_init

#ifdef CONFIG_ARMV8_MULTIENTRY
	branch_if_master x0, x1, master_cpu

	/*
	 * Slave CPUs: park in wfe until the master publishes a jump
	 * address in the spin table at CPU_RELEASE_ADDR.
	 */
slave_cpu:
	wfe
	ldr	x1, =CPU_RELEASE_ADDR
	ldr	x0, [x1]
	cbz	x0, slave_cpu			/* not released yet - keep waiting */
	br	x0				/* branch to the given address */
master_cpu:
	/* On the master CPU */
#endif /* CONFIG_ARMV8_MULTIENTRY */

	bl	_main				/* continue boot in crt0/board code */
#ifdef CONFIG_SYS_RESET_SCTRL
/*
 * reset_sctrl - put the current EL's SCTLR into a sane boot state.
 *
 * ANDs with 0xfdfffffa, i.e. clears M (MMU enable, bit 0),
 * C (data cache enable, bit 2) and EE (exception endianness, bit 25),
 * then tail-calls __asm_invalidate_tlb_all, which returns to
 * reset_sctrl's caller through the still-intact lr.
 */
reset_sctrl:
	switch_el x1, 3f, 2f, 1f		/* read SCTLR of the running EL */
3:
	mrs	x0, sctlr_el3
	b	0f
2:
	mrs	x0, sctlr_el2
	b	0f
1:
	mrs	x0, sctlr_el1

0:
	ldr	x1, =0xfdfffffa			/* ~(EE | C | M) */
	and	x0, x0, x1

	switch_el x1, 6f, 5f, 4f		/* write it back to the same EL */
6:
	msr	sctlr_el3, x0
	b	7f
5:
	msr	sctlr_el2, x0
	b	7f
4:
	msr	sctlr_el1, x0

7:
	dsb	sy				/* ensure the write is complete */
	isb					/* ...and visible to fetched code */
	b	__asm_invalidate_tlb_all	/* tail call; returns via lr */
	ret					/* NOTE(review): unreachable after
						   the unconditional branch above */
#endif

0ae76531 DF |
138 | /*-----------------------------------------------------------------------*/ |
139 | ||
/*
 * apply_core_errata - apply CPU-specific errata workarounds.
 *
 * Weak default: currently handles Cortex-A57 cores only; other cores
 * fall straight through to the return. Boards/SoCs may override.
 * Clobbers x0 and x29; lr is preserved in x29 (no stack exists yet).
 * All workarounds below modify CPUACTLR_EL1 (encoded S3_1_c15_c2_0).
 */
WEAK(apply_core_errata)

	mov	x29, lr			/* Save LR */
	/* For now, we support Cortex-A57 specific errata only */

	/* Check if we are running on a Cortex-A57 core */
	branch_if_a57_core x0, apply_a57_core_errata
0:
	mov	lr, x29			/* Restore LR */
	ret

apply_a57_core_errata:

#ifdef CONFIG_ARM_ERRATA_828024
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable non-allocate hint of w-b-n-a memory type */
	orr	x0, x0, #1 << 49
	/* Disable write streaming no L1-allocate threshold */
	orr	x0, x0, #3 << 25
	/* Disable write streaming no-allocate threshold */
	orr	x0, x0, #3 << 27
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_826974
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable speculative load execution ahead of a DMB */
	orr	x0, x0, #1 << 59
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_833471
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* FPSCR write flush.
	 * Note that in some cases where a flush is unnecessary this
	 * could impact performance. */
	orr	x0, x0, #1 << 38
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_829520
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable Indirect Predictor bit will prevent this erratum
	 * from occurring
	 * Note that in some cases where a flush is unnecessary this
	 * could impact performance. */
	orr	x0, x0, #1 << 4
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_833069
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable Enable Invalidates of BTB bit */
	/* NOTE(review): this AND clears every CPUACTLR_EL1 bit except
	 * bits 1-3, far more than just the BTB-invalidates bit - verify
	 * against the Cortex-A57 erratum 833069 workaround description. */
	and	x0, x0, #0xE
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif
	b	0b			/* back to the common restore/ret path */
ENDPROC(apply_core_errata)
198 | ||
199 | /*-----------------------------------------------------------------------*/ | |
200 | ||
/*
 * lowlevel_init - weak default early platform initialization.
 *
 * When a GIC is configured, the master CPU initializes the distributor
 * once and every CPU initializes its own per-CPU interface. With
 * CONFIG_ARMV8_MULTIENTRY, slaves then wait for the master's SGI,
 * drop to EL2 (and optionally EL1), and fall through to the common
 * return path at 2:.
 *
 * Fix vs. previous revision: the preprocessor conditionals were
 * unbalanced - the outer GICV2/GICV3 #if was never closed and the
 * closing "#endif CONFIG_ARMV8_MULTIENTRY" had no matching #ifdef.
 * Both are restored here so the file preprocesses/assembles again.
 *
 * Clobbers x0, x1, x29; lr is preserved in x29 (no stack exists yet).
 */
WEAK(lowlevel_init)
	mov	x29, lr			/* Save LR */

#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
	/* The distributor is set up once, by the master CPU only */
	branch_if_slave x0, 1f
	ldr	x0, =GICD_BASE
	bl	gic_init_secure
1:
	/* Every CPU initializes its own per-CPU interface */
#if defined(CONFIG_GICV3)
	ldr	x0, =GICR_BASE
	bl	gic_init_secure_percpu
#elif defined(CONFIG_GICV2)
	ldr	x0, =GICD_BASE
	ldr	x1, =GICC_BASE
	bl	gic_init_secure_percpu
#endif
#endif /* CONFIG_GICV2 || CONFIG_GICV3 */

#ifdef CONFIG_ARMV8_MULTIENTRY
	branch_if_master x0, x1, 2f

	/*
	 * Slave should wait for master clearing spin table.
	 * This sync prevents slaves observing incorrect
	 * value of spin table and jumping to wrong place.
	 */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
#ifdef CONFIG_GICV2
	ldr	x0, =GICC_BASE
#endif
	bl	gic_wait_for_interrupt
#endif

	/*
	 * All slaves will enter EL2 and optionally EL1.
	 */
	bl	armv8_switch_to_el2
#ifdef CONFIG_ARMV8_SWITCH_TO_EL1
	bl	armv8_switch_to_el1
#endif

#endif /* CONFIG_ARMV8_MULTIENTRY */

2:
	mov	lr, x29			/* Restore LR */
	ret
ENDPROC(lowlevel_init)
246 | ||
c71645ad DF |
/*
 * smp_kick_all_cpus - wake secondary CPUs parked in wfe.
 *
 * Weak default: sends SGI 0 through the GIC distributor when a GIC is
 * configured; otherwise a no-op. Boards/SoCs may override.
 * Clobbers x0, x29; lr is preserved in x29 (no stack exists yet).
 */
WEAK(smp_kick_all_cpus)
	/* Kick secondary cpus up by SGI 0 interrupt */
	mov	x29, lr			/* Save LR */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
	ldr	x0, =GICD_BASE
	bl	gic_kick_secondary_cpus
#endif
	mov	lr, x29			/* Restore LR */
	ret
ENDPROC(smp_kick_all_cpus)
257 | ||
0ae76531 DF |
258 | /*-----------------------------------------------------------------------*/ |
259 | ||
/*
 * c_runtime_cpu_setup - per-CPU setup once the C runtime is available.
 *
 * Re-points VBAR of the current exception level at the (relocated)
 * exception vector table. Clobbers x0, x1.
 */
ENTRY(c_runtime_cpu_setup)
	/* Relocate vBAR */
	adr	x0, vectors			/* x0 = relocated vector table */
	switch_el x1, 3f, 2f, 1f		/* dispatch on current EL */
3:	msr	vbar_el3, x0
	b	0f
2:	msr	vbar_el2, x0
	b	0f
1:	msr	vbar_el1, x0
0:

	ret
ENDPROC(c_runtime_cpu_setup)