/*
 * Keystone2: DDR3 initialization
 *
 * (C) Copyright 2012-2014
 *     Texas Instruments Incorporated, <www.ti.com>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */
ef509b90 | 10 | #include <asm/io.h> |
101eec50 | 11 | #include <common.h> |
89f44bb0 | 12 | #include <asm/arch/msmc.h> |
0b868589 | 13 | #include <asm/arch/ddr3.h> |
6c343825 | 14 | #include <asm/arch/psc_defs.h> |
ef509b90 | 15 | |
89f44bb0 VA |
16 | #include <asm/ti-common/ti-edma3.h> |
17 | ||
18 | #define DDR3_EDMA_BLK_SIZE_SHIFT 10 | |
19 | #define DDR3_EDMA_BLK_SIZE (1 << DDR3_EDMA_BLK_SIZE_SHIFT) | |
20 | #define DDR3_EDMA_BCNT 0x8000 | |
21 | #define DDR3_EDMA_CCNT 1 | |
22 | #define DDR3_EDMA_XF_SIZE (DDR3_EDMA_BLK_SIZE * DDR3_EDMA_BCNT) | |
23 | #define DDR3_EDMA_SLOT_NUM 1 | |
24 | ||
/*
 * ddr3_init_ddrphy - program and initialize the DDR3 PHY
 * @base:    DDR3 PHY register base address
 * @phy_cfg: board/SoC specific PHY register values and masks
 *
 * Waits for the PHY to report init done, loads PLL/timing/mode registers,
 * then runs the two-stage PHY init (pir_v1 then pir_v2), busy-waiting on
 * PGSR0 bit 0 after each stage.  NOTE(review): the busy-wait loops have no
 * timeout, so a dead PHY hangs boot here.
 */
void ddr3_init_ddrphy(u32 base, struct ddr3_phy_config *phy_cfg)
{
	unsigned int tmp;

	/* Wait for PHY init done (PGSR0 bit 0) before touching registers */
	while ((__raw_readl(base + KS2_DDRPHY_PGSR0_OFFSET)
		& 0x00000001) != 0x00000001)
		;

	__raw_writel(phy_cfg->pllcr, base + KS2_DDRPHY_PLLCR_OFFSET);

	/* Read-modify-write PGCR1: only the masked field is replaced */
	tmp = __raw_readl(base + KS2_DDRPHY_PGCR1_OFFSET);
	tmp &= ~(phy_cfg->pgcr1_mask);
	tmp |= phy_cfg->pgcr1_val;
	__raw_writel(tmp, base + KS2_DDRPHY_PGCR1_OFFSET);

	/* PHY timing parameter registers */
	__raw_writel(phy_cfg->ptr0, base + KS2_DDRPHY_PTR0_OFFSET);
	__raw_writel(phy_cfg->ptr1, base + KS2_DDRPHY_PTR1_OFFSET);
	__raw_writel(phy_cfg->ptr3, base + KS2_DDRPHY_PTR3_OFFSET);
	__raw_writel(phy_cfg->ptr4, base + KS2_DDRPHY_PTR4_OFFSET);

	/* Read-modify-write DRAM configuration register */
	tmp = __raw_readl(base + KS2_DDRPHY_DCR_OFFSET);
	tmp &= ~(phy_cfg->dcr_mask);
	tmp |= phy_cfg->dcr_val;
	__raw_writel(tmp, base + KS2_DDRPHY_DCR_OFFSET);

	/* DRAM timing and mode registers */
	__raw_writel(phy_cfg->dtpr0, base + KS2_DDRPHY_DTPR0_OFFSET);
	__raw_writel(phy_cfg->dtpr1, base + KS2_DDRPHY_DTPR1_OFFSET);
	__raw_writel(phy_cfg->dtpr2, base + KS2_DDRPHY_DTPR2_OFFSET);
	__raw_writel(phy_cfg->mr0, base + KS2_DDRPHY_MR0_OFFSET);
	__raw_writel(phy_cfg->mr1, base + KS2_DDRPHY_MR1_OFFSET);
	__raw_writel(phy_cfg->mr2, base + KS2_DDRPHY_MR2_OFFSET);
	__raw_writel(phy_cfg->dtcr, base + KS2_DDRPHY_DTCR_OFFSET);
	__raw_writel(phy_cfg->pgcr2, base + KS2_DDRPHY_PGCR2_OFFSET);

	/* ZQ impedance calibration controls */
	__raw_writel(phy_cfg->zq0cr1, base + KS2_DDRPHY_ZQ0CR1_OFFSET);
	__raw_writel(phy_cfg->zq1cr1, base + KS2_DDRPHY_ZQ1CR1_OFFSET);
	__raw_writel(phy_cfg->zq2cr1, base + KS2_DDRPHY_ZQ2CR1_OFFSET);

	/* Kick off PHY init stage 1 and wait for init done again */
	__raw_writel(phy_cfg->pir_v1, base + KS2_DDRPHY_PIR_OFFSET);
	while ((__raw_readl(base + KS2_DDRPHY_PGSR0_OFFSET) & 0x1) != 0x1)
		;

	/*
	 * K2G only: adjust bit 0 of the DATX8 lane general config regs
	 * (enable lane 4, disable lanes 5-8) before the second init stage.
	 */
	if (cpu_is_k2g()) {
		setbits_le32(base + KS2_DDRPHY_DATX8_4_OFFSET, 0x1);
		clrbits_le32(base + KS2_DDRPHY_DATX8_5_OFFSET, 0x1);
		clrbits_le32(base + KS2_DDRPHY_DATX8_6_OFFSET, 0x1);
		clrbits_le32(base + KS2_DDRPHY_DATX8_7_OFFSET, 0x1);
		clrbits_le32(base + KS2_DDRPHY_DATX8_8_OFFSET, 0x1);
	}

	/* PHY init stage 2 and final wait for init done */
	__raw_writel(phy_cfg->pir_v2, base + KS2_DDRPHY_PIR_OFFSET);
	while ((__raw_readl(base + KS2_DDRPHY_PGSR0_OFFSET) & 0x1) != 0x1)
		;
}
79 | ||
/*
 * ddr3_init_ddremif - program the DDR3 EMIF controller
 * @base:     DDR3 EMIF register base address
 * @emif_cfg: board/SoC specific EMIF register values
 *
 * Writes the SDRAM config, timing, ZQ config and refresh control
 * registers.  The refresh control (sdrfc) is written last.
 */
void ddr3_init_ddremif(u32 base, struct ddr3_emif_config *emif_cfg)
{
	__raw_writel(emif_cfg->sdcfg, base + KS2_DDR3_SDCFG_OFFSET);
	__raw_writel(emif_cfg->sdtim1, base + KS2_DDR3_SDTIM1_OFFSET);
	__raw_writel(emif_cfg->sdtim2, base + KS2_DDR3_SDTIM2_OFFSET);
	__raw_writel(emif_cfg->sdtim3, base + KS2_DDR3_SDTIM3_OFFSET);
	__raw_writel(emif_cfg->sdtim4, base + KS2_DDR3_SDTIM4_OFFSET);
	__raw_writel(emif_cfg->zqcfg, base + KS2_DDR3_ZQCFG_OFFSET);
	__raw_writel(emif_cfg->sdrfc, base + KS2_DDR3_SDRFC_OFFSET);
}
101eec50 | 90 | |
89f44bb0 VA |
91 | int ddr3_ecc_support_rmw(u32 base) |
92 | { | |
93 | u32 value = __raw_readl(base + KS2_DDR3_MIDR_OFFSET); | |
94 | ||
95 | /* Check the DDR3 controller ID reg if the controllers | |
96 | supports ECC RMW or not */ | |
97 | if (value == 0x40461C02) | |
98 | return 1; | |
99 | ||
100 | return 0; | |
101 | } | |
102 | ||
/*
 * ddr3_ecc_config - write the DDR3 EMIF ECC control register
 * @base:  DDR3 EMIF register base address
 * @value: ECC control value to program (0 disables ECC)
 *
 * When the written value enables ECC, also clears the stale 1-bit error
 * counter, unmasks the ECC system interrupts and acks any pending ECC
 * interrupt status so enable starts from a clean state.
 */
static void ddr3_ecc_config(u32 base, u32 value)
{
	u32 data;

	__raw_writel(value, base + KS2_DDR3_ECC_CTRL_OFFSET);
	udelay(100000); /* delay required to synchronize across clock domains */

	if (value & KS2_DDR3_ECC_EN) {
		/* Clear the 1-bit error count (writing the value back clears) */
		data = __raw_readl(base + KS2_DDR3_ONE_BIT_ECC_ERR_CNT_OFFSET);
		__raw_writel(data, base + KS2_DDR3_ONE_BIT_ECC_ERR_CNT_OFFSET);

		/* enable the ECC interrupt */
		__raw_writel(KS2_DDR3_1B_ECC_ERR_SYS | KS2_DDR3_2B_ECC_ERR_SYS |
			     KS2_DDR3_WR_ECC_ERR_SYS,
			     base + KS2_DDR3_ECC_INT_ENABLE_SET_SYS_OFFSET);

		/* Clear the ECC error interrupt status */
		__raw_writel(KS2_DDR3_1B_ECC_ERR_SYS | KS2_DDR3_2B_ECC_ERR_SYS |
			     KS2_DDR3_WR_ECC_ERR_SYS,
			     base + KS2_DDR3_ECC_INT_STATUS_OFFSET);
	}
}
126 | ||
/*
 * ddr3_reset_data - zero-fill all of DDR3 so ECC state becomes valid
 * @base:      32-bit CPU-visible DDR3 base address (EMIF slave window)
 * @ddr3_size: DDR3 size; shifted by (30 - KS2_MSMC_SEG_SIZE_SHIFT), so
 *             presumably expressed in GB -- TODO confirm against callers
 *
 * Uses EDMA3 to replicate a zeroed 1KB block (edma_src) across all of
 * DDR.  Because the EDMA master only sees a 32-bit address space, memory
 * is scrubbed one 2GB window at a time: each iteration remaps an MSMC
 * SES MPAX segment so a different DDR region aliases to @base.  The
 * original MPAX settings are saved on entry and restored on exit.
 */
static void ddr3_reset_data(u32 base, u32 ddr3_size)
{
	u32 mpax[2];
	u32 seg_num;
	u32 seg, blks, dst, edma_blks;
	struct edma3_slot_config slot;
	struct edma3_channel_config edma_channel;
	/* 1KB source block of zeros, 16-byte aligned for the DMA engine */
	u32 edma_src[DDR3_EDMA_BLK_SIZE/4] __aligned(16) = {0, };

	/* Setup an edma to copy the 1k block to the entire DDR */
	puts("\nClear entire DDR3 memory to enable ECC\n");

	/* save the SES MPAX regs */
	if (cpu_is_k2g())
		msmc_get_ses_mpax(K2G_MSMC_SEGMENT_ARM, 0, mpax);
	else
		msmc_get_ses_mpax(K2HKLE_MSMC_SEGMENT_ARM, 0, mpax);

	/* setup edma slot 1 configuration */
	slot.opt = EDMA3_SLOPT_TRANS_COMP_INT_ENB |
		   EDMA3_SLOPT_COMP_CODE(0) |
		   EDMA3_SLOPT_STATIC | EDMA3_SLOPT_AB_SYNC;
	slot.bcnt = DDR3_EDMA_BCNT;
	slot.acnt = DDR3_EDMA_BLK_SIZE;
	slot.ccnt = DDR3_EDMA_CCNT;
	/* src_bidx = 0: the same zero block is re-read for every array */
	slot.src_bidx = 0;
	slot.dst_bidx = DDR3_EDMA_BLK_SIZE;
	slot.src_cidx = 0;
	slot.dst_cidx = 0;
	slot.link = EDMA3_PARSET_NULL_LINK;
	slot.bcntrld = 0;
	edma3_slot_configure(KS2_EDMA0_BASE, DDR3_EDMA_SLOT_NUM, &slot);

	/* configure quik edma channel */
	edma_channel.slot = DDR3_EDMA_SLOT_NUM;
	edma_channel.chnum = 0;
	edma_channel.complete_code = 0;
	/* event trigger after dst update */
	edma_channel.trigger_slot_word = EDMA3_TWORD(dst);
	qedma3_start(KS2_EDMA0_BASE, &edma_channel);

	/* DDR3 size in segments (4KB seg size) */
	seg_num = ddr3_size << (30 - KS2_MSMC_SEG_SIZE_SHIFT);

	for (seg = 0; seg < seg_num; seg += KS2_MSMC_MAP_SEG_NUM) {
		/* map 2GB 36-bit DDR address to 32-bit DDR address in EMIF
		   access slave interface so that edma driver can access */
		if (cpu_is_k2g()) {
			msmc_map_ses_segment(K2G_MSMC_SEGMENT_ARM, 0,
					     base >> KS2_MSMC_SEG_SIZE_SHIFT,
					     KS2_MSMC_DST_SEG_BASE + seg,
					     MPAX_SEG_2G);
		} else {
			msmc_map_ses_segment(K2HKLE_MSMC_SEGMENT_ARM, 0,
					     base >> KS2_MSMC_SEG_SIZE_SHIFT,
					     KS2_MSMC_DST_SEG_BASE + seg,
					     MPAX_SEG_2G);
		}

		/* block count for this window; the last window may be partial */
		if ((seg_num - seg) > KS2_MSMC_MAP_SEG_NUM)
			edma_blks = KS2_MSMC_MAP_SEG_NUM <<
					(KS2_MSMC_SEG_SIZE_SHIFT
					- DDR3_EDMA_BLK_SIZE_SHIFT);
		else
			edma_blks = (seg_num - seg) << (KS2_MSMC_SEG_SIZE_SHIFT
					- DDR3_EDMA_BLK_SIZE_SHIFT);

		/* Use edma driver to scrub 2GB DDR memory */
		for (dst = base, blks = 0; blks < edma_blks;
		     blks += DDR3_EDMA_BCNT, dst += DDR3_EDMA_XF_SIZE) {
			edma3_set_src_addr(KS2_EDMA0_BASE,
					   edma_channel.slot, (u32)edma_src);
			/* writing dst triggers the transfer (TWORD(dst)) */
			edma3_set_dest_addr(KS2_EDMA0_BASE,
					    edma_channel.slot, (u32)dst);

			while (edma3_check_for_transfer(KS2_EDMA0_BASE,
							&edma_channel))
				udelay(10);
		}
	}

	qedma3_stop(KS2_EDMA0_BASE, &edma_channel);

	/* restore the SES MPAX regs */
	if (cpu_is_k2g())
		msmc_set_ses_mpax(K2G_MSMC_SEGMENT_ARM, 0, mpax);
	else
		msmc_set_ses_mpax(K2HKLE_MSMC_SEGMENT_ARM, 0, mpax);
}
216 | ||
217 | static void ddr3_ecc_init_range(u32 base) | |
218 | { | |
219 | u32 ecc_val = KS2_DDR3_ECC_EN; | |
220 | u32 rmw = ddr3_ecc_support_rmw(base); | |
221 | ||
222 | if (rmw) | |
223 | ecc_val |= KS2_DDR3_ECC_RMW_EN; | |
224 | ||
225 | __raw_writel(0, base + KS2_DDR3_ECC_ADDR_RANGE1_OFFSET); | |
226 | ||
227 | ddr3_ecc_config(base, ecc_val); | |
228 | } | |
229 | ||
230 | void ddr3_enable_ecc(u32 base, int test) | |
231 | { | |
232 | u32 ecc_val = KS2_DDR3_ECC_ENABLE; | |
233 | u32 rmw = ddr3_ecc_support_rmw(base); | |
234 | ||
235 | if (test) | |
236 | ecc_val |= KS2_DDR3_ECC_ADDR_RNG_1_EN; | |
237 | ||
238 | if (!rmw) { | |
239 | if (!test) | |
240 | /* by default, disable ecc when rmw = 0 and no | |
241 | ecc test */ | |
242 | ecc_val = 0; | |
243 | } else { | |
244 | ecc_val |= KS2_DDR3_ECC_RMW_EN; | |
245 | } | |
246 | ||
247 | ddr3_ecc_config(base, ecc_val); | |
248 | } | |
249 | ||
/*
 * ddr3_disable_ecc - turn off DDR3 ECC by clearing the ECC control register
 * @base: DDR3 EMIF register base address
 */
void ddr3_disable_ecc(u32 base)
{
	ddr3_ecc_config(base, 0);
}
254 | ||
255 | #if defined(CONFIG_SOC_K2HK) || defined(CONFIG_SOC_K2L) | |
/*
 * cic_init - reset the Chip Interrupt Controller to a known state
 * @base: CIC register base address
 *
 * Global interrupts are disabled while the control registers are
 * cleared, then re-enabled.
 */
static void cic_init(u32 base)
{
	/* Disable CIC global interrupts */
	__raw_writel(0, base + KS2_CIC_GLOBAL_ENABLE);

	/* Set to normal mode, no nesting, no priority hold */
	__raw_writel(0, base + KS2_CIC_CTRL);
	__raw_writel(0, base + KS2_CIC_HOST_CTRL);

	/* Enable CIC global interrupts */
	__raw_writel(1, base + KS2_CIC_GLOBAL_ENABLE);
}
268 | ||
/*
 * cic_map_cic_to_gic - route a CIC system interrupt to the GIC
 * @base:     CIC register base address
 * @chan_num: CIC output channel to route through
 * @irq_num:  CIC system interrupt number to map
 */
static void cic_map_cic_to_gic(u32 base, u32 chan_num, u32 irq_num)
{
	/* Map the system interrupt to a CIC channel */
	__raw_writeb(chan_num, base + KS2_CIC_CHAN_MAP(0) + irq_num);

	/* Enable CIC system interrupt */
	__raw_writel(irq_num, base + KS2_CIC_SYS_ENABLE_IDX_SET);

	/* Enable CIC Host interrupt */
	__raw_writel(chan_num, base + KS2_CIC_HOST_ENABLE_IDX_SET);
}
280 | ||
/*
 * ddr3_map_ecc_cic2_irq - route the DDR3 ECC error interrupt via CIC2
 * @base: CIC2 register base address
 */
static void ddr3_map_ecc_cic2_irq(u32 base)
{
	cic_init(base);
	cic_map_cic_to_gic(base, KS2_CIC2_DDR3_ECC_CHAN_NUM,
			   KS2_CIC2_DDR3_ECC_IRQ_NUM);
}
287 | #endif | |
288 | ||
/*
 * ddr3_init_ecc - bring up DDR3 ECC for the whole memory
 * @base:      DDR3 EMIF register base address
 * @ddr3_size: DDR3 size, forwarded to ddr3_reset_data() (presumably in
 *             GB -- TODO confirm against callers)
 *
 * ECC is only enabled when the controller supports read-modify-write;
 * otherwise it is explicitly disabled.  All of DDR is scrubbed first so
 * every location carries valid ECC before checking is switched on.
 */
void ddr3_init_ecc(u32 base, u32 ddr3_size)
{
	if (!ddr3_ecc_support_rmw(base)) {
		ddr3_disable_ecc(base);
		return;
	}

	ddr3_ecc_init_range(base);
	/* scrub from the CPU-visible SDRAM base, not the EMIF reg base */
	ddr3_reset_data(CONFIG_SYS_SDRAM_BASE, ddr3_size);

	/* mapping DDR3 ECC system interrupt from CIC2 to GIC */
#if defined(CONFIG_SOC_K2HK) || defined(CONFIG_SOC_K2L)
	ddr3_map_ecc_cic2_irq(KS2_CIC2_BASE);
#endif
	ddr3_enable_ecc(base, 0);
}
305 | ||
306 | void ddr3_check_ecc_int(u32 base) | |
307 | { | |
308 | char *env; | |
309 | int ecc_test = 0; | |
310 | u32 value = __raw_readl(base + KS2_DDR3_ECC_INT_STATUS_OFFSET); | |
311 | ||
312 | env = getenv("ecc_test"); | |
313 | if (env) | |
314 | ecc_test = simple_strtol(env, NULL, 0); | |
315 | ||
316 | if (value & KS2_DDR3_WR_ECC_ERR_SYS) | |
317 | puts("DDR3 ECC write error interrupted\n"); | |
318 | ||
319 | if (value & KS2_DDR3_2B_ECC_ERR_SYS) { | |
320 | puts("DDR3 ECC 2-bit error interrupted\n"); | |
321 | ||
322 | if (!ecc_test) { | |
323 | puts("Reseting the device ...\n"); | |
324 | reset_cpu(0); | |
325 | } | |
326 | } | |
327 | ||
328 | value = __raw_readl(base + KS2_DDR3_ONE_BIT_ECC_ERR_CNT_OFFSET); | |
329 | if (value) { | |
330 | printf("1-bit ECC err count: 0x%x\n", value); | |
331 | value = __raw_readl(base + | |
332 | KS2_DDR3_ONE_BIT_ECC_ERR_ADDR_LOG_OFFSET); | |
333 | printf("1-bit ECC err address log: 0x%x\n", value); | |
334 | } | |
335 | } | |
336 | ||
101eec50 HZ |
337 | void ddr3_reset_ddrphy(void) |
338 | { | |
339 | u32 tmp; | |
340 | ||
341 | /* Assert DDR3A PHY reset */ | |
3d315386 | 342 | tmp = readl(KS2_DDR3APLLCTL1); |
101eec50 | 343 | tmp |= KS2_DDR3_PLLCTRL_PHY_RESET; |
3d315386 | 344 | writel(tmp, KS2_DDR3APLLCTL1); |
101eec50 HZ |
345 | |
346 | /* wait 10us to catch the reset */ | |
347 | udelay(10); | |
348 | ||
349 | /* Release DDR3A PHY reset */ | |
3d315386 | 350 | tmp = readl(KS2_DDR3APLLCTL1); |
101eec50 | 351 | tmp &= ~KS2_DDR3_PLLCTRL_PHY_RESET; |
3d315386 | 352 | __raw_writel(tmp, KS2_DDR3APLLCTL1); |
101eec50 | 353 | } |
6c343825 MK |
354 | |
355 | #ifdef CONFIG_SOC_K2HK | |
/**
 * ddr3_err_reset_workaround - reset workaround in case if leveling error
 * detected for PG 1.0 and 1.1 k2hk SoCs
 *
 * Checks both DDR3A and DDR3B PHYs for leveling errors; when any is
 * found, takes both EMIFs out of reset isolation (so the coming reset
 * fully reinitializes them), configures the PLL controller to drive a
 * hard reset, and reboots the SoC.
 */
void ddr3_err_reset_workaround(void)
{
	unsigned int tmp;
	unsigned int tmp_a;
	unsigned int tmp_b;

	/*
	 * Check for PGSR0 error bits of DDR3 PHY.
	 * Check for WLERR, QSGERR, WLAERR,
	 * RDERR, WDERR, REERR, WEERR error to see if they are set or not
	 */
	tmp_a = __raw_readl(KS2_DDR3A_DDRPHYC + KS2_DDRPHY_PGSR0_OFFSET);
	tmp_b = __raw_readl(KS2_DDR3B_DDRPHYC + KS2_DDRPHY_PGSR0_OFFSET);

	/* 0x0FE00000 masks the seven leveling/training error bits above */
	if (((tmp_a & 0x0FE00000) != 0) || ((tmp_b & 0x0FE00000) != 0)) {
		printf("DDR Leveling Error Detected!\n");
		printf("DDR3A PGSR0 = 0x%x\n", tmp_a);
		printf("DDR3B PGSR0 = 0x%x\n", tmp_b);

		/*
		 * Write Keys to KICK registers to enable writes to registers
		 * in boot config space
		 */
		__raw_writel(KS2_KICK0_MAGIC, KS2_KICK0);
		__raw_writel(KS2_KICK1_MAGIC, KS2_KICK1);

		/*
		 * Move DDR3A Module out of reset isolation by setting
		 * MDCTL23[12] = 0
		 */
		tmp_a = __raw_readl(KS2_PSC_BASE +
				    PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3A));

		tmp_a = PSC_REG_MDCTL_SET_RESET_ISO(tmp_a, 0);
		__raw_writel(tmp_a, KS2_PSC_BASE +
			     PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3A));

		/*
		 * Move DDR3B Module out of reset isolation by setting
		 * MDCTL24[12] = 0
		 */
		tmp_b = __raw_readl(KS2_PSC_BASE +
				    PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3B));
		tmp_b = PSC_REG_MDCTL_SET_RESET_ISO(tmp_b, 0);
		__raw_writel(tmp_b, KS2_PSC_BASE +
			     PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3B));

		/*
		 * Write 0x5A69 Key to RSTCTRL[15:0] to unlock writes
		 * to RSTCTRL and RSTCFG
		 */
		tmp = __raw_readl(KS2_RSTCTRL);
		tmp &= KS2_RSTCTRL_MASK;
		tmp |= KS2_RSTCTRL_KEY;
		__raw_writel(tmp, KS2_RSTCTRL);

		/*
		 * Set PLL Controller to drive hard reset on SW trigger by
		 * setting RSTCFG[13] = 0
		 */
		tmp = __raw_readl(KS2_RSTCTRL_RSCFG);
		tmp &= ~KS2_RSTYPE_PLL_SOFT;
		__raw_writel(tmp, KS2_RSTCTRL_RSCFG);

		reset_cpu(0);
	}
}
427 | #endif |