/*
 * Keystone2: DDR3 initialization
 *
 * (C) Copyright 2012-2014
 *     Texas Instruments Incorporated, <www.ti.com>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */
#include <common.h>
#include <asm/io.h>
#include <asm/arch/hardware.h>
#include <asm/arch/msmc.h>
#include <asm/arch/ddr3.h>
#include <asm/arch/psc_defs.h>
#include <asm/ti-common/ti-edma3.h>
18 #define DDR3_EDMA_BLK_SIZE_SHIFT 10
19 #define DDR3_EDMA_BLK_SIZE (1 << DDR3_EDMA_BLK_SIZE_SHIFT)
20 #define DDR3_EDMA_BCNT 0x8000
21 #define DDR3_EDMA_CCNT 1
22 #define DDR3_EDMA_XF_SIZE (DDR3_EDMA_BLK_SIZE * DDR3_EDMA_BCNT)
23 #define DDR3_EDMA_SLOT_NUM 1
25 void ddr3_init_ddrphy(u32 base
, struct ddr3_phy_config
*phy_cfg
)
29 while ((__raw_readl(base
+ KS2_DDRPHY_PGSR0_OFFSET
)
30 & 0x00000001) != 0x00000001)
33 __raw_writel(phy_cfg
->pllcr
, base
+ KS2_DDRPHY_PLLCR_OFFSET
);
35 tmp
= __raw_readl(base
+ KS2_DDRPHY_PGCR1_OFFSET
);
36 tmp
&= ~(phy_cfg
->pgcr1_mask
);
37 tmp
|= phy_cfg
->pgcr1_val
;
38 __raw_writel(tmp
, base
+ KS2_DDRPHY_PGCR1_OFFSET
);
40 __raw_writel(phy_cfg
->ptr0
, base
+ KS2_DDRPHY_PTR0_OFFSET
);
41 __raw_writel(phy_cfg
->ptr1
, base
+ KS2_DDRPHY_PTR1_OFFSET
);
42 __raw_writel(phy_cfg
->ptr3
, base
+ KS2_DDRPHY_PTR3_OFFSET
);
43 __raw_writel(phy_cfg
->ptr4
, base
+ KS2_DDRPHY_PTR4_OFFSET
);
45 tmp
= __raw_readl(base
+ KS2_DDRPHY_DCR_OFFSET
);
46 tmp
&= ~(phy_cfg
->dcr_mask
);
47 tmp
|= phy_cfg
->dcr_val
;
48 __raw_writel(tmp
, base
+ KS2_DDRPHY_DCR_OFFSET
);
50 __raw_writel(phy_cfg
->dtpr0
, base
+ KS2_DDRPHY_DTPR0_OFFSET
);
51 __raw_writel(phy_cfg
->dtpr1
, base
+ KS2_DDRPHY_DTPR1_OFFSET
);
52 __raw_writel(phy_cfg
->dtpr2
, base
+ KS2_DDRPHY_DTPR2_OFFSET
);
53 __raw_writel(phy_cfg
->mr0
, base
+ KS2_DDRPHY_MR0_OFFSET
);
54 __raw_writel(phy_cfg
->mr1
, base
+ KS2_DDRPHY_MR1_OFFSET
);
55 __raw_writel(phy_cfg
->mr2
, base
+ KS2_DDRPHY_MR2_OFFSET
);
56 __raw_writel(phy_cfg
->dtcr
, base
+ KS2_DDRPHY_DTCR_OFFSET
);
57 __raw_writel(phy_cfg
->pgcr2
, base
+ KS2_DDRPHY_PGCR2_OFFSET
);
59 __raw_writel(phy_cfg
->zq0cr1
, base
+ KS2_DDRPHY_ZQ0CR1_OFFSET
);
60 __raw_writel(phy_cfg
->zq1cr1
, base
+ KS2_DDRPHY_ZQ1CR1_OFFSET
);
61 __raw_writel(phy_cfg
->zq2cr1
, base
+ KS2_DDRPHY_ZQ2CR1_OFFSET
);
63 __raw_writel(phy_cfg
->pir_v1
, base
+ KS2_DDRPHY_PIR_OFFSET
);
64 while ((__raw_readl(base
+ KS2_DDRPHY_PGSR0_OFFSET
) & 0x1) != 0x1)
68 clrsetbits_le32(base
+ KS2_DDRPHY_DATX8_2_OFFSET
,
69 phy_cfg
->datx8_2_mask
,
70 phy_cfg
->datx8_2_val
);
72 clrsetbits_le32(base
+ KS2_DDRPHY_DATX8_3_OFFSET
,
73 phy_cfg
->datx8_3_mask
,
74 phy_cfg
->datx8_3_val
);
76 clrsetbits_le32(base
+ KS2_DDRPHY_DATX8_4_OFFSET
,
77 phy_cfg
->datx8_4_mask
,
78 phy_cfg
->datx8_4_val
);
80 clrsetbits_le32(base
+ KS2_DDRPHY_DATX8_5_OFFSET
,
81 phy_cfg
->datx8_5_mask
,
82 phy_cfg
->datx8_5_val
);
84 clrsetbits_le32(base
+ KS2_DDRPHY_DATX8_6_OFFSET
,
85 phy_cfg
->datx8_6_mask
,
86 phy_cfg
->datx8_6_val
);
88 clrsetbits_le32(base
+ KS2_DDRPHY_DATX8_7_OFFSET
,
89 phy_cfg
->datx8_7_mask
,
90 phy_cfg
->datx8_7_val
);
92 clrsetbits_le32(base
+ KS2_DDRPHY_DATX8_8_OFFSET
,
93 phy_cfg
->datx8_8_mask
,
94 phy_cfg
->datx8_8_val
);
97 __raw_writel(phy_cfg
->pir_v2
, base
+ KS2_DDRPHY_PIR_OFFSET
);
98 while ((__raw_readl(base
+ KS2_DDRPHY_PGSR0_OFFSET
) & 0x1) != 0x1)
102 void ddr3_init_ddremif(u32 base
, struct ddr3_emif_config
*emif_cfg
)
104 __raw_writel(emif_cfg
->sdcfg
, base
+ KS2_DDR3_SDCFG_OFFSET
);
105 __raw_writel(emif_cfg
->sdtim1
, base
+ KS2_DDR3_SDTIM1_OFFSET
);
106 __raw_writel(emif_cfg
->sdtim2
, base
+ KS2_DDR3_SDTIM2_OFFSET
);
107 __raw_writel(emif_cfg
->sdtim3
, base
+ KS2_DDR3_SDTIM3_OFFSET
);
108 __raw_writel(emif_cfg
->sdtim4
, base
+ KS2_DDR3_SDTIM4_OFFSET
);
109 __raw_writel(emif_cfg
->zqcfg
, base
+ KS2_DDR3_ZQCFG_OFFSET
);
110 __raw_writel(emif_cfg
->sdrfc
, base
+ KS2_DDR3_SDRFC_OFFSET
);
113 int ddr3_ecc_support_rmw(u32 base
)
115 u32 value
= __raw_readl(base
+ KS2_DDR3_MIDR_OFFSET
);
117 /* Check the DDR3 controller ID reg if the controllers
118 supports ECC RMW or not */
119 if (value
== 0x40461C02)
125 static void ddr3_ecc_config(u32 base
, u32 value
)
129 __raw_writel(value
, base
+ KS2_DDR3_ECC_CTRL_OFFSET
);
130 udelay(100000); /* delay required to synchronize across clock domains */
132 if (value
& KS2_DDR3_ECC_EN
) {
133 /* Clear the 1-bit error count */
134 data
= __raw_readl(base
+ KS2_DDR3_ONE_BIT_ECC_ERR_CNT_OFFSET
);
135 __raw_writel(data
, base
+ KS2_DDR3_ONE_BIT_ECC_ERR_CNT_OFFSET
);
137 /* enable the ECC interrupt */
138 __raw_writel(KS2_DDR3_1B_ECC_ERR_SYS
| KS2_DDR3_2B_ECC_ERR_SYS
|
139 KS2_DDR3_WR_ECC_ERR_SYS
,
140 base
+ KS2_DDR3_ECC_INT_ENABLE_SET_SYS_OFFSET
);
142 /* Clear the ECC error interrupt status */
143 __raw_writel(KS2_DDR3_1B_ECC_ERR_SYS
| KS2_DDR3_2B_ECC_ERR_SYS
|
144 KS2_DDR3_WR_ECC_ERR_SYS
,
145 base
+ KS2_DDR3_ECC_INT_STATUS_OFFSET
);
/**
 * ddr3_reset_data() - scrub DDR3 memory so the ECC check bits are valid
 * @base:      32-bit DDR3 base address as seen by the EMIF slave port
 * @ddr3_size: DDR3 size in GB
 *
 * Zeroes the whole DDR3 by repeatedly EDMA-copying a zeroed, 16-byte
 * aligned 1KB source block across memory.  Because the DDR spans a
 * 36-bit address range, the loop remaps it window-by-window into the
 * 32-bit EMIF access slave interface using the MSMC MPAX segment
 * registers; the SES MPAX registers are saved on entry and restored on
 * exit.
 *
 * NOTE(review): several lines of this function were not visible in this
 * chunk (cpu_is_k2g() selection, zeroed slot index fields, the
 * MPAX_SEG_2G map-size argument, the transfer-wait body) and were
 * reconstructed — confirm against source control before relying on them.
 */
static void ddr3_reset_data(u32 base, u32 ddr3_size)
{
	u32 mpax[2];
	u32 seg_num;
	u32 seg, blks, dst, edma_blks;
	struct edma3_slot_config slot;
	struct edma3_channel_config edma_channel;
	u32 edma_src[DDR3_EDMA_BLK_SIZE/4] __aligned(16) = {0, };

	/* Setup an edma to copy the 1k block to the entire DDR */
	puts("\nClear entire DDR3 memory to enable ECC\n");

	/* save the SES MPAX regs */
	if (cpu_is_k2g())
		msmc_get_ses_mpax(K2G_MSMC_SEGMENT_ARM, 0, mpax);
	else
		msmc_get_ses_mpax(K2HKLE_MSMC_SEGMENT_ARM, 0, mpax);

	/* setup edma slot 1 configuration */
	slot.opt = EDMA3_SLOPT_TRANS_COMP_INT_ENB |
		   EDMA3_SLOPT_COMP_CODE(0) |
		   EDMA3_SLOPT_STATIC | EDMA3_SLOPT_AB_SYNC;
	slot.bcnt = DDR3_EDMA_BCNT;
	slot.acnt = DDR3_EDMA_BLK_SIZE;
	slot.ccnt = DDR3_EDMA_CCNT;
	/* src index 0: re-read the same zeroed source block every burst */
	slot.src_bidx = 0;
	slot.dst_bidx = DDR3_EDMA_BLK_SIZE;
	slot.src_cidx = 0;
	slot.dst_cidx = 0;
	slot.link = EDMA3_PARSET_NULL_LINK;

	edma3_slot_configure(KS2_EDMA0_BASE, DDR3_EDMA_SLOT_NUM, &slot);

	/* configure quik edma channel */
	edma_channel.slot = DDR3_EDMA_SLOT_NUM;
	edma_channel.chnum = 0;
	edma_channel.complete_code = 0;
	/* event trigger after dst update */
	edma_channel.trigger_slot_word = EDMA3_TWORD(dst);
	qedma3_start(KS2_EDMA0_BASE, &edma_channel);

	/* DDR3 size in segments (4KB seg size) */
	seg_num = ddr3_size << (30 - KS2_MSMC_SEG_SIZE_SHIFT);

	for (seg = 0; seg < seg_num; seg += KS2_MSMC_MAP_SEG_NUM) {
		/* map 2GB 36-bit DDR address to 32-bit DDR address in EMIF
		   access slave interface so that edma driver can access */
		if (cpu_is_k2g()) {
			msmc_map_ses_segment(K2G_MSMC_SEGMENT_ARM, 0,
					     base >> KS2_MSMC_SEG_SIZE_SHIFT,
					     KS2_MSMC_DST_SEG_BASE + seg,
					     MPAX_SEG_2G);
		} else {
			msmc_map_ses_segment(K2HKLE_MSMC_SEGMENT_ARM, 0,
					     base >> KS2_MSMC_SEG_SIZE_SHIFT,
					     KS2_MSMC_DST_SEG_BASE + seg,
					     MPAX_SEG_2G);
		}

		/* blocks to scrub in this window: full 2GB map or the tail */
		if ((seg_num - seg) > KS2_MSMC_MAP_SEG_NUM)
			edma_blks = KS2_MSMC_MAP_SEG_NUM <<
					(KS2_MSMC_SEG_SIZE_SHIFT
					- DDR3_EDMA_BLK_SIZE_SHIFT);
		else
			edma_blks = (seg_num - seg) <<
					(KS2_MSMC_SEG_SIZE_SHIFT
					- DDR3_EDMA_BLK_SIZE_SHIFT);

		/* Use edma driver to scrub 2GB DDR memory */
		for (dst = base, blks = 0; blks < edma_blks;
		     blks += DDR3_EDMA_BCNT, dst += DDR3_EDMA_XF_SIZE) {
			edma3_set_src_addr(KS2_EDMA0_BASE,
					   edma_channel.slot, (u32)edma_src);
			edma3_set_dest_addr(KS2_EDMA0_BASE,
					    edma_channel.slot, (u32)dst);

			/* busy-wait for this transfer to complete */
			while (edma3_check_for_transfer(KS2_EDMA0_BASE,
							&edma_channel))
				;
		}
	}

	qedma3_stop(KS2_EDMA0_BASE, &edma_channel);

	/* restore the SES MPAX regs */
	if (cpu_is_k2g())
		msmc_set_ses_mpax(K2G_MSMC_SEGMENT_ARM, 0, mpax);
	else
		msmc_set_ses_mpax(K2HKLE_MSMC_SEGMENT_ARM, 0, mpax);
}
239 static void ddr3_ecc_init_range(u32 base
)
241 u32 ecc_val
= KS2_DDR3_ECC_EN
;
242 u32 rmw
= ddr3_ecc_support_rmw(base
);
245 ecc_val
|= KS2_DDR3_ECC_RMW_EN
;
247 __raw_writel(0, base
+ KS2_DDR3_ECC_ADDR_RANGE1_OFFSET
);
249 ddr3_ecc_config(base
, ecc_val
);
/**
 * ddr3_enable_ecc() - enable DDR3 ECC protection
 * @base: DDR3 EMIF controller base address
 * @test: non-zero when called for an ECC test, which additionally
 *        enables address-range-1 checking and keeps ECC on even when
 *        the controller lacks RMW support
 *
 * NOTE(review): the nested conditional below was partially hidden in
 * this chunk and has been reconstructed — confirm against source
 * control.
 */
void ddr3_enable_ecc(u32 base, int test)
{
	u32 ecc_val = KS2_DDR3_ECC_ENABLE;
	u32 rmw = ddr3_ecc_support_rmw(base);

	if (test)
		ecc_val |= KS2_DDR3_ECC_ADDR_RNG_1_EN;

	if (!rmw) {
		if (!test)
			/* by default, disable ecc when rmw = 0 and no
			   ecc test */
			ecc_val = 0;
	} else {
		ecc_val |= KS2_DDR3_ECC_RMW_EN;
	}

	ddr3_ecc_config(base, ecc_val);
}
272 void ddr3_disable_ecc(u32 base
)
274 ddr3_ecc_config(base
, 0);
277 #if defined(CONFIG_SOC_K2HK) || defined(CONFIG_SOC_K2L)
278 static void cic_init(u32 base
)
280 /* Disable CIC global interrupts */
281 __raw_writel(0, base
+ KS2_CIC_GLOBAL_ENABLE
);
283 /* Set to normal mode, no nesting, no priority hold */
284 __raw_writel(0, base
+ KS2_CIC_CTRL
);
285 __raw_writel(0, base
+ KS2_CIC_HOST_CTRL
);
287 /* Enable CIC global interrupts */
288 __raw_writel(1, base
+ KS2_CIC_GLOBAL_ENABLE
);
291 static void cic_map_cic_to_gic(u32 base
, u32 chan_num
, u32 irq_num
)
293 /* Map the system interrupt to a CIC channel */
294 __raw_writeb(chan_num
, base
+ KS2_CIC_CHAN_MAP(0) + irq_num
);
296 /* Enable CIC system interrupt */
297 __raw_writel(irq_num
, base
+ KS2_CIC_SYS_ENABLE_IDX_SET
);
299 /* Enable CIC Host interrupt */
300 __raw_writel(chan_num
, base
+ KS2_CIC_HOST_ENABLE_IDX_SET
);
/**
 * ddr3_map_ecc_cic2_irq() - route the DDR3 ECC interrupt from CIC2 to GIC
 * @base: CIC2 register base address
 *
 * NOTE(review): the cic_init() call was hidden in this chunk and has
 * been reconstructed (CIC2 must be initialized before routing) —
 * confirm against source control.
 */
static void ddr3_map_ecc_cic2_irq(u32 base)
{
	cic_init(base);
	cic_map_cic_to_gic(base, KS2_CIC2_DDR3_ECC_CHAN_NUM,
			   KS2_CIC2_DDR3_ECC_IRQ_NUM);
}
/**
 * ddr3_init_ecc() - full DDR3 ECC bring-up
 * @base:      DDR3 EMIF controller base address
 * @ddr3_size: DDR3 size in GB (passed to the memory scrub)
 *
 * If the controller revision does not support ECC read-modify-write,
 * ECC is simply disabled.  Otherwise: configure the ECC address range,
 * scrub all of DDR so the check bits are consistent, route the ECC
 * interrupt (K2HK/K2L only), and finally enable ECC.
 */
void ddr3_init_ecc(u32 base, u32 ddr3_size)
{
	if (!ddr3_ecc_support_rmw(base)) {
		ddr3_disable_ecc(base);
		return;
	}

	ddr3_ecc_init_range(base);
	ddr3_reset_data(CONFIG_SYS_SDRAM_BASE, ddr3_size);

	/* mapping DDR3 ECC system interrupt from CIC2 to GIC */
#if defined(CONFIG_SOC_K2HK) || defined(CONFIG_SOC_K2L)
	ddr3_map_ecc_cic2_irq(KS2_CIC2_BASE);
#endif

	ddr3_enable_ecc(base, 0);
}
328 void ddr3_check_ecc_int(u32 base
)
332 u32 value
= __raw_readl(base
+ KS2_DDR3_ECC_INT_STATUS_OFFSET
);
334 env
= env_get("ecc_test");
336 ecc_test
= simple_strtol(env
, NULL
, 0);
338 if (value
& KS2_DDR3_WR_ECC_ERR_SYS
)
339 puts("DDR3 ECC write error interrupted\n");
341 if (value
& KS2_DDR3_2B_ECC_ERR_SYS
) {
342 puts("DDR3 ECC 2-bit error interrupted\n");
345 puts("Reseting the device ...\n");
350 value
= __raw_readl(base
+ KS2_DDR3_ONE_BIT_ECC_ERR_CNT_OFFSET
);
352 printf("1-bit ECC err count: 0x%x\n", value
);
353 value
= __raw_readl(base
+
354 KS2_DDR3_ONE_BIT_ECC_ERR_ADDR_LOG_OFFSET
);
355 printf("1-bit ECC err address log: 0x%x\n", value
);
359 void ddr3_reset_ddrphy(void)
363 /* Assert DDR3A PHY reset */
364 tmp
= readl(KS2_DDR3APLLCTL1
);
365 tmp
|= KS2_DDR3_PLLCTRL_PHY_RESET
;
366 writel(tmp
, KS2_DDR3APLLCTL1
);
368 /* wait 10us to catch the reset */
371 /* Release DDR3A PHY reset */
372 tmp
= readl(KS2_DDR3APLLCTL1
);
373 tmp
&= ~KS2_DDR3_PLLCTRL_PHY_RESET
;
374 __raw_writel(tmp
, KS2_DDR3APLLCTL1
);
377 #ifdef CONFIG_SOC_K2HK
379 * ddr3_reset_workaround - reset workaround in case if leveling error
380 * detected for PG 1.0 and 1.1 k2hk SoCs
382 void ddr3_err_reset_workaround(void)
389 * Check for PGSR0 error bits of DDR3 PHY.
390 * Check for WLERR, QSGERR, WLAERR,
391 * RDERR, WDERR, REERR, WEERR error to see if they are set or not
393 tmp_a
= __raw_readl(KS2_DDR3A_DDRPHYC
+ KS2_DDRPHY_PGSR0_OFFSET
);
394 tmp_b
= __raw_readl(KS2_DDR3B_DDRPHYC
+ KS2_DDRPHY_PGSR0_OFFSET
);
396 if (((tmp_a
& 0x0FE00000) != 0) || ((tmp_b
& 0x0FE00000) != 0)) {
397 printf("DDR Leveling Error Detected!\n");
398 printf("DDR3A PGSR0 = 0x%x\n", tmp_a
);
399 printf("DDR3B PGSR0 = 0x%x\n", tmp_b
);
402 * Write Keys to KICK registers to enable writes to registers
403 * in boot config space
405 __raw_writel(KS2_KICK0_MAGIC
, KS2_KICK0
);
406 __raw_writel(KS2_KICK1_MAGIC
, KS2_KICK1
);
409 * Move DDR3A Module out of reset isolation by setting
412 tmp_a
= __raw_readl(KS2_PSC_BASE
+
413 PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3A
));
415 tmp_a
= PSC_REG_MDCTL_SET_RESET_ISO(tmp_a
, 0);
416 __raw_writel(tmp_a
, KS2_PSC_BASE
+
417 PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3A
));
420 * Move DDR3B Module out of reset isolation by setting
423 tmp_b
= __raw_readl(KS2_PSC_BASE
+
424 PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3B
));
425 tmp_b
= PSC_REG_MDCTL_SET_RESET_ISO(tmp_b
, 0);
426 __raw_writel(tmp_b
, KS2_PSC_BASE
+
427 PSC_REG_MDCTL(KS2_LPSC_EMIF4F_DDR3B
));
430 * Write 0x5A69 Key to RSTCTRL[15:0] to unlock writes
431 * to RSTCTRL and RSTCFG
433 tmp
= __raw_readl(KS2_RSTCTRL
);
434 tmp
&= KS2_RSTCTRL_MASK
;
435 tmp
|= KS2_RSTCTRL_KEY
;
436 __raw_writel(tmp
, KS2_RSTCTRL
);
439 * Set PLL Controller to drive hard reset on SW trigger by
440 * setting RSTCFG[13] = 0
442 tmp
= __raw_readl(KS2_RSTCTRL_RSCFG
);
443 tmp
&= ~KS2_RSTYPE_PLL_SOFT
;
444 __raw_writel(tmp
, KS2_RSTCTRL_RSCFG
);