/*
 * (C) Copyright 2015 Google, Inc
 * Copyright 2014 Rockchip Inc.
 *
 * SPDX-License-Identifier: GPL-2.0
 *
 * Adapted from coreboot.
 */

#include <common.h>
#include <clk.h>
#include <dm.h>
#include <errno.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/cru_rk3288.h>
#include <asm/arch/ddr_rk3288.h>
#include <asm/arch/grf_rk3288.h>
#include <asm/arch/pmu_rk3288.h>
#include <asm/arch/sdram.h>
#include <linux/err.h>
#include <power/regulator.h>
#include <power/rk808_pmic.h>

DECLARE_GLOBAL_DATA_PTR;

struct chan_info {
	struct rk3288_ddr_pctl *pctl;
	struct rk3288_ddr_publ *publ;
	struct rk3288_msch *msch;
};

struct dram_info {
	struct chan_info chan[2];
	struct ram_info info;
	struct udevice *ddr_clk;
	struct rk3288_cru *cru;
	struct rk3288_grf *grf;
	struct rk3288_sgrf *sgrf;
	struct rk3288_pmu *pmu;
};

#ifdef CONFIG_SPL_BUILD
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

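	/* 'n' is a byte count; copy one 32-bit word per iteration */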
	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

static void ddr_reset(struct rk3288_cru *cru, u32 ch, u32 ctl, u32 phy)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;
	u32 ctl_psrstn_shift = 3 + 5 * ch;
	u32 ctl_srstn_shift = 2 + 5 * ch;
	u32 phy_psrstn_shift = 1 + 5 * ch;
	u32 phy_srstn_shift = 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift | 1 << ctl_psrstn_shift |
		     1 << ctl_srstn_shift | 1 << phy_psrstn_shift |
		     1 << phy_srstn_shift,
		     phy << phy_ctl_srstn_shift | ctl << ctl_psrstn_shift |
		     ctl << ctl_srstn_shift | phy << phy_psrstn_shift |
		     phy << phy_srstn_shift);
}

static void ddr_phy_ctl_reset(struct rk3288_cru *cru, u32 ch, u32 n)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift, n << phy_ctl_srstn_shift);
}

static void phy_pctrl_reset(struct rk3288_cru *cru,
			    struct rk3288_ddr_publ *publ,
			    u32 channel)
{
	int i;

	ddr_reset(cru, channel, 1, 1);
	udelay(1);
	clrbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		clrbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	udelay(10);
	setbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		setbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	udelay(10);
	ddr_reset(cru, channel, 1, 0);
	udelay(10);
	ddr_reset(cru, channel, 0, 0);
	udelay(10);
}

static void phy_dll_bypass_set(struct rk3288_ddr_publ *publ,
			       u32 freq)
{
	int i;

	if (freq <= 250000000) {
		if (freq <= 150000000)
			clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		else
			setbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		setbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxdllcr,
				     DXDLLCR_DLLDIS);

		setbits_le32(&publ->pir, PIR_DLLBYP);
	} else {
		clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		clrbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++) {
			clrbits_le32(&publ->datx8[i].dxdllcr,
				     DXDLLCR_DLLDIS);
		}

		clrbits_le32(&publ->pir, PIR_DLLBYP);
	}
}

static void dfi_cfg(struct rk3288_ddr_pctl *pctl, u32 dramtype)
{
	writel(DFI_INIT_START, &pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN,
	       &pctl->dfistcfg1);
	writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
	writel(7 << TLP_RESP_TIME_SHIFT | LP_SR_EN | LP_PD_EN,
	       &pctl->dfilpcfg0);

	writel(2 << TCTRL_DELAY_TIME_SHIFT, &pctl->dfitctrldelay);
	writel(1 << TPHY_WRDATA_TIME_SHIFT, &pctl->dfitphywrdata);
	writel(0xf << TPHY_RDLAT_TIME_SHIFT, &pctl->dfitphyrdlat);
	writel(2 << TDRAM_CLK_DIS_TIME_SHIFT, &pctl->dfitdramclkdis);
	writel(2 << TDRAM_CLK_EN_TIME_SHIFT, &pctl->dfitdramclken);
	writel(1, &pctl->dfitphyupdtype0);

	/* cs0 and cs1 write odt enable */
	writel((RANK0_ODT_WRITE_SEL | RANK1_ODT_WRITE_SEL),
	       &pctl->dfiodtcfg);
	/* odt write length */
	writel(7 << ODT_LEN_BL8_W_SHIFT, &pctl->dfiodtcfg1);
	/* phyupd and ctrlupd disabled */
	writel(0, &pctl->dfiupdcfg);
}

static void ddr_set_enable(struct rk3288_grf *grf, uint channel, bool enable)
{
	uint val = 0;

	if (enable) {
		val = 1 << (channel ? DDR1_16BIT_EN_SHIFT :
			    DDR0_16BIT_EN_SHIFT);
	}
	rk_clrsetreg(&grf->soc_con0,
		     1 << (channel ? DDR1_16BIT_EN_SHIFT : DDR0_16BIT_EN_SHIFT),
		     val);
}

static void ddr_set_ddr3_mode(struct rk3288_grf *grf, uint channel,
			      bool ddr3_mode)
{
	uint mask, val;

	mask = 1 << (channel ? MSCH1_MAINDDR3_SHIFT : MSCH0_MAINDDR3_SHIFT);
	val = ddr3_mode << (channel ? MSCH1_MAINDDR3_SHIFT :
			    MSCH0_MAINDDR3_SHIFT);
	rk_clrsetreg(&grf->soc_con0, mask, val);
}

static void ddr_set_en_bst_odt(struct rk3288_grf *grf, uint channel,
			       bool enable, bool enable_bst, bool enable_odt)
{
	uint mask;
	bool disable_bst = !enable_bst;

	mask = channel ?
		(1 << LPDDR3_EN1_SHIFT | 1 << UPCTL1_BST_DIABLE_SHIFT |
		 1 << UPCTL1_LPDDR3_ODT_EN_SHIFT) :
		(1 << LPDDR3_EN0_SHIFT | 1 << UPCTL0_BST_DIABLE_SHIFT |
		 1 << UPCTL0_LPDDR3_ODT_EN_SHIFT);
	rk_clrsetreg(&grf->soc_con2, mask,
		     enable << (channel ? LPDDR3_EN1_SHIFT : LPDDR3_EN0_SHIFT) |
		     disable_bst << (channel ? UPCTL1_BST_DIABLE_SHIFT :
				     UPCTL0_BST_DIABLE_SHIFT) |
		     enable_odt << (channel ? UPCTL1_LPDDR3_ODT_EN_SHIFT :
				    UPCTL0_LPDDR3_ODT_EN_SHIFT));
}

static void pctl_cfg(u32 channel, struct rk3288_ddr_pctl *pctl,
		     const struct rk3288_sdram_params *sdram_params,
		     struct rk3288_grf *grf)
{
	unsigned int burstlen;

	burstlen = (sdram_params->base.noc_timing >> 18) & 0x7;
	copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
		    sizeof(sdram_params->pctl_timing));
	switch (sdram_params->base.dramtype) {
	case LPDDR3:
		writel(sdram_params->pctl_timing.tcl - 1,
		       &pctl->dfitrddataen);
		writel(sdram_params->pctl_timing.tcwl,
		       &pctl->dfitphywrlat);
		burstlen >>= 1;
		writel(LPDDR2_S4 | 0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
		       LPDDR2_EN | burstlen << BURSTLENGTH_SHIFT |
		       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		ddr_set_ddr3_mode(grf, channel, false);
		ddr_set_enable(grf, channel, true);
		ddr_set_en_bst_odt(grf, channel, true, false,
				   sdram_params->base.odt);
		break;
	case DDR3:
		if (sdram_params->phy_timing.mr[1] & DDR3_DLL_DISABLE) {
			writel(sdram_params->pctl_timing.tcl - 3,
			       &pctl->dfitrddataen);
		} else {
			writel(sdram_params->pctl_timing.tcl - 2,
			       &pctl->dfitrddataen);
		}
		writel(sdram_params->pctl_timing.tcwl - 1,
		       &pctl->dfitphywrlat);
		writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
		       DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		ddr_set_ddr3_mode(grf, channel, true);
		ddr_set_enable(grf, channel, true);

		ddr_set_en_bst_odt(grf, channel, false, true, false);
		break;
	}

	setbits_le32(&pctl->scfg, 1);
}

static void phy_cfg(const struct chan_info *chan, u32 channel,
		    const struct rk3288_sdram_params *sdram_params)
{
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;
	uint ddr_freq_mhz = sdram_params->base.ddr_freq / 1000000;
	u32 dinit2, tmp;
	int i;

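	/* dinit2: 200us expressed in DDR clock cycles */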
	dinit2 = DIV_ROUND_UP(ddr_freq_mhz * 200000, 1000);
	/* DDR PHY Timing */
	copy_to_reg(&publ->dtpr[0], &sdram_params->phy_timing.dtpr0,
		    sizeof(sdram_params->phy_timing));
	writel(sdram_params->base.noc_timing, &msch->ddrtiming);
	writel(0x3f, &msch->readlatency);
	writel(sdram_params->base.noc_activate, &msch->activate);
	writel(2 << BUSWRTORD_SHIFT | 2 << BUSRDTOWR_SHIFT |
	       1 << BUSRDTORD_SHIFT, &msch->devtodev);
	writel(DIV_ROUND_UP(ddr_freq_mhz * 5120, 1000) << PRT_DLLLOCK_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 50, 1000) << PRT_DLLSRST_SHIFT |
	       8 << PRT_ITMSRST_SHIFT, &publ->ptr[0]);
	writel(DIV_ROUND_UP(ddr_freq_mhz * 500000, 1000) << PRT_DINIT0_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 400, 1000) << PRT_DINIT1_SHIFT,
	       &publ->ptr[1]);
	writel(min(dinit2, 0x1ffffU) << PRT_DINIT2_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 1000, 1000) << PRT_DINIT3_SHIFT,
	       &publ->ptr[2]);

	switch (sdram_params->base.dramtype) {
	case LPDDR3:
		clrsetbits_le32(&publ->pgcr, 0x1F,
				0 << PGCR_DFTLMT_SHIFT |
				0 << PGCR_DFTCMP_SHIFT |
				1 << PGCR_DQSCFG_SHIFT |
				0 << PGCR_ITMDMD_SHIFT);
		/* DDRMODE select LPDDR3 */
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_LPDDR2_LPDDR3 << DDRMD_SHIFT);
		clrsetbits_le32(&publ->dxccr,
				DQSNRES_MASK << DQSNRES_SHIFT |
				DQSRES_MASK << DQSRES_SHIFT,
				4 << DQSRES_SHIFT | 0xc << DQSNRES_SHIFT);
		tmp = readl(&publ->dtpr[1]);
		tmp = ((tmp >> TDQSCKMAX_SHIFT) & TDQSCKMAX_MASK) -
		      ((tmp >> TDQSCK_SHIFT) & TDQSCK_MASK);
		clrsetbits_le32(&publ->dsgcr,
				DQSGE_MASK << DQSGE_SHIFT |
				DQSGX_MASK << DQSGX_SHIFT,
				tmp << DQSGE_SHIFT | tmp << DQSGX_SHIFT);
		break;
	case DDR3:
		clrbits_le32(&publ->pgcr, 0x1f);
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_DDR3 << DDRMD_SHIFT);
		break;
	}
	if (sdram_params->base.odt) {
		/* dynamic RTT enable */
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
	} else {
		/* dynamic RTT disable */
		for (i = 0; i < 4; i++)
			clrbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
	}
}

static void phy_init(struct rk3288_ddr_publ *publ)
{
	setbits_le32(&publ->pir, PIR_INIT | PIR_DLLSRST
		     | PIR_DLLLOCK | PIR_ZCAL | PIR_ITMSRST | PIR_CLRSR);
	udelay(1);
	while ((readl(&publ->pgsr) &
		(PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE)) !=
	       (PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE))
		;
}

static void send_command(struct rk3288_ddr_pctl *pctl, u32 rank,
			 u32 cmd, u32 arg)
{
	writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
	udelay(1);
	while (readl(&pctl->mcmd) & START_CMD)
		;
}

static inline void send_command_op(struct rk3288_ddr_pctl *pctl,
				   u32 rank, u32 cmd, u32 ma, u32 op)
{
	send_command(pctl, rank, cmd, (ma & LPDDR2_MA_MASK) << LPDDR2_MA_SHIFT |
		     (op & LPDDR2_OP_MASK) << LPDDR2_OP_SHIFT);
}

static void memory_init(struct rk3288_ddr_publ *publ,
			u32 dramtype)
{
	setbits_le32(&publ->pir,
		     (PIR_INIT | PIR_DRAMINIT | PIR_LOCKBYP
		      | PIR_ZCALBYP | PIR_CLRSR | PIR_ICPC
		      | (dramtype == DDR3 ? PIR_DRAMRST : 0)));
	udelay(1);
	while ((readl(&publ->pgsr) & (PGSR_IDONE | PGSR_DLDONE))
	       != (PGSR_IDONE | PGSR_DLDONE))
		;
}

static void move_to_config_state(struct rk3288_ddr_publ *publ,
				 struct rk3288_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MSK;

		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK)
			       != ACCESS)
				;
			/* wait for DLL lock */
			while ((readl(&publ->pgsr) & PGSR_DLDONE)
			       != PGSR_DLDONE)
				;
			/*
			 * If in the low-power state we must wake up first
			 * and only then enter the config state, so do not
			 * break here.
			 */
		case ACCESS:
			/* no break */
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
				;
			break;
		case CONFIG:
			return;
		default:
			break;
		}
	}
}

static void set_bandwidth_ratio(const struct chan_info *chan, u32 channel,
				u32 n, struct rk3288_grf *grf)
{
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;

	if (n == 1) {
		setbits_le32(&pctl->ppcfg, 1);
		rk_setreg(&grf->soc_con0, 1 << (8 + channel));
		setbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data Byte disable */
		clrbits_le32(&publ->datx8[2].dxgcr, 1);
		clrbits_le32(&publ->datx8[3].dxgcr, 1);
		/* disable DLL */
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
	} else {
		clrbits_le32(&pctl->ppcfg, 1);
		rk_clrreg(&grf->soc_con0, 1 << (8 + channel));
		clrbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data Byte enable */
		setbits_le32(&publ->datx8[2].dxgcr, 1);
		setbits_le32(&publ->datx8[3].dxgcr, 1);

		/* enable DLL */
		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
		/* reset DLL */
		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
		udelay(10);
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
	}
	setbits_le32(&pctl->dfistcfg0, 1 << 2);
}

static int data_training(const struct chan_info *chan, u32 channel,
			 const struct rk3288_sdram_params *sdram_params)
{
	unsigned int j;
	int ret = 0;
	u32 rank;
	int i;
	u32 step[2] = { PIR_QSTRN, PIR_RVTRN };
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;

	/* disable auto refresh */
	writel(0, &pctl->trefi);

	if (sdram_params->base.dramtype != LPDDR3)
		setbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);
	rank = sdram_params->ch[channel].rank | 1;
	for (j = 0; j < ARRAY_SIZE(step); j++) {
		/*
		 * trigger QSTRN and RVTRN
		 * clear DTDONE status
		 */
		setbits_le32(&publ->pir, PIR_CLRSR);

		/* trigger DTT */
		setbits_le32(&publ->pir,
			     PIR_INIT | step[j] | PIR_LOCKBYP | PIR_ZCALBYP |
			     PIR_CLRSR);
		udelay(1);
		/* wait for the echo byte DTDONE */
		while ((readl(&publ->datx8[0].dxgsr[0]) & rank)
		       != rank)
			;
		while ((readl(&publ->datx8[1].dxgsr[0]) & rank)
		       != rank)
			;
		if (!(readl(&pctl->ppcfg) & 1)) {
			while ((readl(&publ->datx8[2].dxgsr[0])
				& rank) != rank)
				;
			while ((readl(&publ->datx8[3].dxgsr[0])
				& rank) != rank)
				;
		}
		if (readl(&publ->pgsr) &
		    (PGSR_DTERR | PGSR_RVERR | PGSR_RVEIRR)) {
			ret = -1;
			break;
		}
	}
	/* send a few auto refreshes to make up for those lost during DTT */
	for (i = 0; i < (rank > 1 ? 8 : 4); i++)
		send_command(pctl, rank, REF_CMD, 0);

	if (sdram_params->base.dramtype != LPDDR3)
		clrbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);

	/* resume auto refresh */
	writel(sdram_params->pctl_timing.trefi, &pctl->trefi);

	return ret;
}

static void move_to_access_state(const struct chan_info *chan)
{
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MSK;

		switch (state) {
		case LOW_POWER:
			if (((readl(&pctl->stat) >> LP_TRIG_SHIFT) &
			     LP_TRIG_MASK) == 1)
				return;

			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS)
				;
			/* wait for DLL lock */
			while ((readl(&publ->pgsr) & PGSR_DLDONE)
			       != PGSR_DLDONE)
				;
			break;
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
				;
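			/* fall through to CONFIG to request the GO state */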
		case CONFIG:
			writel(GO_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) == CONFIG)
				;
			break;
		case ACCESS:
			return;
		default:
			break;
		}
	}
}

static void dram_cfg_rbc(const struct chan_info *chan, u32 chnum,
			 const struct rk3288_sdram_params *sdram_params)
{
	struct rk3288_ddr_publ *publ = chan->publ;

	if (sdram_params->ch[chnum].bk == 3)
		clrsetbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT,
				1 << PDQ_SHIFT);
	else
		clrbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT);

	writel(sdram_params->base.ddrconfig, &chan->msch->ddrconf);
}

static void dram_all_config(const struct dram_info *dram,
			    const struct rk3288_sdram_params *sdram_params)
{
	unsigned int chan;
	u32 sys_reg = 0;

	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (sdram_params->num_channels - 1) << SYS_REG_NUM_CH_SHIFT;
	for (chan = 0; chan < sdram_params->num_channels; chan++) {
		const struct rk3288_sdram_channel *info =
			&sdram_params->ch[chan];

		sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(chan);
		sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(chan);
		sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(chan);
		sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(chan);
		sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(chan);
		sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(chan);
		sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(chan);
		sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(chan);
		sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(chan);

		dram_cfg_rbc(&dram->chan[chan], chan, sdram_params);
	}
	writel(sys_reg, &dram->pmu->sys_reg[2]);
	rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, sdram_params->base.stride);
}

static int sdram_init(const struct dram_info *dram,
		      const struct rk3288_sdram_params *sdram_params)
{
	int channel;
	int zqcr;
	int ret;

	debug("%s start\n", __func__);
	if ((sdram_params->base.dramtype == DDR3 &&
	     sdram_params->base.ddr_freq > 800000000) ||
	    (sdram_params->base.dramtype == LPDDR3 &&
	     sdram_params->base.ddr_freq > 533000000)) {
		debug("SDRAM frequency is too high!\n");
		return -E2BIG;
	}

	debug("ddr clk %s\n", dram->ddr_clk->name);
	ret = clk_set_rate(dram->ddr_clk, sdram_params->base.ddr_freq);
	debug("ret=%d\n", ret);
	if (ret) {
		debug("Could not set DDR clock\n");
		return ret;
	}

	for (channel = 0; channel < 2; channel++) {
		const struct chan_info *chan = &dram->chan[channel];
		struct rk3288_ddr_pctl *pctl = chan->pctl;
		struct rk3288_ddr_publ *publ = chan->publ;

		phy_pctrl_reset(dram->cru, publ, channel);
		phy_dll_bypass_set(publ, sdram_params->base.ddr_freq);

		if (channel >= sdram_params->num_channels)
			continue;

		dfi_cfg(pctl, sdram_params->base.dramtype);

		pctl_cfg(channel, pctl, sdram_params, dram->grf);

		phy_cfg(chan, channel, sdram_params);

		phy_init(publ);

		writel(POWER_UP_START, &pctl->powctl);
		while (!(readl(&pctl->powstat) & POWER_UP_DONE))
			;

		memory_init(publ, sdram_params->base.dramtype);
		move_to_config_state(publ, pctl);

		if (sdram_params->base.dramtype == LPDDR3) {
			send_command(pctl, 3, DESELECT_CMD, 0);
			udelay(1);
			send_command(pctl, 3, PREA_CMD, 0);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 63, 0xfc);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 1,
					sdram_params->phy_timing.mr[1]);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 2,
					sdram_params->phy_timing.mr[2]);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 3,
					sdram_params->phy_timing.mr[3]);
			udelay(1);
		}

		set_bandwidth_ratio(chan, channel,
				    sdram_params->ch[channel].bw, dram->grf);
		/*
		 * set cs:
		 *  CS0, n = 1
		 *  CS1, n = 2
		 *  CS0 & CS1, n = 3
		 */
		clrsetbits_le32(&publ->pgcr, 0xF << 18,
				(sdram_params->ch[channel].rank | 1) << 18);
		/* DS=40ohm, ODT=155ohm */
		zqcr = 1 << ZDEN_SHIFT | 2 << PU_ONDIE_SHIFT |
		       2 << PD_ONDIE_SHIFT | 0x19 << PU_OUTPUT_SHIFT |
		       0x19 << PD_OUTPUT_SHIFT;
		writel(zqcr, &publ->zq1cr[0]);
		writel(zqcr, &publ->zq0cr[0]);

		if (sdram_params->base.dramtype == LPDDR3) {
			/* LPDDR2/LPDDR3 must wait for DAI to complete, max 10us */
			udelay(10);
			send_command_op(pctl,
					sdram_params->ch[channel].rank | 1,
					MRS_CMD, 11,
					sdram_params->base.odt ? 3 : 0);
			if (channel == 0) {
				writel(0, &pctl->mrrcfg0);
				send_command_op(pctl, 1, MRR_CMD, 8, 0);
				/* S8 */
				if ((readl(&pctl->mrrstat0) & 0x3) != 3) {
					debug("failed!\n");
					return -EREMOTEIO;
				}
			}
		}

		if (data_training(chan, channel, sdram_params) == -1) {
			if (sdram_params->base.dramtype == LPDDR3) {
				ddr_phy_ctl_reset(dram->cru, channel, 1);
				udelay(10);
				ddr_phy_ctl_reset(dram->cru, channel, 0);
				udelay(10);
			}
			debug("failed!\n");
			return -EIO;
		}

		if (sdram_params->base.dramtype == LPDDR3) {
			u32 i;

			writel(0, &pctl->mrrcfg0);
			for (i = 0; i < 17; i++)
				send_command_op(pctl, 1, MRR_CMD, i, 0);
		}
		move_to_access_state(chan);
	}
	dram_all_config(dram, sdram_params);
	debug("%s done\n", __func__);

	return 0;
}
#endif

size_t sdram_size_mb(struct rk3288_pmu *pmu)
{
	u32 rank, col, bk, cs0_row, cs1_row, bw, row_3_4;
	size_t chipsize_mb = 0;
	size_t size_mb = 0;
	u32 ch;
	u32 sys_reg = readl(&pmu->sys_reg[2]);
	u32 chans;

	chans = 1 + ((sys_reg >> SYS_REG_NUM_CH_SHIFT) & SYS_REG_NUM_CH_MASK);

	for (ch = 0; ch < chans; ch++) {
		rank = 1 + (sys_reg >> SYS_REG_RANK_SHIFT(ch) &
			SYS_REG_RANK_MASK);
		col = 9 + (sys_reg >> SYS_REG_COL_SHIFT(ch) & SYS_REG_COL_MASK);
		bk = 3 - ((sys_reg >> SYS_REG_BK_SHIFT(ch)) & SYS_REG_BK_MASK);
		cs0_row = 13 + (sys_reg >> SYS_REG_CS0_ROW_SHIFT(ch) &
				SYS_REG_CS0_ROW_MASK);
		cs1_row = 13 + (sys_reg >> SYS_REG_CS1_ROW_SHIFT(ch) &
				SYS_REG_CS1_ROW_MASK);
		bw = 2 >> ((sys_reg >> SYS_REG_BW_SHIFT(ch)) &
			   SYS_REG_BW_MASK);
		row_3_4 = sys_reg >> SYS_REG_ROW_3_4_SHIFT(ch) &
			SYS_REG_ROW_3_4_MASK;

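		/* CS0 capacity in MB: rows + cols + banks + log2(bus width in bytes), less 20 */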
		chipsize_mb = (1 << (cs0_row + col + bk + bw - 20));

		if (rank > 1)
			chipsize_mb += chipsize_mb >>
				(cs0_row - cs1_row);
		if (row_3_4)
			chipsize_mb = chipsize_mb * 3 / 4;
		size_mb += chipsize_mb;
	}

	/*
	 * we use the 0x00000000~0xfeffffff space since 0xff000000~0xffffffff
	 * is SoC register space (i.e. reserved)
	 */
	size_mb = min(size_mb, 0xff000000 >> 20);

	return size_mb;
}

#ifdef CONFIG_SPL_BUILD
# ifdef CONFIG_ROCKCHIP_FAST_SPL
static int veyron_init(struct dram_info *priv)
{
	struct udevice *pmic;
	int ret;

	ret = uclass_first_device_err(UCLASS_PMIC, &pmic);
	if (ret)
		return ret;

	/* Slowly raise to max CPU voltage to prevent overshoot */
	ret = rk808_spl_configure_buck(pmic, 1, 1200000);
	if (ret)
		return ret;
	udelay(175); /* Must wait for voltage to stabilize, 2mV/us */
	ret = rk808_spl_configure_buck(pmic, 1, 1400000);
	if (ret)
		return ret;
	udelay(100); /* Must wait for voltage to stabilize, 2mV/us */

	rkclk_configure_cpu(priv->cru, priv->grf);

	return 0;
}
# endif

static int setup_sdram(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rk3288_sdram_params params;
	const void *blob = gd->fdt_blob;
	int node = dev->of_offset;
	int i, ret;

	params.num_channels = fdtdec_get_int(blob, node,
					     "rockchip,num-channels", 1);
	for (i = 0; i < params.num_channels; i++) {
		ret = fdtdec_get_byte_array(blob, node,
					    "rockchip,sdram-channel",
					    (u8 *)&params.ch[i],
					    sizeof(params.ch[i]));
		if (ret) {
			debug("%s: Cannot read rockchip,sdram-channel\n",
			      __func__);
			return -EINVAL;
		}
	}
	ret = fdtdec_get_int_array(blob, node, "rockchip,pctl-timing",
				   (u32 *)&params.pctl_timing,
				   sizeof(params.pctl_timing) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,pctl-timing\n", __func__);
		return -EINVAL;
	}
	ret = fdtdec_get_int_array(blob, node, "rockchip,phy-timing",
				   (u32 *)&params.phy_timing,
				   sizeof(params.phy_timing) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,phy-timing\n", __func__);
		return -EINVAL;
	}
	ret = fdtdec_get_int_array(blob, node, "rockchip,sdram-params",
				   (u32 *)&params.base,
				   sizeof(params.base) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,sdram-params\n", __func__);
		return -EINVAL;
	}

# ifdef CONFIG_ROCKCHIP_FAST_SPL
	if (!fdt_node_check_compatible(blob, 0, "google,veyron")) {
		ret = veyron_init(priv);
		if (ret)
			return ret;
	}
# endif

	return sdram_init(priv, &params);
}
#endif

static int rk3288_dmc_probe(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct regmap *map;
	int ret;

	map = syscon_get_regmap_by_driver_data(ROCKCHIP_SYSCON_NOC);
	if (IS_ERR(map))
		return PTR_ERR(map);
	priv->chan[0].msch = regmap_get_range(map, 0);
	priv->chan[1].msch = (struct rk3288_msch *)
			(regmap_get_range(map, 0) + 0x80);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	priv->sgrf = syscon_get_first_range(ROCKCHIP_SYSCON_SGRF);
	priv->pmu = syscon_get_first_range(ROCKCHIP_SYSCON_PMU);

	ret = regmap_init_mem(dev, &map);
	if (ret)
		return ret;
	priv->chan[0].pctl = regmap_get_range(map, 0);
	priv->chan[0].publ = regmap_get_range(map, 1);
	priv->chan[1].pctl = regmap_get_range(map, 2);
	priv->chan[1].publ = regmap_get_range(map, 3);

	ret = uclass_get_device(UCLASS_CLK, CLK_DDR, &priv->ddr_clk);
	if (ret)
		return ret;

	priv->cru = rockchip_get_cru();
	if (IS_ERR(priv->cru))
		return PTR_ERR(priv->cru);
#ifdef CONFIG_SPL_BUILD
	ret = setup_sdram(dev);
	if (ret)
		return ret;
#endif
	priv->info.base = 0;
	priv->info.size = sdram_size_mb(priv->pmu) << 20;

	return 0;
}

static int rk3288_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk3288_dmc_ops = {
	.get_info = rk3288_dmc_get_info,
};

static const struct udevice_id rk3288_dmc_ids[] = {
	{ .compatible = "rockchip,rk3288-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk3288) = {
	.name = "rk3288_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3288_dmc_ids,
	.ops = &rk3288_dmc_ops,
	.probe = rk3288_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
};