/* Source: U-Boot, arch/arm/mach-rockchip/rk3288/sdram_rk3288.c */
1 /*
2 * (C) Copyright 2015 Google, Inc
3 * Copyright 2014 Rockchip Inc.
4 *
5 * SPDX-License-Identifier: GPL-2.0
6 *
7 * Adapted from coreboot.
8 */
9
10 #include <common.h>
11 #include <clk.h>
12 #include <dm.h>
13 #include <dt-structs.h>
14 #include <errno.h>
15 #include <ram.h>
16 #include <regmap.h>
17 #include <syscon.h>
18 #include <asm/io.h>
19 #include <asm/arch/clock.h>
20 #include <asm/arch/cru_rk3288.h>
21 #include <asm/arch/ddr_rk3288.h>
22 #include <asm/arch/grf_rk3288.h>
23 #include <asm/arch/pmu_rk3288.h>
24 #include <asm/arch/sdram.h>
25 #include <linux/err.h>
26 #include <power/regulator.h>
27 #include <power/rk8xx_pmic.h>
28
29 DECLARE_GLOBAL_DATA_PTR;
30
/* Register blocks owned by one of the two DDR channels */
struct chan_info {
	struct rk3288_ddr_pctl *pctl;	/* protocol controller registers */
	struct rk3288_ddr_publ *publ;	/* DDR PHY (PUBL) registers */
	struct rk3288_msch *msch;	/* memory scheduler (NIU) registers */
};
36
/* Driver-private state for the RK3288 DMC */
struct dram_info {
	struct chan_info chan[2];	/* register blocks for both channels */
	struct ram_info info;		/* base/size reported via the RAM uclass */
	struct clk ddr_clk;		/* DDR clock (CLK_DDR) */
	struct rk3288_cru *cru;		/* clock & reset unit */
	struct rk3288_grf *grf;		/* general register files */
	struct rk3288_sgrf *sgrf;	/* secure GRF; stride is set in soc_con2 */
	struct rk3288_pmu *pmu;		/* PMU; sys_reg[2] carries DRAM geometry */
	bool is_veyron;			/* true on google,veyron boards */
};
47
/* Everything needed to configure the SDRAM controller for one board */
struct rk3288_sdram_params {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3288_dmc of_plat;	/* decoded of-platdata */
#endif
	struct rk3288_sdram_channel ch[2];	/* per-channel DRAM geometry */
	struct rk3288_sdram_pctl_timing pctl_timing;	/* pctl timing values */
	struct rk3288_sdram_phy_timing phy_timing;	/* PHY timing values */
	struct rk3288_base_params base;	/* dramtype, freq, noc timing, odt... */
	int num_channels;		/* active channels (1 or 2) */
	struct regmap *map;		/* pctl/publ register ranges */
};
59
/*
 * Map from a ddrconf index (the value programmed into msch->ddrconf) to the
 * row/column encoding matched against channel 0's geometry in
 * sdram_get_niu_config().  Zero entries are unused indexes.
 */
const int ddrconf_table[] = {
	/* row	    col,bw */
	0,
	((1 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((4 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((1 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((1 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	0,
	0,
	0,
	0,
	/* NOTE(review): raw encoding; presumably row=4/col=2 — confirm shifts */
	((4 << 4) | 2),
};
79
80 #define TEST_PATTEN 0x5aa5f00f
81 #define DQS_GATE_TRAINING_ERROR_RANK0 (1 << 4)
82 #define DQS_GATE_TRAINING_ERROR_RANK1 (2 << 4)
83
84 #ifdef CONFIG_SPL_BUILD
85 static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
86 {
87 int i;
88
89 for (i = 0; i < n / sizeof(u32); i++) {
90 writel(*src, dest);
91 src++;
92 dest++;
93 }
94 }
95
/*
 * ddr_reset() - drive the per-channel DDR soft-reset lines
 * @cru: clock & reset unit registers
 * @ch: channel number (0 or 1); each channel owns 5 bits in softrst_con[10]
 * @ctl: 1 asserts reset of the controller core and its APB interface
 * @phy: 1 asserts reset of the PHY, its APB interface and the phy-ctl line
 *
 * All five reset bits for the channel are updated in a single
 * write-enable-masked register access.
 */
static void ddr_reset(struct rk3288_cru *cru, u32 ch, u32 ctl, u32 phy)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;
	u32 ctl_psrstn_shift = 3 + 5 * ch;
	u32 ctl_srstn_shift = 2 + 5 * ch;
	u32 phy_psrstn_shift = 1 + 5 * ch;
	u32 phy_srstn_shift = 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift | 1 << ctl_psrstn_shift |
		     1 << ctl_srstn_shift | 1 << phy_psrstn_shift |
		     1 << phy_srstn_shift,
		     phy << phy_ctl_srstn_shift | ctl << ctl_psrstn_shift |
		     ctl << ctl_srstn_shift | phy << phy_psrstn_shift |
		     phy << phy_srstn_shift);
}
112
113 static void ddr_phy_ctl_reset(struct rk3288_cru *cru, u32 ch, u32 n)
114 {
115 u32 phy_ctl_srstn_shift = 4 + 5 * ch;
116
117 rk_clrsetreg(&cru->cru_softrst_con[10],
118 1 << phy_ctl_srstn_shift, n << phy_ctl_srstn_shift);
119 }
120
/*
 * phy_pctrl_reset() - run the PHY/controller reset sequence for one channel
 * @cru: clock & reset unit registers
 * @publ: PHY register block for the channel
 * @channel: channel number (0 or 1)
 *
 * Asserts both controller and PHY resets, cycles the AC and all four byte-lane
 * DLL soft-reset bits with settle delays, then releases first the PHY and then
 * the controller.  The exact ordering and delays follow the vendor sequence;
 * do not reorder.
 */
static void phy_pctrl_reset(struct rk3288_cru *cru,
			    struct rk3288_ddr_publ *publ,
			    int channel)
{
	int i;

	/* Hold both controller and PHY in reset */
	ddr_reset(cru, channel, 1, 1);
	udelay(1);
	/*
	 * Cycle the DLL soft-reset bits (cleared then set 10us later;
	 * DLLSRST appears active-low here — matches the settle delays)
	 */
	clrbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		clrbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	udelay(10);
	setbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		setbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	udelay(10);
	/* Release the PHY first, then the controller */
	ddr_reset(cru, channel, 1, 0);
	udelay(10);
	ddr_reset(cru, channel, 0, 0);
	udelay(10);
}
144
/*
 * phy_dll_bypass_set() - enable or bypass the PHY DLLs based on DDR frequency
 * @publ: PHY register block
 * @freq: DDR frequency in Hz
 *
 * At or below 250MHz the AC and byte-lane DLLs are disabled and the PHY init
 * register selects DLL bypass; at or below 150MHz the standby bias bypass bit
 * is additionally cleared (set in the 150-250MHz band).  Above 250MHz all
 * DLLs run normally with bypass disabled.
 */
static void phy_dll_bypass_set(struct rk3288_ddr_publ *publ,
			       u32 freq)
{
	int i;

	if (freq <= 250000000) {
		if (freq <= 150000000)
			clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		else
			setbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		setbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxdllcr,
				     DXDLLCR_DLLDIS);

		setbits_le32(&publ->pir, PIR_DLLBYP);
	} else {
		clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		clrbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++) {
			clrbits_le32(&publ->datx8[i].dxdllcr,
				     DXDLLCR_DLLDIS);
		}

		clrbits_le32(&publ->pir, PIR_DLLBYP);
	}
}
172
/*
 * dfi_cfg() - program the DFI (DDR PHY interface) timing/config registers
 * @pctl: protocol controller register block
 * @dramtype: DRAM type (currently unused by this function; kept so the
 *            signature matches the other per-type config helpers —
 *            NOTE(review): confirm whether type-specific DFI setup is needed)
 */
static void dfi_cfg(struct rk3288_ddr_pctl *pctl, u32 dramtype)
{
	writel(DFI_INIT_START, &pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN,
	       &pctl->dfistcfg1);
	writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
	writel(7 << TLP_RESP_TIME_SHIFT | LP_SR_EN | LP_PD_EN,
	       &pctl->dfilpcfg0);

	writel(2 << TCTRL_DELAY_TIME_SHIFT, &pctl->dfitctrldelay);
	writel(1 << TPHY_WRDATA_TIME_SHIFT, &pctl->dfitphywrdata);
	writel(0xf << TPHY_RDLAT_TIME_SHIFT, &pctl->dfitphyrdlat);
	writel(2 << TDRAM_CLK_DIS_TIME_SHIFT, &pctl->dfitdramclkdis);
	writel(2 << TDRAM_CLK_EN_TIME_SHIFT, &pctl->dfitdramclken);
	writel(1, &pctl->dfitphyupdtype0);

	/* cs0 and cs1 write odt enable */
	writel((RANK0_ODT_WRITE_SEL | RANK1_ODT_WRITE_SEL),
	       &pctl->dfiodtcfg);
	/* odt write length */
	writel(7 << ODT_LEN_BL8_W_SHIFT, &pctl->dfiodtcfg1);
	/* phyupd and ctrlupd disabled */
	writel(0, &pctl->dfiupdcfg);
}
197
198 static void ddr_set_enable(struct rk3288_grf *grf, uint channel, bool enable)
199 {
200 uint val = 0;
201
202 if (enable) {
203 val = 1 << (channel ? DDR1_16BIT_EN_SHIFT :
204 DDR0_16BIT_EN_SHIFT);
205 }
206 rk_clrsetreg(&grf->soc_con0,
207 1 << (channel ? DDR1_16BIT_EN_SHIFT : DDR0_16BIT_EN_SHIFT),
208 val);
209 }
210
211 static void ddr_set_ddr3_mode(struct rk3288_grf *grf, uint channel,
212 bool ddr3_mode)
213 {
214 uint mask, val;
215
216 mask = 1 << (channel ? MSCH1_MAINDDR3_SHIFT : MSCH0_MAINDDR3_SHIFT);
217 val = ddr3_mode << (channel ? MSCH1_MAINDDR3_SHIFT :
218 MSCH0_MAINDDR3_SHIFT);
219 rk_clrsetreg(&grf->soc_con0, mask, val);
220 }
221
222 static void ddr_set_en_bst_odt(struct rk3288_grf *grf, uint channel,
223 bool enable, bool enable_bst, bool enable_odt)
224 {
225 uint mask;
226 bool disable_bst = !enable_bst;
227
228 mask = channel ?
229 (1 << LPDDR3_EN1_SHIFT | 1 << UPCTL1_BST_DIABLE_SHIFT |
230 1 << UPCTL1_LPDDR3_ODT_EN_SHIFT) :
231 (1 << LPDDR3_EN0_SHIFT | 1 << UPCTL0_BST_DIABLE_SHIFT |
232 1 << UPCTL0_LPDDR3_ODT_EN_SHIFT);
233 rk_clrsetreg(&grf->soc_con2, mask,
234 enable << (channel ? LPDDR3_EN1_SHIFT : LPDDR3_EN0_SHIFT) |
235 disable_bst << (channel ? UPCTL1_BST_DIABLE_SHIFT :
236 UPCTL0_BST_DIABLE_SHIFT) |
237 enable_odt << (channel ? UPCTL1_LPDDR3_ODT_EN_SHIFT :
238 UPCTL0_LPDDR3_ODT_EN_SHIFT));
239 }
240
/*
 * pctl_cfg() - configure the protocol controller for the DRAM type
 * @channel: channel number (0 or 1)
 * @pctl: protocol controller register block
 * @sdram_params: board SDRAM parameters (timings, type, odt)
 * @grf: general register files (mode bits are mirrored there)
 *
 * Copies the whole pctl timing set into the controller, then programs the
 * read-data-enable / write-latency DFI timings and MCFG for either LPDDR3 or
 * DDR3.  Only those two types are handled; other values of dramtype leave the
 * per-type registers untouched.  Finally locks in the config via SCFG bit 0.
 */
static void pctl_cfg(int channel, struct rk3288_ddr_pctl *pctl,
		     struct rk3288_sdram_params *sdram_params,
		     struct rk3288_grf *grf)
{
	unsigned int burstlen;

	/* burst length is encoded in bits 20:18 of the NoC timing word */
	burstlen = (sdram_params->base.noc_timing >> 18) & 0x7;
	copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
		    sizeof(sdram_params->pctl_timing));
	switch (sdram_params->base.dramtype) {
	case LPDDR3:
		/* read data is valid tCL-1 cycles after the command */
		writel(sdram_params->pctl_timing.tcl - 1,
		       &pctl->dfitrddataen);
		writel(sdram_params->pctl_timing.tcwl,
		       &pctl->dfitphywrlat);
		burstlen >>= 1;
		writel(LPDDR2_S4 | 0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
		       LPDDR2_EN | burstlen << BURSTLENGTH_SHIFT |
		       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		ddr_set_ddr3_mode(grf, channel, false);
		ddr_set_enable(grf, channel, true);
		ddr_set_en_bst_odt(grf, channel, true, false,
				   sdram_params->base.odt);
		break;
	case DDR3:
		/* rd-enable depends on whether MR1 disables the DRAM DLL */
		if (sdram_params->phy_timing.mr[1] & DDR3_DLL_DISABLE) {
			writel(sdram_params->pctl_timing.tcl - 3,
			       &pctl->dfitrddataen);
		} else {
			writel(sdram_params->pctl_timing.tcl - 2,
			       &pctl->dfitrddataen);
		}
		writel(sdram_params->pctl_timing.tcwl - 1,
		       &pctl->dfitphywrlat);
		writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
		       DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		ddr_set_ddr3_mode(grf, channel, true);
		ddr_set_enable(grf, channel, true);

		ddr_set_en_bst_odt(grf, channel, false, true, false);
		break;
	}

	/* Lock in the configuration */
	setbits_le32(&pctl->scfg, 1);
}
290
/*
 * phy_cfg() - configure the DDR PHY and memory scheduler for one channel
 * @chan: register blocks for this channel
 * @channel: channel number (0 or 1)
 * @sdram_params: board SDRAM parameters
 *
 * Loads the PHY timing registers, programs the MSCH NoC timing/latency, and
 * computes the PHY init timers (PTR0-2) in DDR clock cycles from the DDR
 * frequency.  Then applies DRAM-type-specific PGCR/DCR settings and finally
 * enables or disables dynamic RTT on all four byte lanes per the ODT flag.
 */
static void phy_cfg(const struct chan_info *chan, int channel,
		    struct rk3288_sdram_params *sdram_params)
{
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;
	uint ddr_freq_mhz = sdram_params->base.ddr_freq / 1000000;
	u32 dinit2, tmp;
	int i;

	/* 200us of DDR cycles (freq_mhz * 200000ns / 1000) */
	dinit2 = DIV_ROUND_UP(ddr_freq_mhz * 200000, 1000);
	/* DDR PHY Timing */
	copy_to_reg(&publ->dtpr[0], &sdram_params->phy_timing.dtpr0,
		    sizeof(sdram_params->phy_timing));
	writel(sdram_params->base.noc_timing, &msch->ddrtiming);
	writel(0x3f, &msch->readlatency);
	writel(sdram_params->base.noc_activate, &msch->activate);
	writel(2 << BUSWRTORD_SHIFT | 2 << BUSRDTOWR_SHIFT |
	       1 << BUSRDTORD_SHIFT, &msch->devtodev);
	/* PTR0: DLL lock (5.12us), DLL soft reset (50ns), ITM reset */
	writel(DIV_ROUND_UP(ddr_freq_mhz * 5120, 1000) << PRT_DLLLOCK_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 50, 1000) << PRT_DLLSRST_SHIFT |
	       8 << PRT_ITMSRST_SHIFT, &publ->ptr[0]);
	/* PTR1: DRAM init 0 (500us) and init 1 (400ns) */
	writel(DIV_ROUND_UP(ddr_freq_mhz * 500000, 1000) << PRT_DINIT0_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 400, 1000) << PRT_DINIT1_SHIFT,
	       &publ->ptr[1]);
	/* PTR2: DRAM init 2 (clamped to the 17-bit field) and init 3 (1us) */
	writel(min(dinit2, 0x1ffffU) << PRT_DINIT2_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 1000, 1000) << PRT_DINIT3_SHIFT,
	       &publ->ptr[2]);

	switch (sdram_params->base.dramtype) {
	case LPDDR3:
		clrsetbits_le32(&publ->pgcr, 0x1F,
				0 << PGCR_DFTLMT_SHIFT |
				0 << PGCR_DFTCMP_SHIFT |
				1 << PGCR_DQSCFG_SHIFT |
				0 << PGCR_ITMDMD_SHIFT);
		/* DDRMODE select LPDDR3 */
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_LPDDR2_LPDDR3 << DDRMD_SHIFT);
		clrsetbits_le32(&publ->dxccr,
				DQSNRES_MASK << DQSNRES_SHIFT |
				DQSRES_MASK << DQSRES_SHIFT,
				4 << DQSRES_SHIFT | 0xc << DQSNRES_SHIFT);
		/* DQS gating window = tDQSCKmax - tDQSCK (from DTPR1) */
		tmp = readl(&publ->dtpr[1]);
		tmp = ((tmp >> TDQSCKMAX_SHIFT) & TDQSCKMAX_MASK) -
		      ((tmp >> TDQSCK_SHIFT) & TDQSCK_MASK);
		clrsetbits_le32(&publ->dsgcr,
				DQSGE_MASK << DQSGE_SHIFT |
				DQSGX_MASK << DQSGX_SHIFT,
				tmp << DQSGE_SHIFT | tmp << DQSGX_SHIFT);
		break;
	case DDR3:
		clrbits_le32(&publ->pgcr, 0x1f);
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_DDR3 << DDRMD_SHIFT);
		break;
	}
	if (sdram_params->base.odt) {
		/*dynamic RTT enable */
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
	} else {
		/*dynamic RTT disable */
		for (i = 0; i < 4; i++)
			clrbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
	}
}
357
/*
 * phy_init() - trigger PHY initialisation (DLL reset/lock, ZQ calibration)
 * @publ: PHY register block
 *
 * Busy-waits until init, DLL-lock and ZQ-cal done bits are all set.
 * NOTE(review): no timeout — hangs forever on hardware failure, which is
 * the convention throughout this file's SPL init path.
 */
static void phy_init(struct rk3288_ddr_publ *publ)
{
	setbits_le32(&publ->pir, PIR_INIT | PIR_DLLSRST
		| PIR_DLLLOCK | PIR_ZCAL | PIR_ITMSRST | PIR_CLRSR);
	udelay(1);
	while ((readl(&publ->pgsr) &
		(PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE)) !=
		(PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE))
		;
}
368
369 static void send_command(struct rk3288_ddr_pctl *pctl, u32 rank,
370 u32 cmd, u32 arg)
371 {
372 writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
373 udelay(1);
374 while (readl(&pctl->mcmd) & START_CMD)
375 ;
376 }
377
378 static inline void send_command_op(struct rk3288_ddr_pctl *pctl,
379 u32 rank, u32 cmd, u32 ma, u32 op)
380 {
381 send_command(pctl, rank, cmd, (ma & LPDDR2_MA_MASK) << LPDDR2_MA_SHIFT |
382 (op & LPDDR2_OP_MASK) << LPDDR2_OP_SHIFT);
383 }
384
/*
 * memory_init() - run DRAM initialisation through the PHY
 * @publ: PHY register block
 * @dramtype: DRAM type; DDR3 additionally asserts the DRAM reset step
 *
 * DLL lock and ZQ calibration are bypassed here (done earlier in phy_init).
 * Busy-waits (no timeout) for init and DLL-done status.
 */
static void memory_init(struct rk3288_ddr_publ *publ,
			u32 dramtype)
{
	setbits_le32(&publ->pir,
		     (PIR_INIT | PIR_DRAMINIT | PIR_LOCKBYP
		      | PIR_ZCALBYP | PIR_CLRSR | PIR_ICPC
		      | (dramtype == DDR3 ? PIR_DRAMRST : 0)));
	udelay(1);
	while ((readl(&publ->pgsr) & (PGSR_IDONE | PGSR_DLDONE))
		!= (PGSR_IDONE | PGSR_DLDONE))
		;
}
397
/*
 * move_to_config_state() - drive the pctl state machine into CONFIG
 * @publ: PHY register block (used to wait for DLL lock after wakeup)
 * @pctl: protocol controller register block
 *
 * Loops on the current state: LOW_POWER is woken up first and then falls
 * through to request CONFIG; ACCESS and INIT_MEM request CONFIG directly;
 * returns once CONFIG is reached.
 */
static void move_to_config_state(struct rk3288_ddr_publ *publ,
				 struct rk3288_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MSK;

		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK)
				!= ACCESS)
				;
			/* wait DLL lock */
			while ((readl(&publ->pgsr) & PGSR_DLDONE)
				!= PGSR_DLDONE)
				;
			/*
			 * if at low power state,need wakeup first,
			 * and then enter the config
			 * so here no break.
			 */
		case ACCESS:
			/* no break */
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
				;
			break;
		case CONFIG:
			return;
		default:
			break;
		}
	}
}
435
/*
 * set_bandwidth_ratio() - switch a channel between 16-bit and 32-bit width
 * @chan: register blocks for this channel
 * @channel: channel number (0 or 1)
 * @n: 1 selects half (16-bit) width; any other value selects full width
 * @grf: general register files (mirrors the width in soc_con0)
 *
 * Half width disables byte lanes 2 and 3 and their DLLs; full width
 * re-enables them and cycles their DLL soft reset.  The statement order
 * (lane enable before DLL enable/reset) follows the vendor sequence.
 */
static void set_bandwidth_ratio(const struct chan_info *chan, int channel,
				u32 n, struct rk3288_grf *grf)
{
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;

	if (n == 1) {
		setbits_le32(&pctl->ppcfg, 1);
		rk_setreg(&grf->soc_con0, 1 << (8 + channel));
		setbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data Byte disable*/
		clrbits_le32(&publ->datx8[2].dxgcr, 1);
		clrbits_le32(&publ->datx8[3].dxgcr, 1);
		/* disable DLL */
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
	} else {
		clrbits_le32(&pctl->ppcfg, 1);
		rk_clrreg(&grf->soc_con0, 1 << (8 + channel));
		clrbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data Byte enable*/
		setbits_le32(&publ->datx8[2].dxgcr, 1);
		setbits_le32(&publ->datx8[3].dxgcr, 1);

		/* enable DLL */
		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
		/* reset DLL */
		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
		udelay(10);
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
	}
	setbits_le32(&pctl->dfistcfg0, 1 << 2);
}
473
/*
 * data_training() - run DQS gate training and read valid training
 * @chan: register blocks for this channel
 * @channel: channel number (0 or 1)
 * @sdram_params: board SDRAM parameters (rank and DRAM type are read)
 *
 * Auto refresh is suspended during training and a handful of refresh
 * commands are issued afterwards to compensate.  Byte lanes 2/3 are only
 * polled when the controller is in full-width mode (ppcfg bit 0 clear).
 *
 * Return: 0 on success, -1 if the PHY reports a training error.
 */
static int data_training(const struct chan_info *chan, int channel,
			 struct rk3288_sdram_params *sdram_params)
{
	unsigned int j;
	int ret = 0;
	u32 rank;
	int i;
	u32 step[2] = { PIR_QSTRN, PIR_RVTRN };
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;

	/* disable auto refresh */
	writel(0, &pctl->trefi);

	if (sdram_params->base.dramtype != LPDDR3)
		setbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);
	/* always train at least rank 0 */
	rank = sdram_params->ch[channel].rank | 1;
	for (j = 0; j < ARRAY_SIZE(step); j++) {
		/*
		 * trigger QSTRN and RVTRN
		 * clear DTDONE status
		 */
		setbits_le32(&publ->pir, PIR_CLRSR);

		/* trigger DTT */
		setbits_le32(&publ->pir,
			     PIR_INIT | step[j] | PIR_LOCKBYP | PIR_ZCALBYP |
			     PIR_CLRSR);
		udelay(1);
		/* wait echo byte DTDONE */
		while ((readl(&publ->datx8[0].dxgsr[0]) & rank)
			!= rank)
			;
		while ((readl(&publ->datx8[1].dxgsr[0]) & rank)
			!= rank)
			;
		if (!(readl(&pctl->ppcfg) & 1)) {
			while ((readl(&publ->datx8[2].dxgsr[0])
				& rank) != rank)
				;
			while ((readl(&publ->datx8[3].dxgsr[0])
				& rank) != rank)
				;
		}
		if (readl(&publ->pgsr) &
		    (PGSR_DTERR | PGSR_RVERR | PGSR_RVEIRR)) {
			ret = -1;
			break;
		}
	}
	/* send some auto refresh to complement the lost while DTT */
	for (i = 0; i < (rank > 1 ? 8 : 4); i++)
		send_command(pctl, rank, REF_CMD, 0);

	if (sdram_params->base.dramtype != LPDDR3)
		clrbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);

	/* resume auto refresh */
	writel(sdram_params->pctl_timing.trefi, &pctl->trefi);

	return ret;
}
536
/*
 * move_to_access_state() - drive the pctl state machine into ACCESS
 * @chan: register blocks for this channel
 *
 * LOW_POWER returns immediately if entered via a hardware low-power
 * trigger (LP_TRIG == 1), otherwise wakes up to ACCESS.  INIT_MEM first
 * moves to CONFIG and deliberately falls through to the CONFIG case,
 * which then requests GO (ACCESS).
 */
static void move_to_access_state(const struct chan_info *chan)
{
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MSK;

		switch (state) {
		case LOW_POWER:
			if (((readl(&pctl->stat) >> LP_TRIG_SHIFT) &
			     LP_TRIG_MASK) == 1)
				return;

			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS)
				;
			/* wait DLL lock */
			while ((readl(&publ->pgsr) & PGSR_DLDONE)
				!= PGSR_DLDONE)
				;
			break;
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
				;
			/* fall through */
		case CONFIG:
			writel(GO_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) == CONFIG)
				;
			break;
		case ACCESS:
			return;
		default:
			break;
		}
	}
}
576
577 static void dram_cfg_rbc(const struct chan_info *chan, u32 chnum,
578 struct rk3288_sdram_params *sdram_params)
579 {
580 struct rk3288_ddr_publ *publ = chan->publ;
581
582 if (sdram_params->ch[chnum].bk == 3)
583 clrsetbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT,
584 1 << PDQ_SHIFT);
585 else
586 clrbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT);
587
588 writel(sdram_params->base.ddrconfig, &chan->msch->ddrconf);
589 }
590
/*
 * dram_all_config() - publish final DRAM geometry and apply ddrconf/stride
 * @dram: driver-private state
 * @sdram_params: detected SDRAM parameters
 *
 * Encodes type, channel count and per-channel geometry into PMU sys_reg[2]
 * (the same encoding sdram_size_mb() decodes later), programs each channel's
 * ddrconf via dram_cfg_rbc(), and sets the address stride in SGRF soc_con2.
 */
static void dram_all_config(const struct dram_info *dram,
			    struct rk3288_sdram_params *sdram_params)
{
	unsigned int chan;
	u32 sys_reg = 0;

	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (sdram_params->num_channels - 1) << SYS_REG_NUM_CH_SHIFT;
	for (chan = 0; chan < sdram_params->num_channels; chan++) {
		const struct rk3288_sdram_channel *info =
			&sdram_params->ch[chan];

		sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(chan);
		sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(chan);
		sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(chan);
		sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(chan);
		sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(chan);
		sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(chan);
		sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(chan);
		/* bw/dbw stored as 2 >> value (2=8bit, 1=16bit, 0=32bit) */
		sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(chan);
		sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(chan);

		dram_cfg_rbc(&dram->chan[chan], chan, sdram_params);
	}
	writel(sys_reg, &dram->pmu->sys_reg[2]);
	rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, sdram_params->base.stride);
}
618
619 static int sdram_rank_bw_detect(struct dram_info *dram, int channel,
620 struct rk3288_sdram_params *sdram_params)
621 {
622 int reg;
623 int need_trainig = 0;
624 const struct chan_info *chan = &dram->chan[channel];
625 struct rk3288_ddr_publ *publ = chan->publ;
626
627 if (data_training(chan, channel, sdram_params) < 0) {
628 reg = readl(&publ->datx8[0].dxgsr[0]);
629 /* Check the result for rank 0 */
630 if ((channel == 0) && (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
631 debug("data training fail!\n");
632 return -EIO;
633 } else if ((channel == 1) &&
634 (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
635 sdram_params->num_channels = 1;
636 }
637
638 /* Check the result for rank 1 */
639 if (reg & DQS_GATE_TRAINING_ERROR_RANK1) {
640 sdram_params->ch[channel].rank = 1;
641 clrsetbits_le32(&publ->pgcr, 0xF << 18,
642 sdram_params->ch[channel].rank << 18);
643 need_trainig = 1;
644 }
645 reg = readl(&publ->datx8[2].dxgsr[0]);
646 if (reg & (1 << 4)) {
647 sdram_params->ch[channel].bw = 1;
648 set_bandwidth_ratio(chan, channel,
649 sdram_params->ch[channel].bw,
650 dram->grf);
651 need_trainig = 1;
652 }
653 }
654 /* Assume the Die bit width are the same with the chip bit width */
655 sdram_params->ch[channel].dbw = sdram_params->ch[channel].bw;
656
657 if (need_trainig &&
658 (data_training(chan, channel, sdram_params) < 0)) {
659 if (sdram_params->base.dramtype == LPDDR3) {
660 ddr_phy_ctl_reset(dram->cru, channel, 1);
661 udelay(10);
662 ddr_phy_ctl_reset(dram->cru, channel, 0);
663 udelay(10);
664 }
665 debug("2nd data training failed!");
666 return -EIO;
667 }
668
669 return 0;
670 }
671
/*
 * sdram_col_row_detect() - probe column and row address widths
 * @dram: driver-private state
 * @channel: channel number (0 or 1)
 * @sdram_params: parameters updated in place (col, cs0_row, cs1_row, row_3_4)
 *
 * Writes a test pattern at candidate column/row boundary addresses and reads
 * it back; an address that holds the pattern without aliasing back onto
 * CONFIG_SYS_SDRAM_BASE identifies the width.  Loop counters run 11..9 for
 * col and 16..12 for row, so exit values of 8 / 11 mean "nothing matched".
 *
 * Return: 0 on success, -EINVAL if detection fails.
 */
static int sdram_col_row_detect(struct dram_info *dram, int channel,
				struct rk3288_sdram_params *sdram_params)
{
	int row, col;
	unsigned int addr;
	const struct chan_info *chan = &dram->chan[channel];
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	struct rk3288_ddr_publ *publ = chan->publ;
	int ret = 0;

	/* Detect col */
	for (col = 11; col >= 9; col--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		addr = CONFIG_SYS_SDRAM_BASE +
			(1 << (col + sdram_params->ch[channel].bw - 1));
		writel(TEST_PATTEN, addr);
		if ((readl(addr) == TEST_PATTEN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (col == 8) {
		/* loop exhausted without a match */
		printf("Col detect error\n");
		ret = -EINVAL;
		goto out;
	} else {
		sdram_params->ch[channel].col = col;
	}

	/* use a known ddrconf while probing rows */
	move_to_config_state(publ, pctl);
	writel(4, &chan->msch->ddrconf);
	move_to_access_state(chan);
	/* Detect row*/
	for (row = 16; row >= 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		addr = CONFIG_SYS_SDRAM_BASE + (1 << (row + 15 - 1));
		writel(TEST_PATTEN, addr);
		if ((readl(addr) == TEST_PATTEN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row == 11) {
		/* loop exhausted without a match */
		printf("Row detect error\n");
		ret = -EINVAL;
	} else {
		/* cs1 assumed to mirror cs0; row_3_4 parts not detected here */
		sdram_params->ch[channel].cs1_row = row;
		sdram_params->ch[channel].row_3_4 = 0;
		debug("chn %d col %d, row %d\n", channel, col, row);
		sdram_params->ch[channel].cs0_row = row;
	}

out:
	return ret;
}
725
726 static int sdram_get_niu_config(struct rk3288_sdram_params *sdram_params)
727 {
728 int i, tmp, size, ret = 0;
729
730 tmp = sdram_params->ch[0].col - 9;
731 tmp -= (sdram_params->ch[0].bw == 2) ? 0 : 1;
732 tmp |= ((sdram_params->ch[0].cs0_row - 12) << 4);
733 size = sizeof(ddrconf_table)/sizeof(ddrconf_table[0]);
734 for (i = 0; i < size; i++)
735 if (tmp == ddrconf_table[i])
736 break;
737 if (i >= size) {
738 printf("niu config not found\n");
739 ret = -EINVAL;
740 } else {
741 sdram_params->base.ddrconfig = i;
742 }
743
744 return ret;
745 }
746
747 static int sdram_get_stride(struct rk3288_sdram_params *sdram_params)
748 {
749 int stride = -1;
750 int ret = 0;
751 long cap = sdram_params->num_channels * (1u <<
752 (sdram_params->ch[0].cs0_row +
753 sdram_params->ch[0].col +
754 (sdram_params->ch[0].rank - 1) +
755 sdram_params->ch[0].bw +
756 3 - 20));
757
758 switch (cap) {
759 case 512:
760 stride = 0;
761 break;
762 case 1024:
763 stride = 5;
764 break;
765 case 2048:
766 stride = 9;
767 break;
768 case 4096:
769 stride = 0xd;
770 break;
771 default:
772 stride = -1;
773 printf("could not find correct stride, cap error!\n");
774 ret = -EINVAL;
775 break;
776 }
777 sdram_params->base.stride = stride;
778
779 return ret;
780 }
781
/*
 * sdram_init() - full SDRAM bring-up for both channels
 * @dram: driver-private state
 * @sdram_params: board SDRAM parameters, refined in place during detection
 *
 * Validates the requested frequency, sets the DDR clock, then for each
 * channel: resets PHY/controller, configures DFI/pctl/PHY, powers up and
 * initialises the DRAM, issues LPDDR3 mode-register setup where needed,
 * and auto-detects rank, bus width, column and row geometry.  Finally
 * resolves the NIU ddrconf and stride and publishes the configuration.
 *
 * Return: 0 on success, negative error for clock/frequency/MRR failures;
 * hangs (does not return) if geometry detection fails.
 */
static int sdram_init(struct dram_info *dram,
		      struct rk3288_sdram_params *sdram_params)
{
	int channel;
	int zqcr;
	int ret;

	debug("%s start\n", __func__);
	/* upper frequency bounds: 800MHz for DDR3, 533MHz for LPDDR3 */
	if ((sdram_params->base.dramtype == DDR3 &&
	     sdram_params->base.ddr_freq > 800000000) ||
	    (sdram_params->base.dramtype == LPDDR3 &&
	     sdram_params->base.ddr_freq > 533000000)) {
		debug("SDRAM frequency is too high!");
		return -E2BIG;
	}

	debug("ddr clk dpll\n");
	ret = clk_set_rate(&dram->ddr_clk, sdram_params->base.ddr_freq);
	debug("ret=%d\n", ret);
	if (ret) {
		debug("Could not set DDR clock\n");
		return ret;
	}

	for (channel = 0; channel < 2; channel++) {
		const struct chan_info *chan = &dram->chan[channel];
		struct rk3288_ddr_pctl *pctl = chan->pctl;
		struct rk3288_ddr_publ *publ = chan->publ;

		/* map all the 4GB space to the current channel */
		if (channel)
			rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x17);
		else
			rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x1a);
		phy_pctrl_reset(dram->cru, publ, channel);
		phy_dll_bypass_set(publ, sdram_params->base.ddr_freq);

		dfi_cfg(pctl, sdram_params->base.dramtype);

		pctl_cfg(channel, pctl, sdram_params, dram->grf);

		phy_cfg(chan, channel, sdram_params);

		phy_init(publ);

		/* power-up sequence; busy-wait with no timeout */
		writel(POWER_UP_START, &pctl->powctl);
		while (!(readl(&pctl->powstat) & POWER_UP_DONE))
			;

		memory_init(publ, sdram_params->base.dramtype);
		move_to_config_state(publ, pctl);

		if (sdram_params->base.dramtype == LPDDR3) {
			/* LPDDR3 reset-and-MR init sequence (MR63 = reset) */
			send_command(pctl, 3, DESELECT_CMD, 0);
			udelay(1);
			send_command(pctl, 3, PREA_CMD, 0);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 63, 0xfc);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 1,
					sdram_params->phy_timing.mr[1]);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 2,
					sdram_params->phy_timing.mr[2]);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 3,
					sdram_params->phy_timing.mr[3]);
			udelay(1);
		}

		/* Using 32bit bus width for detect */
		sdram_params->ch[channel].bw = 2;
		set_bandwidth_ratio(chan, channel,
				    sdram_params->ch[channel].bw, dram->grf);
		/*
		 * set cs, using n=3 for detect
		 * CS0, n=1
		 * CS1, n=2
		 * CS0 & CS1, n = 3
		 */
		/* NOTE(review): trailing comma operator below — works, but
		 * a ';' was probably intended */
		sdram_params->ch[channel].rank = 2,
		clrsetbits_le32(&publ->pgcr, 0xF << 18,
				(sdram_params->ch[channel].rank | 1) << 18);

		/* DS=40ohm,ODT=155ohm */
		zqcr = 1 << ZDEN_SHIFT | 2 << PU_ONDIE_SHIFT |
		       2 << PD_ONDIE_SHIFT | 0x19 << PU_OUTPUT_SHIFT |
		       0x19 << PD_OUTPUT_SHIFT;
		writel(zqcr, &publ->zq1cr[0]);
		writel(zqcr, &publ->zq0cr[0]);

		if (sdram_params->base.dramtype == LPDDR3) {
			/* LPDDR2/LPDDR3 need to wait DAI complete, max 10us */
			udelay(10);
			send_command_op(pctl,
					sdram_params->ch[channel].rank | 1,
					MRS_CMD, 11,
					sdram_params->base.odt ? 3 : 0);
			if (channel == 0) {
				writel(0, &pctl->mrrcfg0);
				/* MR8: basic configuration; expect S8 type */
				send_command_op(pctl, 1, MRR_CMD, 8, 0);
				/* S8 */
				if ((readl(&pctl->mrrstat0) & 0x3) != 3) {
					debug("failed!");
					return -EREMOTEIO;
				}
			}
		}

		/* Detect the rank and bit-width with data-training */
		/* NOTE(review): return value ignored; a hard failure only
		 * surfaces later in col/row detect — consider checking */
		sdram_rank_bw_detect(dram, channel, sdram_params);

		if (sdram_params->base.dramtype == LPDDR3) {
			u32 i;
			writel(0, &pctl->mrrcfg0);
			/* drain MRR results for MR0..MR16 */
			for (i = 0; i < 17; i++)
				send_command_op(pctl, 1, MRR_CMD, i, 0);
		}
		writel(15, &chan->msch->ddrconf);
		move_to_access_state(chan);
		/* DDR3 and LPDDR3 are always 8 bank, no need detect */
		sdram_params->ch[channel].bk = 3;
		/* Detect Col and Row number*/
		ret = sdram_col_row_detect(dram, channel, sdram_params);
		if (ret)
			goto error;
	}
	/* Find NIU DDR configuration */
	ret = sdram_get_niu_config(sdram_params);
	if (ret)
		goto error;
	/* Find stride setting */
	ret = sdram_get_stride(sdram_params);
	if (ret)
		goto error;

	dram_all_config(dram, sdram_params);
	debug("%s done\n", __func__);

	return 0;
error:
	printf("DRAM init failed!\n");
	hang();
}
926 #endif /* CONFIG_SPL_BUILD */
927
/*
 * sdram_size_mb() - compute total usable DRAM size in MiB
 * @pmu: PMU register block; sys_reg[2] holds the geometry written by
 *       dram_all_config() (or by earlier-stage firmware)
 *
 * Decodes per-channel rank/col/bank/row/width fields, sums the channel
 * capacities (rank 1 scaled by the cs1/cs0 row difference, 3/4-row parts
 * scaled by 3/4), and caps the result below the 0xfe000000 register space.
 *
 * Return: usable memory size in MiB.
 */
size_t sdram_size_mb(struct rk3288_pmu *pmu)
{
	u32 rank, col, bk, cs0_row, cs1_row, bw, row_3_4;
	size_t chipsize_mb = 0;
	size_t size_mb = 0;
	u32 ch;
	u32 sys_reg = readl(&pmu->sys_reg[2]);
	u32 chans;

	chans = 1 + ((sys_reg >> SYS_REG_NUM_CH_SHIFT) & SYS_REG_NUM_CH_MASK);

	for (ch = 0; ch < chans; ch++) {
		/* fields are stored biased; undo the encoding */
		rank = 1 + (sys_reg >> SYS_REG_RANK_SHIFT(ch) &
			SYS_REG_RANK_MASK);
		col = 9 + (sys_reg >> SYS_REG_COL_SHIFT(ch) & SYS_REG_COL_MASK);
		bk = 3 - ((sys_reg >> SYS_REG_BK_SHIFT(ch)) & SYS_REG_BK_MASK);
		cs0_row = 13 + (sys_reg >> SYS_REG_CS0_ROW_SHIFT(ch) &
				SYS_REG_CS0_ROW_MASK);
		cs1_row = 13 + (sys_reg >> SYS_REG_CS1_ROW_SHIFT(ch) &
				SYS_REG_CS1_ROW_MASK);
		bw = (2 >> ((sys_reg >> SYS_REG_BW_SHIFT(ch)) &
			SYS_REG_BW_MASK));
		row_3_4 = sys_reg >> SYS_REG_ROW_3_4_SHIFT(ch) &
			SYS_REG_ROW_3_4_MASK;
		/* bytes = 2^(row+col+bk) * buswidth; -20 converts to MiB */
		chipsize_mb = (1 << (cs0_row + col + bk + bw - 20));

		if (rank > 1)
			chipsize_mb += chipsize_mb >>
				(cs0_row - cs1_row);
		if (row_3_4)
			chipsize_mb = chipsize_mb * 3 / 4;
		size_mb += chipsize_mb;
	}

	/*
	 * we use the 0x00000000~0xfdffffff space since 0xff000000~0xffffffff
	 * is SoC register space (i.e. reserved), and 0xfe000000~0xfeffffff is
	 * inaccessible for some IP controller.
	 */
	size_mb = min(size_mb, 0xfe000000 >> 20);

	return size_mb;
}
971
972 #ifdef CONFIG_SPL_BUILD
973 # ifdef CONFIG_ROCKCHIP_FAST_SPL
/*
 * veyron_init() - veyron-specific pre-SDRAM setup (CPU voltage and clock)
 * @priv: driver-private state (cru/grf used for the CPU clock)
 *
 * Raises the CPU buck regulator to 1.4V in two steps to avoid overshoot,
 * then configures the CPU clock.
 *
 * Return: 0 on success, negative error from the PMIC/uclass calls.
 */
static int veyron_init(struct dram_info *priv)
{
	struct udevice *pmic;
	int ret;

	ret = uclass_first_device_err(UCLASS_PMIC, &pmic);
	if (ret)
		return ret;

	/* Slowly raise to max CPU voltage to prevent overshoot */
	ret = rk8xx_spl_configure_buck(pmic, 1, 1200000);
	if (ret)
		return ret;
	udelay(175);/* Must wait for voltage to stabilize, 2mV/us */
	ret = rk8xx_spl_configure_buck(pmic, 1, 1400000);
	if (ret)
		return ret;
	udelay(100);/* Must wait for voltage to stabilize, 2mV/us */

	rk3288_clk_configure_cpu(priv->cru, priv->grf);

	return 0;
}
997 # endif
998
/*
 * setup_sdram() - SPL entry point for SDRAM init
 * @dev: the DMC device
 *
 * On veyron boards (fast-SPL config) first raises the CPU voltage/clock,
 * then runs the full sdram_init() with the platdata parameters.
 *
 * Return: 0 on success, negative error otherwise.
 */
static int setup_sdram(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rk3288_sdram_params *params = dev_get_platdata(dev);

# ifdef CONFIG_ROCKCHIP_FAST_SPL
	if (priv->is_veyron) {
		int ret;

		ret = veyron_init(priv);
		if (ret)
			return ret;
	}
# endif

	return sdram_init(priv, params);
}
1016
1017 static int rk3288_dmc_ofdata_to_platdata(struct udevice *dev)
1018 {
1019 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
1020 struct rk3288_sdram_params *params = dev_get_platdata(dev);
1021 int ret;
1022
1023 /* Rk3288 supports dual-channel, set default channel num to 2 */
1024 params->num_channels = 2;
1025 ret = dev_read_u32_array(dev, "rockchip,pctl-timing",
1026 (u32 *)&params->pctl_timing,
1027 sizeof(params->pctl_timing) / sizeof(u32));
1028 if (ret) {
1029 debug("%s: Cannot read rockchip,pctl-timing\n", __func__);
1030 return -EINVAL;
1031 }
1032 ret = dev_read_u32_array(dev, "rockchip,phy-timing",
1033 (u32 *)&params->phy_timing,
1034 sizeof(params->phy_timing) / sizeof(u32));
1035 if (ret) {
1036 debug("%s: Cannot read rockchip,phy-timing\n", __func__);
1037 return -EINVAL;
1038 }
1039 ret = dev_read_u32_array(dev, "rockchip,sdram-params",
1040 (u32 *)&params->base,
1041 sizeof(params->base) / sizeof(u32));
1042 if (ret) {
1043 debug("%s: Cannot read rockchip,sdram-params\n", __func__);
1044 return -EINVAL;
1045 }
1046 #ifdef CONFIG_ROCKCHIP_FAST_SPL
1047 struct dram_info *priv = dev_get_priv(dev);
1048
1049 priv->is_veyron = !fdt_node_check_compatible(blob, 0, "google,veyron");
1050 #endif
1051 ret = regmap_init_mem(dev, &params->map);
1052 if (ret)
1053 return ret;
1054 #endif
1055
1056 return 0;
1057 }
1058 #endif /* CONFIG_SPL_BUILD */
1059
#if CONFIG_IS_ENABLED(OF_PLATDATA)
/*
 * conv_of_platdata() - copy decoded of-platdata into the runtime params
 * @dev: the DMC device
 *
 * Mirrors rk3288_dmc_ofdata_to_platdata() for the of-platdata build:
 * timing arrays are memcpy'd from the generated struct and the regmap is
 * built from the reg pairs (address/size, hence the /2).
 *
 * Return: 0 on success, or the regmap error.
 */
static int conv_of_platdata(struct udevice *dev)
{
	struct rk3288_sdram_params *plat = dev_get_platdata(dev);
	struct dtd_rockchip_rk3288_dmc *of_plat = &plat->of_plat;
	int ret;

	memcpy(&plat->pctl_timing, of_plat->rockchip_pctl_timing,
	       sizeof(plat->pctl_timing));
	memcpy(&plat->phy_timing, of_plat->rockchip_phy_timing,
	       sizeof(plat->phy_timing));
	memcpy(&plat->base, of_plat->rockchip_sdram_params, sizeof(plat->base));
	/* Rk3288 supports dual-channel, set default channel num to 2 */
	plat->num_channels = 2;
	ret = regmap_init_mem_platdata(dev, of_plat->reg,
				       ARRAY_SIZE(of_plat->reg) / 2,
				       &plat->map);
	if (ret)
		return ret;

	return 0;
}
#endif
1083
/*
 * rk3288_dmc_probe() - bind register ranges, run SPL init, record RAM size
 * @dev: the DMC device
 *
 * Resolves the NOC/GRF/SGRF/PMU syscon ranges (channel 1's MSCH block sits
 * 0x80 above channel 0's), requests the DDR clock, and in SPL additionally
 * maps the pctl/publ ranges and performs the full SDRAM init.  In all
 * phases the final size is read back from the PMU geometry register.
 *
 * Return: 0 on success, negative error otherwise.
 */
static int rk3288_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_SPL_BUILD
	struct rk3288_sdram_params *plat = dev_get_platdata(dev);
#endif
	struct dram_info *priv = dev_get_priv(dev);
	struct regmap *map;
	int ret;
	struct udevice *dev_clk;

#if CONFIG_IS_ENABLED(OF_PLATDATA)
	ret = conv_of_platdata(dev);
	if (ret)
		return ret;
#endif
	map = syscon_get_regmap_by_driver_data(ROCKCHIP_SYSCON_NOC);
	if (IS_ERR(map))
		return PTR_ERR(map);
	priv->chan[0].msch = regmap_get_range(map, 0);
	/* channel 1's scheduler registers follow channel 0's at +0x80 */
	priv->chan[1].msch = (struct rk3288_msch *)
			(regmap_get_range(map, 0) + 0x80);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	priv->sgrf = syscon_get_first_range(ROCKCHIP_SYSCON_SGRF);
	priv->pmu = syscon_get_first_range(ROCKCHIP_SYSCON_PMU);

#ifdef CONFIG_SPL_BUILD
	/* reg property order: ch0 pctl, ch0 publ, ch1 pctl, ch1 publ */
	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
	priv->chan[0].publ = regmap_get_range(plat->map, 1);
	priv->chan[1].pctl = regmap_get_range(plat->map, 2);
	priv->chan[1].publ = regmap_get_range(plat->map, 3);
#endif
	ret = rockchip_get_clk(&dev_clk);
	if (ret)
		return ret;
	priv->ddr_clk.id = CLK_DDR;
	ret = clk_request(dev_clk, &priv->ddr_clk);
	if (ret)
		return ret;

	priv->cru = rockchip_get_cru();
	if (IS_ERR(priv->cru))
		return PTR_ERR(priv->cru);
#ifdef CONFIG_SPL_BUILD
	ret = setup_sdram(dev);
	if (ret)
		return ret;
#endif
	priv->info.base = 0;
	priv->info.size = sdram_size_mb(priv->pmu) << 20;

	return 0;
}
1137
1138 static int rk3288_dmc_get_info(struct udevice *dev, struct ram_info *info)
1139 {
1140 struct dram_info *priv = dev_get_priv(dev);
1141
1142 *info = priv->info;
1143
1144 return 0;
1145 }
1146
/* RAM uclass operations for the RK3288 DMC */
static struct ram_ops rk3288_dmc_ops = {
	.get_info = rk3288_dmc_get_info,
};
1150
/* Device-tree match table */
static const struct udevice_id rk3288_dmc_ids[] = {
	{ .compatible = "rockchip,rk3288-dmc" },
	{ }
};
1155
/* Driver declaration; DT decode and platdata storage are SPL-only */
U_BOOT_DRIVER(dmc_rk3288) = {
	.name = "rockchip_rk3288_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3288_dmc_ids,
	.ops = &rk3288_dmc_ops,
#ifdef CONFIG_SPL_BUILD
	.ofdata_to_platdata = rk3288_dmc_ofdata_to_platdata,
#endif
	.probe = rk3288_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_SPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rk3288_sdram_params),
#endif
};