/*
 * (C) Copyright 2015 Google, Inc
 * Copyright 2014 Rockchip Inc.
 *
 * SPDX-License-Identifier: GPL-2.0
 *
 * Adapted from coreboot.
 */

#include <common.h>
#include <clk.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/cru_rk3288.h>
#include <asm/arch/ddr_rk3288.h>
#include <asm/arch/grf_rk3288.h>
#include <asm/arch/pmu_rk3288.h>
#include <asm/arch/sdram.h>
#include <linux/err.h>
#include <power/regulator.h>
#include <power/rk808_pmic.h>

DECLARE_GLOBAL_DATA_PTR;

struct chan_info {
        struct rk3288_ddr_pctl *pctl;
        struct rk3288_ddr_publ *publ;
        struct rk3288_msch *msch;
};

struct dram_info {
        struct chan_info chan[2];
        struct ram_info info;
        struct clk ddr_clk;
        struct rk3288_cru *cru;
        struct rk3288_grf *grf;
        struct rk3288_sgrf *sgrf;
        struct rk3288_pmu *pmu;
        bool is_veyron;
};

struct rk3288_sdram_params {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
        struct dtd_rockchip_rk3288_dmc of_plat;
#endif
        struct rk3288_sdram_channel ch[2];
        struct rk3288_sdram_pctl_timing pctl_timing;
        struct rk3288_sdram_phy_timing phy_timing;
        struct rk3288_base_params base;
        int num_channels;
        struct regmap *map;
};

#define TEST_PATTEN 0x5aa5f00f
#define DQS_GATE_TRAINING_ERROR_RANK0 (1 << 4)
#define DQS_GATE_TRAINING_ERROR_RANK1 (2 << 4)

#ifdef CONFIG_SPL_BUILD
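/*
 * Write an array of u32 values to consecutive 32-bit registers.
 * @dest: first register to write
 * @src: source values
 * @n: number of bytes to copy (must be a multiple of sizeof(u32))
 */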
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
        int i;

        for (i = 0; i < n / sizeof(u32); i++) {
                writel(*src, dest);
                src++;
                dest++;
        }
}

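/*
 * Assert/de-assert the soft resets of one DDR channel in the CRU: @ctl
 * drives the protocol-controller (upctl) reset bits and @phy the PUBL (PHY)
 * reset bits; a value of 1 holds the block in reset, 0 releases it.
 */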
static void ddr_reset(struct rk3288_cru *cru, u32 ch, u32 ctl, u32 phy)
{
        u32 phy_ctl_srstn_shift = 4 + 5 * ch;
        u32 ctl_psrstn_shift = 3 + 5 * ch;
        u32 ctl_srstn_shift = 2 + 5 * ch;
        u32 phy_psrstn_shift = 1 + 5 * ch;
        u32 phy_srstn_shift = 5 * ch;

        rk_clrsetreg(&cru->cru_softrst_con[10],
                     1 << phy_ctl_srstn_shift | 1 << ctl_psrstn_shift |
                     1 << ctl_srstn_shift | 1 << phy_psrstn_shift |
                     1 << phy_srstn_shift,
                     phy << phy_ctl_srstn_shift | ctl << ctl_psrstn_shift |
                     ctl << ctl_srstn_shift | phy << phy_psrstn_shift |
                     phy << phy_srstn_shift);
}

static void ddr_phy_ctl_reset(struct rk3288_cru *cru, u32 ch, u32 n)
{
        u32 phy_ctl_srstn_shift = 4 + 5 * ch;

        rk_clrsetreg(&cru->cru_softrst_con[10],
                     1 << phy_ctl_srstn_shift, n << phy_ctl_srstn_shift);
}

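/*
 * Reset the PHY and protocol controller of one channel and run the DLL
 * soft-reset sequence (AC DLL and all four byte-lane DLLs), with the delays
 * required between the individual steps.
 */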
static void phy_pctrl_reset(struct rk3288_cru *cru,
                            struct rk3288_ddr_publ *publ,
                            u32 channel)
{
        int i;

        ddr_reset(cru, channel, 1, 1);
        udelay(1);
        clrbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
        for (i = 0; i < 4; i++)
                clrbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

        udelay(10);
        setbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
        for (i = 0; i < 4; i++)
                setbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

        udelay(10);
        ddr_reset(cru, channel, 1, 0);
        udelay(10);
        ddr_reset(cru, channel, 0, 0);
        udelay(10);
}

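/*
 * Select between DLL bypass and normal DLL operation based on the DDR
 * frequency: at 250 MHz and below the AC and byte-lane DLLs are disabled
 * and PIR_DLLBYP is set; above 250 MHz the DLLs are enabled.
 */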
static void phy_dll_bypass_set(struct rk3288_ddr_publ *publ,
                               u32 freq)
{
        int i;

        if (freq <= 250000000) {
                if (freq <= 150000000)
                        clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
                else
                        setbits_le32(&publ->dllgcr, SBIAS_BYPASS);
                setbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
                for (i = 0; i < 4; i++)
                        setbits_le32(&publ->datx8[i].dxdllcr,
                                     DXDLLCR_DLLDIS);

                setbits_le32(&publ->pir, PIR_DLLBYP);
        } else {
                clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
                clrbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
                for (i = 0; i < 4; i++) {
                        clrbits_le32(&publ->datx8[i].dxdllcr,
                                     DXDLLCR_DLLDIS);
                }

                clrbits_le32(&publ->pir, PIR_DLLBYP);
        }
}

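/*
 * Program the DFI (DDR PHY interface) registers of the protocol controller:
 * start-up handshake, DRAM clock gating in self-refresh and deep power-down,
 * parity, low-power handshake timing, ODT behaviour and PHY update handling.
 */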
static void dfi_cfg(struct rk3288_ddr_pctl *pctl, u32 dramtype)
{
        writel(DFI_INIT_START, &pctl->dfistcfg0);
        writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN,
               &pctl->dfistcfg1);
        writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
        writel(7 << TLP_RESP_TIME_SHIFT | LP_SR_EN | LP_PD_EN,
               &pctl->dfilpcfg0);

        writel(2 << TCTRL_DELAY_TIME_SHIFT, &pctl->dfitctrldelay);
        writel(1 << TPHY_WRDATA_TIME_SHIFT, &pctl->dfitphywrdata);
        writel(0xf << TPHY_RDLAT_TIME_SHIFT, &pctl->dfitphyrdlat);
        writel(2 << TDRAM_CLK_DIS_TIME_SHIFT, &pctl->dfitdramclkdis);
        writel(2 << TDRAM_CLK_EN_TIME_SHIFT, &pctl->dfitdramclken);
        writel(1, &pctl->dfitphyupdtype0);

        /* cs0 and cs1 write odt enable */
        writel((RANK0_ODT_WRITE_SEL | RANK1_ODT_WRITE_SEL),
               &pctl->dfiodtcfg);
        /* odt write length */
        writel(7 << ODT_LEN_BL8_W_SHIFT, &pctl->dfiodtcfg1);
        /* phyupd and ctrlupd disabled */
        writel(0, &pctl->dfiupdcfg);
}

static void ddr_set_enable(struct rk3288_grf *grf, uint channel, bool enable)
{
        uint val = 0;

        if (enable) {
                val = 1 << (channel ? DDR1_16BIT_EN_SHIFT :
                            DDR0_16BIT_EN_SHIFT);
        }
        rk_clrsetreg(&grf->soc_con0,
                     1 << (channel ? DDR1_16BIT_EN_SHIFT : DDR0_16BIT_EN_SHIFT),
                     val);
}

static void ddr_set_ddr3_mode(struct rk3288_grf *grf, uint channel,
                              bool ddr3_mode)
{
        uint mask, val;

        mask = 1 << (channel ? MSCH1_MAINDDR3_SHIFT : MSCH0_MAINDDR3_SHIFT);
        val = ddr3_mode << (channel ? MSCH1_MAINDDR3_SHIFT :
                            MSCH0_MAINDDR3_SHIFT);
        rk_clrsetreg(&grf->soc_con0, mask, val);
}

static void ddr_set_en_bst_odt(struct rk3288_grf *grf, uint channel,
                               bool enable, bool enable_bst, bool enable_odt)
{
        uint mask;
        bool disable_bst = !enable_bst;

        mask = channel ?
                (1 << LPDDR3_EN1_SHIFT | 1 << UPCTL1_BST_DIABLE_SHIFT |
                 1 << UPCTL1_LPDDR3_ODT_EN_SHIFT) :
                (1 << LPDDR3_EN0_SHIFT | 1 << UPCTL0_BST_DIABLE_SHIFT |
                 1 << UPCTL0_LPDDR3_ODT_EN_SHIFT);
        rk_clrsetreg(&grf->soc_con2, mask,
                     enable << (channel ? LPDDR3_EN1_SHIFT : LPDDR3_EN0_SHIFT) |
                     disable_bst << (channel ? UPCTL1_BST_DIABLE_SHIFT :
                                     UPCTL0_BST_DIABLE_SHIFT) |
                     enable_odt << (channel ? UPCTL1_LPDDR3_ODT_EN_SHIFT :
                                    UPCTL0_LPDDR3_ODT_EN_SHIFT));
}

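/*
 * Configure the protocol controller (pctl) of one channel: copy the pctl
 * timing set into the timing registers, then program the DFI read/write
 * latencies, MCFG and the GRF mode bits according to the DRAM type
 * (LPDDR3 or DDR3).
 */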
static void pctl_cfg(u32 channel, struct rk3288_ddr_pctl *pctl,
                     struct rk3288_sdram_params *sdram_params,
                     struct rk3288_grf *grf)
{
        unsigned int burstlen;

        burstlen = (sdram_params->base.noc_timing >> 18) & 0x7;
        copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
                    sizeof(sdram_params->pctl_timing));
        switch (sdram_params->base.dramtype) {
        case LPDDR3:
                writel(sdram_params->pctl_timing.tcl - 1,
                       &pctl->dfitrddataen);
                writel(sdram_params->pctl_timing.tcwl,
                       &pctl->dfitphywrlat);
                burstlen >>= 1;
                writel(LPDDR2_S4 | 0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
                       LPDDR2_EN | burstlen << BURSTLENGTH_SHIFT |
                       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
                       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
                       &pctl->mcfg);
                ddr_set_ddr3_mode(grf, channel, false);
                ddr_set_enable(grf, channel, true);
                ddr_set_en_bst_odt(grf, channel, true, false,
                                   sdram_params->base.odt);
                break;
        case DDR3:
                if (sdram_params->phy_timing.mr[1] & DDR3_DLL_DISABLE) {
                        writel(sdram_params->pctl_timing.tcl - 3,
                               &pctl->dfitrddataen);
                } else {
                        writel(sdram_params->pctl_timing.tcl - 2,
                               &pctl->dfitrddataen);
                }
                writel(sdram_params->pctl_timing.tcwl - 1,
                       &pctl->dfitphywrlat);
                writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
                       DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
                       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
                       &pctl->mcfg);
                ddr_set_ddr3_mode(grf, channel, true);
                ddr_set_enable(grf, channel, true);

                ddr_set_en_bst_odt(grf, channel, false, true, false);
                break;
        }

        setbits_le32(&pctl->scfg, 1);
}

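/*
 * Configure the DDR PHY (PUBL) and the memory scheduler for one channel:
 * PHY timing registers, NoC timing/activate/devtodev windows, the PHY
 * power-up timers (derived from the DDR frequency), the DRAM-mode specific
 * DCR/DXCCR/DSGCR settings and dynamic on-die termination.
 */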
static void phy_cfg(const struct chan_info *chan, u32 channel,
                    struct rk3288_sdram_params *sdram_params)
{
        struct rk3288_ddr_publ *publ = chan->publ;
        struct rk3288_msch *msch = chan->msch;
        uint ddr_freq_mhz = sdram_params->base.ddr_freq / 1000000;
        u32 dinit2, tmp;
        int i;

        dinit2 = DIV_ROUND_UP(ddr_freq_mhz * 200000, 1000);
        /* DDR PHY Timing */
        copy_to_reg(&publ->dtpr[0], &sdram_params->phy_timing.dtpr0,
                    sizeof(sdram_params->phy_timing));
        writel(sdram_params->base.noc_timing, &msch->ddrtiming);
        writel(0x3f, &msch->readlatency);
        writel(sdram_params->base.noc_activate, &msch->activate);
        writel(2 << BUSWRTORD_SHIFT | 2 << BUSRDTOWR_SHIFT |
               1 << BUSRDTORD_SHIFT, &msch->devtodev);
        writel(DIV_ROUND_UP(ddr_freq_mhz * 5120, 1000) << PRT_DLLLOCK_SHIFT |
               DIV_ROUND_UP(ddr_freq_mhz * 50, 1000) << PRT_DLLSRST_SHIFT |
               8 << PRT_ITMSRST_SHIFT, &publ->ptr[0]);
        writel(DIV_ROUND_UP(ddr_freq_mhz * 500000, 1000) << PRT_DINIT0_SHIFT |
               DIV_ROUND_UP(ddr_freq_mhz * 400, 1000) << PRT_DINIT1_SHIFT,
               &publ->ptr[1]);
        writel(min(dinit2, 0x1ffffU) << PRT_DINIT2_SHIFT |
               DIV_ROUND_UP(ddr_freq_mhz * 1000, 1000) << PRT_DINIT3_SHIFT,
               &publ->ptr[2]);

        switch (sdram_params->base.dramtype) {
        case LPDDR3:
                clrsetbits_le32(&publ->pgcr, 0x1F,
                                0 << PGCR_DFTLMT_SHIFT |
                                0 << PGCR_DFTCMP_SHIFT |
                                1 << PGCR_DQSCFG_SHIFT |
                                0 << PGCR_ITMDMD_SHIFT);
                /* DDRMODE select LPDDR3 */
                clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
                                DDRMD_LPDDR2_LPDDR3 << DDRMD_SHIFT);
                clrsetbits_le32(&publ->dxccr,
                                DQSNRES_MASK << DQSNRES_SHIFT |
                                DQSRES_MASK << DQSRES_SHIFT,
                                4 << DQSRES_SHIFT | 0xc << DQSNRES_SHIFT);
                tmp = readl(&publ->dtpr[1]);
                tmp = ((tmp >> TDQSCKMAX_SHIFT) & TDQSCKMAX_MASK) -
                      ((tmp >> TDQSCK_SHIFT) & TDQSCK_MASK);
                clrsetbits_le32(&publ->dsgcr,
                                DQSGE_MASK << DQSGE_SHIFT |
                                DQSGX_MASK << DQSGX_SHIFT,
                                tmp << DQSGE_SHIFT | tmp << DQSGX_SHIFT);
                break;
        case DDR3:
                clrbits_le32(&publ->pgcr, 0x1f);
                clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
                                DDRMD_DDR3 << DDRMD_SHIFT);
                break;
        }
        if (sdram_params->base.odt) {
                /* dynamic RTT enable */
                for (i = 0; i < 4; i++)
                        setbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
        } else {
                /* dynamic RTT disable */
                for (i = 0; i < 4; i++)
                        clrbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
        }
}

static void phy_init(struct rk3288_ddr_publ *publ)
{
        setbits_le32(&publ->pir, PIR_INIT | PIR_DLLSRST
                     | PIR_DLLLOCK | PIR_ZCAL | PIR_ITMSRST | PIR_CLRSR);
        udelay(1);
        while ((readl(&publ->pgsr) &
                (PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE)) !=
               (PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE))
                ;
}

static void send_command(struct rk3288_ddr_pctl *pctl, u32 rank,
                         u32 cmd, u32 arg)
{
        writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
        udelay(1);
        while (readl(&pctl->mcmd) & START_CMD)
                ;
}

static inline void send_command_op(struct rk3288_ddr_pctl *pctl,
                                   u32 rank, u32 cmd, u32 ma, u32 op)
{
        send_command(pctl, rank, cmd, (ma & LPDDR2_MA_MASK) << LPDDR2_MA_SHIFT |
                     (op & LPDDR2_OP_MASK) << LPDDR2_OP_SHIFT);
}

static void memory_init(struct rk3288_ddr_publ *publ,
                        u32 dramtype)
{
        setbits_le32(&publ->pir,
                     (PIR_INIT | PIR_DRAMINIT | PIR_LOCKBYP
                      | PIR_ZCALBYP | PIR_CLRSR | PIR_ICPC
                      | (dramtype == DDR3 ? PIR_DRAMRST : 0)));
        udelay(1);
        while ((readl(&publ->pgsr) & (PGSR_IDONE | PGSR_DLDONE))
               != (PGSR_IDONE | PGSR_DLDONE))
                ;
}

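/*
 * Drive the protocol-controller state machine into the CONFIG state, waking
 * it up from low power first when necessary (the low-power case deliberately
 * falls through to the ACCESS/INIT_MEM handling).
 */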
static void move_to_config_state(struct rk3288_ddr_publ *publ,
                                 struct rk3288_ddr_pctl *pctl)
{
        unsigned int state;

        while (1) {
                state = readl(&pctl->stat) & PCTL_STAT_MSK;

                switch (state) {
                case LOW_POWER:
                        writel(WAKEUP_STATE, &pctl->sctl);
                        while ((readl(&pctl->stat) & PCTL_STAT_MSK)
                               != ACCESS)
                                ;
                        /* wait DLL lock */
                        while ((readl(&publ->pgsr) & PGSR_DLDONE)
                               != PGSR_DLDONE)
                                ;
                        /*
                         * If in the low-power state we need to wake up
                         * first and then enter the config state, so
                         * deliberately no break here.
                         */
                case ACCESS:
                        /* fall through */
                case INIT_MEM:
                        writel(CFG_STATE, &pctl->sctl);
                        while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
                                ;
                        break;
                case CONFIG:
                        return;
                default:
                        break;
                }
        }
}

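/*
 * Select half-width (16-bit, n == 1) or full-width (32-bit) operation for a
 * channel: in half-width mode the upper two byte lanes and their DLLs are
 * disabled; in full-width mode they are re-enabled and their DLLs reset.
 */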
static void set_bandwidth_ratio(const struct chan_info *chan, u32 channel,
                                u32 n, struct rk3288_grf *grf)
{
        struct rk3288_ddr_pctl *pctl = chan->pctl;
        struct rk3288_ddr_publ *publ = chan->publ;
        struct rk3288_msch *msch = chan->msch;

        if (n == 1) {
                setbits_le32(&pctl->ppcfg, 1);
                rk_setreg(&grf->soc_con0, 1 << (8 + channel));
                setbits_le32(&msch->ddrtiming, 1 << 31);
                /* Data Byte disable */
                clrbits_le32(&publ->datx8[2].dxgcr, 1);
                clrbits_le32(&publ->datx8[3].dxgcr, 1);
                /* disable DLL */
                setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
                setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
        } else {
                clrbits_le32(&pctl->ppcfg, 1);
                rk_clrreg(&grf->soc_con0, 1 << (8 + channel));
                clrbits_le32(&msch->ddrtiming, 1 << 31);
                /* Data Byte enable */
                setbits_le32(&publ->datx8[2].dxgcr, 1);
                setbits_le32(&publ->datx8[3].dxgcr, 1);

                /* enable DLL */
                clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
                clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
                /* reset DLL */
                clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
                clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
                udelay(10);
                setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
                setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
        }
        setbits_le32(&pctl->dfistcfg0, 1 << 2);
}

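/*
 * Run DQS gate training (QSTRN) and read valid training (RVTRN) on all
 * populated byte lanes. Auto refresh is disabled during training and a few
 * refresh commands are sent afterwards to make up for the ones that were
 * lost. Returns 0 on success or -1 if the PHY reports a training error.
 */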
static int data_training(const struct chan_info *chan, u32 channel,
                         struct rk3288_sdram_params *sdram_params)
{
        unsigned int j;
        int ret = 0;
        u32 rank;
        int i;
        u32 step[2] = { PIR_QSTRN, PIR_RVTRN };
        struct rk3288_ddr_publ *publ = chan->publ;
        struct rk3288_ddr_pctl *pctl = chan->pctl;

        /* disable auto refresh */
        writel(0, &pctl->trefi);

        if (sdram_params->base.dramtype != LPDDR3)
                setbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);
        rank = sdram_params->ch[channel].rank | 1;
        for (j = 0; j < ARRAY_SIZE(step); j++) {
                /*
                 * Trigger QSTRN and RVTRN:
                 * clear the DTDONE status first
                 */
                setbits_le32(&publ->pir, PIR_CLRSR);

                /* trigger DTT */
                setbits_le32(&publ->pir,
                             PIR_INIT | step[j] | PIR_LOCKBYP | PIR_ZCALBYP |
                             PIR_CLRSR);
                udelay(1);
                /* wait for the echo byte DTDONE */
                while ((readl(&publ->datx8[0].dxgsr[0]) & rank)
                       != rank)
                        ;
                while ((readl(&publ->datx8[1].dxgsr[0]) & rank)
                       != rank)
                        ;
                if (!(readl(&pctl->ppcfg) & 1)) {
                        while ((readl(&publ->datx8[2].dxgsr[0])
                                & rank) != rank)
                                ;
                        while ((readl(&publ->datx8[3].dxgsr[0])
                                & rank) != rank)
                                ;
                }
                if (readl(&publ->pgsr) &
                    (PGSR_DTERR | PGSR_RVERR | PGSR_RVEIRR)) {
                        ret = -1;
                        break;
                }
        }
        /* send some auto refresh commands to make up for those lost during DTT */
        for (i = 0; i < (rank > 1 ? 8 : 4); i++)
                send_command(pctl, rank, REF_CMD, 0);

        if (sdram_params->base.dramtype != LPDDR3)
                clrbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);

        /* resume auto refresh */
        writel(sdram_params->pctl_timing.trefi, &pctl->trefi);

        return ret;
}

static void move_to_access_state(const struct chan_info *chan)
{
        struct rk3288_ddr_publ *publ = chan->publ;
        struct rk3288_ddr_pctl *pctl = chan->pctl;
        unsigned int state;

        while (1) {
                state = readl(&pctl->stat) & PCTL_STAT_MSK;

                switch (state) {
                case LOW_POWER:
                        if (((readl(&pctl->stat) >> LP_TRIG_SHIFT) &
                             LP_TRIG_MASK) == 1)
                                return;

                        writel(WAKEUP_STATE, &pctl->sctl);
                        while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS)
                                ;
                        /* wait DLL lock */
                        while ((readl(&publ->pgsr) & PGSR_DLDONE)
                               != PGSR_DLDONE)
                                ;
                        break;
                case INIT_MEM:
                        writel(CFG_STATE, &pctl->sctl);
                        while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
                                ;
                        /* fall through */
                case CONFIG:
                        writel(GO_STATE, &pctl->sctl);
                        while ((readl(&pctl->stat) & PCTL_STAT_MSK) == CONFIG)
                                ;
                        break;
                case ACCESS:
                        return;
                default:
                        break;
                }
        }
}

static void dram_cfg_rbc(const struct chan_info *chan, u32 chnum,
                         struct rk3288_sdram_params *sdram_params)
{
        struct rk3288_ddr_publ *publ = chan->publ;

        if (sdram_params->ch[chnum].bk == 3)
                clrsetbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT,
                                1 << PDQ_SHIFT);
        else
                clrbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT);

        writel(sdram_params->base.ddrconfig, &chan->msch->ddrconf);
}

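/*
 * Record the final DRAM geometry of both channels in the PMU SYS_REG2
 * register (the encoding that sdram_size_mb() decodes later), program the
 * per-channel ddrconf, and set the channel stride in the secure GRF.
 */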
static void dram_all_config(const struct dram_info *dram,
                            struct rk3288_sdram_params *sdram_params)
{
        unsigned int chan;
        u32 sys_reg = 0;

        sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
        sys_reg |= (sdram_params->num_channels - 1) << SYS_REG_NUM_CH_SHIFT;
        for (chan = 0; chan < sdram_params->num_channels; chan++) {
                const struct rk3288_sdram_channel *info =
                        &sdram_params->ch[chan];

                sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(chan);
                sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(chan);
                sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(chan);
                sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(chan);
                sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(chan);
                sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(chan);
                sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(chan);
                sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(chan);
                sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(chan);

                dram_cfg_rbc(&dram->chan[chan], chan, sdram_params);
        }
        writel(sys_reg, &dram->pmu->sys_reg[2]);
        rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, sdram_params->base.stride);
}
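
/*
 * NIU ddrconf lookup table. Each entry encodes
 * ((cs0_row - 12) << 4) | ((col - 9) - (bw == 2 ? 0 : 1)), the same value
 * that sdram_get_niu_config() computes; the matching index is used as the
 * ddrconfig number.
 */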
const int ddrconf_table[] = {
        /* row col,bw */
        0,
        ((1 << 4) | 1),
        ((2 << 4) | 1),
        ((3 << 4) | 1),
        ((4 << 4) | 1),
        ((1 << 4) | 2),
        ((2 << 4) | 2),
        ((3 << 4) | 2),
        ((1 << 4) | 0),
        ((2 << 4) | 0),
        ((3 << 4) | 0),
        0,
        0,
        0,
        0,
        ((4 << 4) | 2),
};

static int sdram_rank_bw_detect(struct dram_info *dram, int channel,
                                struct rk3288_sdram_params *sdram_params)
{
        int reg;
        int need_training = 0;
        const struct chan_info *chan = &dram->chan[channel];
        struct rk3288_ddr_publ *publ = chan->publ;

        if (-1 == data_training(chan, channel, sdram_params)) {
                reg = readl(&publ->datx8[0].dxgsr[0]);
                /* Check the result for rank 0 */
                if ((channel == 0) && (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
                        debug("data training failed!\n");
                        return -EIO;
                } else if ((channel == 1) &&
                           (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
                        sdram_params->num_channels = 1;
                }

                /* Check the result for rank 1 */
                if (reg & DQS_GATE_TRAINING_ERROR_RANK1) {
                        sdram_params->ch[channel].rank = 1;
                        clrsetbits_le32(&publ->pgcr, 0xF << 18,
                                        sdram_params->ch[channel].rank << 18);
                        need_training = 1;
                }
                reg = readl(&publ->datx8[2].dxgsr[0]);
                if (reg & (1 << 4)) {
                        sdram_params->ch[channel].bw = 1;
                        set_bandwidth_ratio(chan, channel,
                                            sdram_params->ch[channel].bw,
                                            dram->grf);
                        need_training = 1;
                }
        }
        /* Assume the die bit width is the same as the chip bit width */
        sdram_params->ch[channel].dbw = sdram_params->ch[channel].bw;

        if (need_training &&
            (-1 == data_training(chan, channel, sdram_params))) {
                if (sdram_params->base.dramtype == LPDDR3) {
                        ddr_phy_ctl_reset(dram->cru, channel, 1);
                        udelay(10);
                        ddr_phy_ctl_reset(dram->cru, channel, 0);
                        udelay(10);
                }
                debug("2nd data training failed!\n");
                return -EIO;
        }

        return 0;
}

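/*
 * Detect the number of column and row address bits of a channel: starting
 * from the widest candidate, write a test pattern at an offset derived from
 * the candidate size and accept it once the pattern reads back correctly
 * without aliasing onto the base address.
 */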
static int sdram_col_row_detect(struct dram_info *dram, int channel,
                                struct rk3288_sdram_params *sdram_params)
{
        int row, col;
        unsigned int addr;
        const struct chan_info *chan = &dram->chan[channel];
        struct rk3288_ddr_pctl *pctl = chan->pctl;
        struct rk3288_ddr_publ *publ = chan->publ;
        int ret = 0;

        /* Detect col */
        for (col = 11; col >= 9; col--) {
                writel(0, CONFIG_SYS_SDRAM_BASE);
                addr = CONFIG_SYS_SDRAM_BASE +
                        (1 << (col + sdram_params->ch[channel].bw - 1));
                writel(TEST_PATTEN, addr);
                if ((readl(addr) == TEST_PATTEN) &&
                    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
                        break;
        }
        if (col == 8) {
                printf("Col detect error\n");
                ret = -EINVAL;
                goto out;
        } else {
                sdram_params->ch[channel].col = col;
        }

        move_to_config_state(publ, pctl);
        writel(4, &chan->msch->ddrconf);
        move_to_access_state(chan);
        /* Detect row */
        for (row = 16; row >= 12; row--) {
                writel(0, CONFIG_SYS_SDRAM_BASE);
                addr = CONFIG_SYS_SDRAM_BASE + (1 << (row + 15 - 1));
                writel(TEST_PATTEN, addr);
                if ((readl(addr) == TEST_PATTEN) &&
                    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
                        break;
        }
        if (row == 11) {
                printf("Row detect error\n");
                ret = -EINVAL;
        } else {
                sdram_params->ch[channel].cs1_row = row;
                sdram_params->ch[channel].row_3_4 = 0;
                debug("chn %d col %d, row %d\n", channel, col, row);
                sdram_params->ch[channel].cs0_row = row;
        }

out:
        return ret;
}

static int sdram_get_niu_config(struct rk3288_sdram_params *sdram_params)
{
        int i, tmp, size, ret = 0;

        tmp = sdram_params->ch[0].col - 9;
        tmp -= (sdram_params->ch[0].bw == 2) ? 0 : 1;
        tmp |= ((sdram_params->ch[0].cs0_row - 12) << 4);
        size = ARRAY_SIZE(ddrconf_table);
        for (i = 0; i < size; i++)
                if (tmp == ddrconf_table[i])
                        break;
        if (i >= size) {
                printf("niu config not found\n");
                ret = -EINVAL;
        } else {
                sdram_params->base.ddrconfig = i;
        }

        return ret;
}

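/*
 * Compute the total capacity in MB from the detected geometry of channel 0
 * and select the matching channel-stride code (later written to the SGRF by
 * dram_all_config()); only 512 MB, 1 GB, 2 GB and 4 GB totals are handled.
 */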
static int sdram_get_stride(struct rk3288_sdram_params *sdram_params)
{
        int stride = -1;
        int ret = 0;
        long cap = sdram_params->num_channels * (1u <<
                        (sdram_params->ch[0].cs0_row +
                         sdram_params->ch[0].col +
                         (sdram_params->ch[0].rank - 1) +
                         sdram_params->ch[0].bw +
                         3 - 20));

        switch (cap) {
        case 512:
                stride = 0;
                break;
        case 1024:
                stride = 5;
                break;
        case 2048:
                stride = 9;
                break;
        case 4096:
                stride = 0xd;
                break;
        default:
                stride = -1;
                printf("could not find correct stride, cap error!\n");
                ret = -EINVAL;
                break;
        }
        sdram_params->base.stride = stride;

        return ret;
}

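/*
 * Full SPL-time SDRAM bring-up: set the DDR clock, then for each channel
 * reset and configure the PHY and protocol controller, run the DRAM
 * initialisation and mode-register setup, detect rank/width and col/row
 * geometry, and finally program the NIU configuration, stride and sys_reg.
 */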
static int sdram_init(struct dram_info *dram,
                      struct rk3288_sdram_params *sdram_params)
{
        int channel;
        int zqcr;
        int ret;

        debug("%s start\n", __func__);
        if ((sdram_params->base.dramtype == DDR3 &&
             sdram_params->base.ddr_freq > 800000000) ||
            (sdram_params->base.dramtype == LPDDR3 &&
             sdram_params->base.ddr_freq > 533000000)) {
                debug("SDRAM frequency is too high!\n");
                return -E2BIG;
        }

        debug("ddr clk dpll\n");
        ret = clk_set_rate(&dram->ddr_clk, sdram_params->base.ddr_freq);
        debug("ret=%d\n", ret);
        if (ret) {
                debug("Could not set DDR clock\n");
                return ret;
        }

        for (channel = 0; channel < 2; channel++) {
                const struct chan_info *chan = &dram->chan[channel];
                struct rk3288_ddr_pctl *pctl = chan->pctl;
                struct rk3288_ddr_publ *publ = chan->publ;

                /* map all the 4GB space to the current channel */
                if (channel)
                        rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x17);
                else
                        rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x1a);
                phy_pctrl_reset(dram->cru, publ, channel);
                phy_dll_bypass_set(publ, sdram_params->base.ddr_freq);

                dfi_cfg(pctl, sdram_params->base.dramtype);

                pctl_cfg(channel, pctl, sdram_params, dram->grf);

                phy_cfg(chan, channel, sdram_params);

                phy_init(publ);

                writel(POWER_UP_START, &pctl->powctl);
                while (!(readl(&pctl->powstat) & POWER_UP_DONE))
                        ;

                memory_init(publ, sdram_params->base.dramtype);
                move_to_config_state(publ, pctl);

                if (sdram_params->base.dramtype == LPDDR3) {
                        send_command(pctl, 3, DESELECT_CMD, 0);
                        udelay(1);
                        send_command(pctl, 3, PREA_CMD, 0);
                        udelay(1);
                        send_command_op(pctl, 3, MRS_CMD, 63, 0xfc);
                        udelay(1);
                        send_command_op(pctl, 3, MRS_CMD, 1,
                                        sdram_params->phy_timing.mr[1]);
                        udelay(1);
                        send_command_op(pctl, 3, MRS_CMD, 2,
                                        sdram_params->phy_timing.mr[2]);
                        udelay(1);
                        send_command_op(pctl, 3, MRS_CMD, 3,
                                        sdram_params->phy_timing.mr[3]);
                        udelay(1);
                }

                /* Use 32-bit bus width for detection */
                sdram_params->ch[channel].bw = 2;
                set_bandwidth_ratio(chan, channel,
                                    sdram_params->ch[channel].bw, dram->grf);
                /*
                 * Set cs, using n = 3 for detection:
                 * CS0, n = 1
                 * CS1, n = 2
                 * CS0 & CS1, n = 3
                 */
                sdram_params->ch[channel].rank = 2;
                clrsetbits_le32(&publ->pgcr, 0xF << 18,
                                (sdram_params->ch[channel].rank | 1) << 18);

                /* DS=40ohm, ODT=155ohm */
                zqcr = 1 << ZDEN_SHIFT | 2 << PU_ONDIE_SHIFT |
                       2 << PD_ONDIE_SHIFT | 0x19 << PU_OUTPUT_SHIFT |
                       0x19 << PD_OUTPUT_SHIFT;
                writel(zqcr, &publ->zq1cr[0]);
                writel(zqcr, &publ->zq0cr[0]);

                if (sdram_params->base.dramtype == LPDDR3) {
                        /* LPDDR2/LPDDR3 need to wait for DAI to complete, max 10us */
                        udelay(10);
                        send_command_op(pctl,
                                        sdram_params->ch[channel].rank | 1,
                                        MRS_CMD, 11,
                                        sdram_params->base.odt ? 3 : 0);
                        if (channel == 0) {
                                writel(0, &pctl->mrrcfg0);
                                send_command_op(pctl, 1, MRR_CMD, 8, 0);
                                /* S8 */
                                if ((readl(&pctl->mrrstat0) & 0x3) != 3) {
                                        debug("LPDDR3 MR8 check failed!\n");
                                        return -EREMOTEIO;
                                }
                        }
                }

                /* Detect the rank and bit-width with data-training */
                sdram_rank_bw_detect(dram, channel, sdram_params);

                if (sdram_params->base.dramtype == LPDDR3) {
                        u32 i;

                        writel(0, &pctl->mrrcfg0);
                        for (i = 0; i < 17; i++)
                                send_command_op(pctl, 1, MRR_CMD, i, 0);
                }
                writel(15, &chan->msch->ddrconf);
                move_to_access_state(chan);
                /* DDR3 and LPDDR3 always have 8 banks, no need to detect */
                sdram_params->ch[channel].bk = 3;
                /* Detect col and row number */
                ret = sdram_col_row_detect(dram, channel, sdram_params);
                if (ret)
                        goto error;
        }
        /* Find NIU DDR configuration */
        ret = sdram_get_niu_config(sdram_params);
        if (ret)
                goto error;
        /* Find stride setting */
        ret = sdram_get_stride(sdram_params);
        if (ret)
                goto error;

        dram_all_config(dram, sdram_params);
        debug("%s done\n", __func__);

        return 0;
error:
        printf("DRAM init failed!\n");
        hang();
}
#endif /* CONFIG_SPL_BUILD */

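/*
 * Decode the geometry stored in PMU SYS_REG2 by dram_all_config() and return
 * the usable SDRAM size in MB, capped below the 0xfe000000 peripheral space.
 */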
size_t sdram_size_mb(struct rk3288_pmu *pmu)
{
        u32 rank, col, bk, cs0_row, cs1_row, bw, row_3_4;
        size_t chipsize_mb = 0;
        size_t size_mb = 0;
        u32 ch;
        u32 sys_reg = readl(&pmu->sys_reg[2]);
        u32 chans;

        chans = 1 + ((sys_reg >> SYS_REG_NUM_CH_SHIFT) & SYS_REG_NUM_CH_MASK);

        for (ch = 0; ch < chans; ch++) {
                rank = 1 + (sys_reg >> SYS_REG_RANK_SHIFT(ch) &
                            SYS_REG_RANK_MASK);
                col = 9 + (sys_reg >> SYS_REG_COL_SHIFT(ch) & SYS_REG_COL_MASK);
                bk = 3 - ((sys_reg >> SYS_REG_BK_SHIFT(ch)) & SYS_REG_BK_MASK);
                cs0_row = 13 + (sys_reg >> SYS_REG_CS0_ROW_SHIFT(ch) &
                                SYS_REG_CS0_ROW_MASK);
                cs1_row = 13 + (sys_reg >> SYS_REG_CS1_ROW_SHIFT(ch) &
                                SYS_REG_CS1_ROW_MASK);
                bw = (2 >> ((sys_reg >> SYS_REG_BW_SHIFT(ch)) &
                            SYS_REG_BW_MASK));
                row_3_4 = sys_reg >> SYS_REG_ROW_3_4_SHIFT(ch) &
                          SYS_REG_ROW_3_4_MASK;
                chipsize_mb = (1 << (cs0_row + col + bk + bw - 20));

                if (rank > 1)
                        chipsize_mb += chipsize_mb >>
                                       (cs0_row - cs1_row);
                if (row_3_4)
                        chipsize_mb = chipsize_mb * 3 / 4;
                size_mb += chipsize_mb;
        }

        /*
         * We use the 0x00000000~0xfdffffff space since 0xff000000~0xffffffff
         * is SoC register space (i.e. reserved), and 0xfe000000~0xfeffffff is
         * inaccessible for some IP controllers.
         */
        size_mb = min(size_mb, 0xfe000000 >> 20);

        return size_mb;
}

#ifdef CONFIG_SPL_BUILD
# ifdef CONFIG_ROCKCHIP_FAST_SPL
static int veyron_init(struct dram_info *priv)
{
        struct udevice *pmic;
        int ret;

        ret = uclass_first_device_err(UCLASS_PMIC, &pmic);
        if (ret)
                return ret;

        /* Slowly raise to max CPU voltage to prevent overshoot */
        ret = rk808_spl_configure_buck(pmic, 1, 1200000);
        if (ret)
                return ret;
        udelay(175); /* Must wait for voltage to stabilize, 2mV/us */
        ret = rk808_spl_configure_buck(pmic, 1, 1400000);
        if (ret)
                return ret;
        udelay(100); /* Must wait for voltage to stabilize, 2mV/us */

        rk3288_clk_configure_cpu(priv->cru, priv->grf);

        return 0;
}
# endif

static int setup_sdram(struct udevice *dev)
{
        struct dram_info *priv = dev_get_priv(dev);
        struct rk3288_sdram_params *params = dev_get_platdata(dev);

# ifdef CONFIG_ROCKCHIP_FAST_SPL
        if (priv->is_veyron) {
                int ret;

                ret = veyron_init(priv);
                if (ret)
                        return ret;
        }
# endif

        return sdram_init(priv, params);
}

static int rk3288_dmc_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
        struct rk3288_sdram_params *params = dev_get_platdata(dev);
        const void *blob = gd->fdt_blob;
        int node = dev->of_offset;
        int ret;

        /* RK3288 supports dual-channel, set default channel num to 2 */
        params->num_channels = 2;
        ret = fdtdec_get_int_array(blob, node, "rockchip,pctl-timing",
                                   (u32 *)&params->pctl_timing,
                                   sizeof(params->pctl_timing) / sizeof(u32));
        if (ret) {
                debug("%s: Cannot read rockchip,pctl-timing\n", __func__);
                return -EINVAL;
        }
        ret = fdtdec_get_int_array(blob, node, "rockchip,phy-timing",
                                   (u32 *)&params->phy_timing,
                                   sizeof(params->phy_timing) / sizeof(u32));
        if (ret) {
                debug("%s: Cannot read rockchip,phy-timing\n", __func__);
                return -EINVAL;
        }
        ret = fdtdec_get_int_array(blob, node, "rockchip,sdram-params",
                                   (u32 *)&params->base,
                                   sizeof(params->base) / sizeof(u32));
        if (ret) {
                debug("%s: Cannot read rockchip,sdram-params\n", __func__);
                return -EINVAL;
        }
#ifdef CONFIG_ROCKCHIP_FAST_SPL
        struct dram_info *priv = dev_get_priv(dev);

        priv->is_veyron = !fdt_node_check_compatible(blob, 0, "google,veyron");
#endif
        ret = regmap_init_mem(dev, &params->map);
        if (ret)
                return ret;
#endif

        return 0;
}
#endif /* CONFIG_SPL_BUILD */

#if CONFIG_IS_ENABLED(OF_PLATDATA)
static int conv_of_platdata(struct udevice *dev)
{
        struct rk3288_sdram_params *plat = dev_get_platdata(dev);
        struct dtd_rockchip_rk3288_dmc *of_plat = &plat->of_plat;
        int ret;

        memcpy(&plat->pctl_timing, of_plat->rockchip_pctl_timing,
               sizeof(plat->pctl_timing));
        memcpy(&plat->phy_timing, of_plat->rockchip_phy_timing,
               sizeof(plat->phy_timing));
        memcpy(&plat->base, of_plat->rockchip_sdram_params, sizeof(plat->base));
        /* RK3288 supports dual-channel, set default channel num to 2 */
        plat->num_channels = 2;
        ret = regmap_init_mem_platdata(dev, of_plat->reg,
                                       ARRAY_SIZE(of_plat->reg) / 2,
                                       &plat->map);
        if (ret)
                return ret;

        return 0;
}
#endif

static int rk3288_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_SPL_BUILD
        struct rk3288_sdram_params *plat = dev_get_platdata(dev);
#endif
        struct dram_info *priv = dev_get_priv(dev);
        struct regmap *map;
        int ret;
        struct udevice *dev_clk;

#if CONFIG_IS_ENABLED(OF_PLATDATA)
        ret = conv_of_platdata(dev);
        if (ret)
                return ret;
#endif
        map = syscon_get_regmap_by_driver_data(ROCKCHIP_SYSCON_NOC);
        if (IS_ERR(map))
                return PTR_ERR(map);
        priv->chan[0].msch = regmap_get_range(map, 0);
        priv->chan[1].msch = (struct rk3288_msch *)
                        (regmap_get_range(map, 0) + 0x80);

        priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
        priv->sgrf = syscon_get_first_range(ROCKCHIP_SYSCON_SGRF);
        priv->pmu = syscon_get_first_range(ROCKCHIP_SYSCON_PMU);

#ifdef CONFIG_SPL_BUILD
        priv->chan[0].pctl = regmap_get_range(plat->map, 0);
        priv->chan[0].publ = regmap_get_range(plat->map, 1);
        priv->chan[1].pctl = regmap_get_range(plat->map, 2);
        priv->chan[1].publ = regmap_get_range(plat->map, 3);
#endif
        ret = rockchip_get_clk(&dev_clk);
        if (ret)
                return ret;
        priv->ddr_clk.id = CLK_DDR;
        ret = clk_request(dev_clk, &priv->ddr_clk);
        if (ret)
                return ret;

        priv->cru = rockchip_get_cru();
        if (IS_ERR(priv->cru))
                return PTR_ERR(priv->cru);
#ifdef CONFIG_SPL_BUILD
        ret = setup_sdram(dev);
        if (ret)
                return ret;
#endif
        priv->info.base = 0;
        priv->info.size = sdram_size_mb(priv->pmu) << 20;

        return 0;
}

static int rk3288_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
        struct dram_info *priv = dev_get_priv(dev);

        *info = priv->info;

        return 0;
}

static struct ram_ops rk3288_dmc_ops = {
        .get_info = rk3288_dmc_get_info,
};

static const struct udevice_id rk3288_dmc_ids[] = {
        { .compatible = "rockchip,rk3288-dmc" },
        { }
};

U_BOOT_DRIVER(dmc_rk3288) = {
        .name = "rockchip_rk3288_dmc",
        .id = UCLASS_RAM,
        .of_match = rk3288_dmc_ids,
        .ops = &rk3288_dmc_ops,
#ifdef CONFIG_SPL_BUILD
        .ofdata_to_platdata = rk3288_dmc_ofdata_to_platdata,
#endif
        .probe = rk3288_dmc_probe,
        .priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_SPL_BUILD
        .platdata_auto_alloc_size = sizeof(struct rk3288_sdram_params),
#endif
};