/*
 * Copyright (C) 2014 Gateworks Corporation
 * Author: Tim Harvey <tharvey@gateworks.com>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */
#include <linux/types.h>
#include <asm/arch/clock.h>
#include <asm/arch/mx6-ddr.h>
#include <asm/arch/sys_proto.h>
#include <asm/types.h>
#if defined(CONFIG_MX6QDL) || defined(CONFIG_MX6Q) || defined(CONFIG_MX6D)
19 static void reset_read_data_fifos(void)
21 struct mmdc_p_regs
*mmdc0
= (struct mmdc_p_regs
*)MMDC_P0_BASE_ADDR
;
23 /* Reset data FIFOs twice. */
24 setbits_le32(&mmdc0
->mpdgctrl0
, 1 << 31);
25 wait_for_bit("MMDC", &mmdc0
->mpdgctrl0
, 1 << 31, 0, 100, 0);
27 setbits_le32(&mmdc0
->mpdgctrl0
, 1 << 31);
28 wait_for_bit("MMDC", &mmdc0
->mpdgctrl0
, 1 << 31, 0, 100, 0);
31 static void precharge_all(const bool cs0_enable
, const bool cs1_enable
)
33 struct mmdc_p_regs
*mmdc0
= (struct mmdc_p_regs
*)MMDC_P0_BASE_ADDR
;
36 * Issue the Precharge-All command to the DDR device for both
37 * chip selects. Note, CON_REQ bit should also remain set. If
38 * only using one chip select, then precharge only the desired
41 if (cs0_enable
) { /* CS0 */
42 writel(0x04008050, &mmdc0
->mdscr
);
43 wait_for_bit("MMDC", &mmdc0
->mdscr
, 1 << 14, 1, 100, 0);
46 if (cs1_enable
) { /* CS1 */
47 writel(0x04008058, &mmdc0
->mdscr
);
48 wait_for_bit("MMDC", &mmdc0
->mdscr
, 1 << 14, 1, 100, 0);
52 static void force_delay_measurement(int bus_size
)
54 struct mmdc_p_regs
*mmdc0
= (struct mmdc_p_regs
*)MMDC_P0_BASE_ADDR
;
55 struct mmdc_p_regs
*mmdc1
= (struct mmdc_p_regs
*)MMDC_P1_BASE_ADDR
;
57 writel(0x800, &mmdc0
->mpmur0
);
59 writel(0x800, &mmdc1
->mpmur0
);
62 static void modify_dg_result(u32
*reg_st0
, u32
*reg_st1
, u32
*reg_ctrl
)
64 u32 dg_tmp_val
, dg_dl_abs_offset
, dg_hc_del
, val_ctrl
;
67 * DQS gating absolute offset should be modified from reflecting
68 * (HW_DG_LOWx + HW_DG_UPx)/2 to reflecting (HW_DG_UPx - 0x80)
71 val_ctrl
= readl(reg_ctrl
);
72 val_ctrl
&= 0xf0000000;
74 dg_tmp_val
= ((readl(reg_st0
) & 0x07ff0000) >> 16) - 0xc0;
75 dg_dl_abs_offset
= dg_tmp_val
& 0x7f;
76 dg_hc_del
= (dg_tmp_val
& 0x780) << 1;
78 val_ctrl
|= dg_dl_abs_offset
+ dg_hc_del
;
80 dg_tmp_val
= ((readl(reg_st1
) & 0x07ff0000) >> 16) - 0xc0;
81 dg_dl_abs_offset
= dg_tmp_val
& 0x7f;
82 dg_hc_del
= (dg_tmp_val
& 0x780) << 1;
84 val_ctrl
|= (dg_dl_abs_offset
+ dg_hc_del
) << 16;
86 writel(val_ctrl
, reg_ctrl
);
89 int mmdc_do_write_level_calibration(void)
91 struct mmdc_p_regs
*mmdc0
= (struct mmdc_p_regs
*)MMDC_P0_BASE_ADDR
;
92 struct mmdc_p_regs
*mmdc1
= (struct mmdc_p_regs
*)MMDC_P1_BASE_ADDR
;
93 u32 esdmisc_val
, zq_val
;
99 * Stash old values in case calibration fails,
100 * we need to restore them
102 ldectrl
[0] = readl(&mmdc0
->mpwldectrl0
);
103 ldectrl
[1] = readl(&mmdc0
->mpwldectrl1
);
104 ldectrl
[2] = readl(&mmdc1
->mpwldectrl0
);
105 ldectrl
[3] = readl(&mmdc1
->mpwldectrl1
);
107 /* disable DDR logic power down timer */
108 clrbits_le32(&mmdc0
->mdpdc
, 0xff00);
110 /* disable Adopt power down timer */
111 setbits_le32(&mmdc0
->mapsr
, 0x1);
113 debug("Starting write leveling calibration.\n");
116 * 2. disable auto refresh and ZQ calibration
117 * before proceeding with Write Leveling calibration
119 esdmisc_val
= readl(&mmdc0
->mdref
);
120 writel(0x0000C000, &mmdc0
->mdref
);
121 zq_val
= readl(&mmdc0
->mpzqhwctrl
);
122 writel(zq_val
& ~0x3, &mmdc0
->mpzqhwctrl
);
124 /* 3. increase walat and ralat to maximum */
125 setbits_le32(&mmdc0
->mdmisc
,
126 (1 << 6) | (1 << 7) | (1 << 8) | (1 << 16) | (1 << 17));
127 setbits_le32(&mmdc1
->mdmisc
,
128 (1 << 6) | (1 << 7) | (1 << 8) | (1 << 16) | (1 << 17));
130 * 4 & 5. Configure the external DDR device to enter write-leveling
131 * mode through Load Mode Register command.
133 * Bits[31:16] MR1 value (0x0080 write leveling enable)
134 * Bit[9] set WL_EN to enable MMDC DQS output
135 * Bits[6:4] set CMD bits for Load Mode Register programming
136 * Bits[2:0] set CMD_BA to 0x1 for DDR MR1 programming
138 writel(0x00808231, &mmdc0
->mdscr
);
140 /* 6. Activate automatic calibration by setting MPWLGCR[HW_WL_EN] */
141 writel(0x00000001, &mmdc0
->mpwlgcr
);
144 * 7. Upon completion of this process the MMDC de-asserts
145 * the MPWLGCR[HW_WL_EN]
147 wait_for_bit("MMDC", &mmdc0
->mpwlgcr
, 1 << 0, 0, 100, 0);
150 * 8. check for any errors: check both PHYs for x64 configuration,
151 * if x32, check only PHY0
153 if (readl(&mmdc0
->mpwlgcr
) & 0x00000F00)
155 if (readl(&mmdc1
->mpwlgcr
) & 0x00000F00)
158 debug("Ending write leveling calibration. Error mask: 0x%x\n", errors
);
160 /* check to see if cal failed */
161 if ((readl(&mmdc0
->mpwldectrl0
) == 0x001F001F) &&
162 (readl(&mmdc0
->mpwldectrl1
) == 0x001F001F) &&
163 (readl(&mmdc1
->mpwldectrl0
) == 0x001F001F) &&
164 (readl(&mmdc1
->mpwldectrl1
) == 0x001F001F)) {
165 debug("Cal seems to have soft-failed due to memory not supporting write leveling on all channels. Restoring original write leveling values.\n");
166 writel(ldectrl
[0], &mmdc0
->mpwldectrl0
);
167 writel(ldectrl
[1], &mmdc0
->mpwldectrl1
);
168 writel(ldectrl
[2], &mmdc1
->mpwldectrl0
);
169 writel(ldectrl
[3], &mmdc1
->mpwldectrl1
);
174 * User should issue MRS command to exit write leveling mode
175 * through Load Mode Register command
177 * Bits[31:16] MR1 value "ddr_mr1" value from initialization
178 * Bit[9] clear WL_EN to disable MMDC DQS output
179 * Bits[6:4] set CMD bits for Load Mode Register programming
180 * Bits[2:0] set CMD_BA to 0x1 for DDR MR1 programming
182 writel((ddr_mr1
<< 16) + 0x8031, &mmdc0
->mdscr
);
184 /* re-enable auto refresh and zq cal */
185 writel(esdmisc_val
, &mmdc0
->mdref
);
186 writel(zq_val
, &mmdc0
->mpzqhwctrl
);
188 debug("\tMMDC_MPWLDECTRL0 after write level cal: 0x%08X\n",
189 readl(&mmdc0
->mpwldectrl0
));
190 debug("\tMMDC_MPWLDECTRL1 after write level cal: 0x%08X\n",
191 readl(&mmdc0
->mpwldectrl1
));
192 debug("\tMMDC_MPWLDECTRL0 after write level cal: 0x%08X\n",
193 readl(&mmdc1
->mpwldectrl0
));
194 debug("\tMMDC_MPWLDECTRL1 after write level cal: 0x%08X\n",
195 readl(&mmdc1
->mpwldectrl1
));
197 /* We must force a readback of these values, to get them to stick */
198 readl(&mmdc0
->mpwldectrl0
);
199 readl(&mmdc0
->mpwldectrl1
);
200 readl(&mmdc1
->mpwldectrl0
);
201 readl(&mmdc1
->mpwldectrl1
);
203 /* enable DDR logic power down timer: */
204 setbits_le32(&mmdc0
->mdpdc
, 0x00005500);
206 /* Enable Adopt power down timer: */
207 clrbits_le32(&mmdc0
->mapsr
, 0x1);
210 writel(0, &mmdc0
->mdscr
);
215 int mmdc_do_dqs_calibration(void)
217 struct mmdc_p_regs
*mmdc0
= (struct mmdc_p_regs
*)MMDC_P0_BASE_ADDR
;
218 struct mmdc_p_regs
*mmdc1
= (struct mmdc_p_regs
*)MMDC_P1_BASE_ADDR
;
219 struct mx6dq_iomux_ddr_regs
*mx6_ddr_iomux
=
220 (struct mx6dq_iomux_ddr_regs
*)MX6DQ_IOM_DDR_BASE
;
223 bool cs0_enable_initial
;
224 bool cs1_enable_initial
;
228 u32 pddword
= 0x00ffff00; /* best so far, place into MPPDCMPR1 */
230 u32 initdelay
= 0x40404040;
232 /* check to see which chip selects are enabled */
233 cs0_enable_initial
= readl(&mmdc0
->mdctl
) & 0x80000000;
234 cs1_enable_initial
= readl(&mmdc0
->mdctl
) & 0x40000000;
236 /* disable DDR logic power down timer: */
237 clrbits_le32(&mmdc0
->mdpdc
, 0xff00);
239 /* disable Adopt power down timer: */
240 setbits_le32(&mmdc0
->mapsr
, 0x1);
242 /* set DQS pull ups */
243 setbits_le32(&mx6_ddr_iomux
->dram_sdqs0
, 0x7000);
244 setbits_le32(&mx6_ddr_iomux
->dram_sdqs1
, 0x7000);
245 setbits_le32(&mx6_ddr_iomux
->dram_sdqs2
, 0x7000);
246 setbits_le32(&mx6_ddr_iomux
->dram_sdqs3
, 0x7000);
247 setbits_le32(&mx6_ddr_iomux
->dram_sdqs4
, 0x7000);
248 setbits_le32(&mx6_ddr_iomux
->dram_sdqs5
, 0x7000);
249 setbits_le32(&mx6_ddr_iomux
->dram_sdqs6
, 0x7000);
250 setbits_le32(&mx6_ddr_iomux
->dram_sdqs7
, 0x7000);
252 /* Save old RALAT and WALAT values */
253 esdmisc_val
= readl(&mmdc0
->mdmisc
);
255 setbits_le32(&mmdc0
->mdmisc
,
256 (1 << 6) | (1 << 7) | (1 << 8) | (1 << 16) | (1 << 17));
258 /* Disable auto refresh before proceeding with calibration */
259 temp_ref
= readl(&mmdc0
->mdref
);
260 writel(0x0000c000, &mmdc0
->mdref
);
263 * Per the ref manual, issue one refresh cycle MDSCR[CMD]= 0x2,
264 * this also sets the CON_REQ bit.
266 if (cs0_enable_initial
)
267 writel(0x00008020, &mmdc0
->mdscr
);
268 if (cs1_enable_initial
)
269 writel(0x00008028, &mmdc0
->mdscr
);
271 /* poll to make sure the con_ack bit was asserted */
272 wait_for_bit("MMDC", &mmdc0
->mdscr
, 1 << 14, 1, 100, 0);
275 * Check MDMISC register CALIB_PER_CS to see which CS calibration
276 * is targeted to (under normal cases, it should be cleared
277 * as this is the default value, indicating calibration is directed
279 * Disable the other chip select not being target for calibration
280 * to avoid any potential issues. This will get re-enabled at end
283 if ((readl(&mmdc0
->mdmisc
) & 0x00100000) == 0)
284 clrbits_le32(&mmdc0
->mdctl
, 1 << 30); /* clear SDE_1 */
286 clrbits_le32(&mmdc0
->mdctl
, 1 << 31); /* clear SDE_0 */
289 * Check to see which chip selects are now enabled for
290 * the remainder of the calibration.
292 cs0_enable
= readl(&mmdc0
->mdctl
) & 0x80000000;
293 cs1_enable
= readl(&mmdc0
->mdctl
) & 0x40000000;
295 /* Check to see what the data bus size is */
296 bus_size
= (readl(&mmdc0
->mdctl
) & 0x30000) >> 16;
297 debug("Data bus size: %d (%d bits)\n", bus_size
, 1 << (bus_size
+ 4));
299 precharge_all(cs0_enable
, cs1_enable
);
301 /* Write the pre-defined value into MPPDCMPR1 */
302 writel(pddword
, &mmdc0
->mppdcmpr1
);
305 * Issue a write access to the external DDR device by setting
306 * the bit SW_DUMMY_WR (bit 0) in the MPSWDAR0 and then poll
307 * this bit until it clears to indicate completion of the write access.
309 setbits_le32(&mmdc0
->mpswdar0
, 1);
310 wait_for_bit("MMDC", &mmdc0
->mpswdar0
, 1 << 0, 0, 100, 0);
312 /* Set the RD_DL_ABS# bits to their default values
313 * (will be calibrated later in the read delay-line calibration).
314 * Both PHYs for x64 configuration, if x32, do only PHY0.
316 writel(initdelay
, &mmdc0
->mprddlctl
);
318 writel(initdelay
, &mmdc1
->mprddlctl
);
320 /* Force a measurment, for previous delay setup to take effect. */
321 force_delay_measurement(bus_size
);
324 * ***************************
325 * Read DQS Gating calibration
326 * ***************************
328 debug("Starting Read DQS Gating calibration.\n");
331 * Reset the read data FIFOs (two resets); only need to issue reset
332 * to PHY0 since in x64 mode, the reset will also go to PHY1.
334 reset_read_data_fifos();
337 * Start the automatic read DQS gating calibration process by
338 * asserting MPDGCTRL0[HW_DG_EN] and MPDGCTRL0[DG_CMP_CYC]
339 * and then poll MPDGCTRL0[HW_DG_EN]] until this bit clears
340 * to indicate completion.
341 * Also, ensure that MPDGCTRL0[HW_DG_ERR] is clear to indicate
342 * no errors were seen during calibration.
346 * Set bit 30: chooses option to wait 32 cycles instead of
347 * 16 before comparing read data.
349 setbits_le32(&mmdc0
->mpdgctrl0
, 1 << 30);
351 /* Set bit 28 to start automatic read DQS gating calibration */
352 setbits_le32(&mmdc0
->mpdgctrl0
, 5 << 28);
354 /* Poll for completion. MPDGCTRL0[HW_DG_EN] should be 0 */
355 wait_for_bit("MMDC", &mmdc0
->mpdgctrl0
, 1 << 28, 0, 100, 0);
358 * Check to see if any errors were encountered during calibration
359 * (check MPDGCTRL0[HW_DG_ERR]).
360 * Check both PHYs for x64 configuration, if x32, check only PHY0.
362 if (readl(&mmdc0
->mpdgctrl0
) & 0x00001000)
365 if ((bus_size
== 0x2) && (readl(&mmdc1
->mpdgctrl0
) & 0x00001000))
369 * DQS gating absolute offset should be modified from
370 * reflecting (HW_DG_LOWx + HW_DG_UPx)/2 to
371 * reflecting (HW_DG_UPx - 0x80)
373 modify_dg_result(&mmdc0
->mpdghwst0
, &mmdc0
->mpdghwst1
,
375 modify_dg_result(&mmdc0
->mpdghwst2
, &mmdc0
->mpdghwst3
,
377 if (bus_size
== 0x2) {
378 modify_dg_result(&mmdc1
->mpdghwst0
, &mmdc1
->mpdghwst1
,
380 modify_dg_result(&mmdc1
->mpdghwst2
, &mmdc1
->mpdghwst3
,
383 debug("Ending Read DQS Gating calibration. Error mask: 0x%x\n", errors
);
386 * **********************
387 * Read Delay calibration
388 * **********************
390 debug("Starting Read Delay calibration.\n");
392 reset_read_data_fifos();
395 * 4. Issue the Precharge-All command to the DDR device for both
396 * chip selects. If only using one chip select, then precharge
397 * only the desired chip select.
399 precharge_all(cs0_enable
, cs1_enable
);
402 * 9. Read delay-line calibration
403 * Start the automatic read calibration process by asserting
404 * MPRDDLHWCTL[HW_RD_DL_EN].
406 writel(0x00000030, &mmdc0
->mprddlhwctl
);
409 * 10. poll for completion
410 * MMDC indicates that the write data calibration had finished by
411 * setting MPRDDLHWCTL[HW_RD_DL_EN] = 0. Also, ensure that
412 * no error bits were set.
414 wait_for_bit("MMDC", &mmdc0
->mprddlhwctl
, 1 << 4, 0, 100, 0);
416 /* check both PHYs for x64 configuration, if x32, check only PHY0 */
417 if (readl(&mmdc0
->mprddlhwctl
) & 0x0000000f)
420 if ((bus_size
== 0x2) && (readl(&mmdc1
->mprddlhwctl
) & 0x0000000f))
423 debug("Ending Read Delay calibration. Error mask: 0x%x\n", errors
);
426 * ***********************
427 * Write Delay Calibration
428 * ***********************
430 debug("Starting Write Delay calibration.\n");
432 reset_read_data_fifos();
435 * 4. Issue the Precharge-All command to the DDR device for both
436 * chip selects. If only using one chip select, then precharge
437 * only the desired chip select.
439 precharge_all(cs0_enable
, cs1_enable
);
442 * 8. Set the WR_DL_ABS# bits to their default values.
443 * Both PHYs for x64 configuration, if x32, do only PHY0.
445 writel(initdelay
, &mmdc0
->mpwrdlctl
);
447 writel(initdelay
, &mmdc1
->mpwrdlctl
);
450 * XXX This isn't in the manual. Force a measurement,
451 * for previous delay setup to effect.
453 force_delay_measurement(bus_size
);
456 * 9. 10. Start the automatic write calibration process
457 * by asserting MPWRDLHWCTL0[HW_WR_DL_EN].
459 writel(0x00000030, &mmdc0
->mpwrdlhwctl
);
462 * Poll for completion.
463 * MMDC indicates that the write data calibration had finished
464 * by setting MPWRDLHWCTL[HW_WR_DL_EN] = 0.
465 * Also, ensure that no error bits were set.
467 wait_for_bit("MMDC", &mmdc0
->mpwrdlhwctl
, 1 << 4, 0, 100, 0);
469 /* Check both PHYs for x64 configuration, if x32, check only PHY0 */
470 if (readl(&mmdc0
->mpwrdlhwctl
) & 0x0000000f)
473 if ((bus_size
== 0x2) && (readl(&mmdc1
->mpwrdlhwctl
) & 0x0000000f))
476 debug("Ending Write Delay calibration. Error mask: 0x%x\n", errors
);
478 reset_read_data_fifos();
480 /* Enable DDR logic power down timer */
481 setbits_le32(&mmdc0
->mdpdc
, 0x00005500);
483 /* Enable Adopt power down timer */
484 clrbits_le32(&mmdc0
->mapsr
, 0x1);
486 /* Restore MDMISC value (RALAT, WALAT) to MMDCP1 */
487 writel(esdmisc_val
, &mmdc0
->mdmisc
);
489 /* Clear DQS pull ups */
490 clrbits_le32(&mx6_ddr_iomux
->dram_sdqs0
, 0x7000);
491 clrbits_le32(&mx6_ddr_iomux
->dram_sdqs1
, 0x7000);
492 clrbits_le32(&mx6_ddr_iomux
->dram_sdqs2
, 0x7000);
493 clrbits_le32(&mx6_ddr_iomux
->dram_sdqs3
, 0x7000);
494 clrbits_le32(&mx6_ddr_iomux
->dram_sdqs4
, 0x7000);
495 clrbits_le32(&mx6_ddr_iomux
->dram_sdqs5
, 0x7000);
496 clrbits_le32(&mx6_ddr_iomux
->dram_sdqs6
, 0x7000);
497 clrbits_le32(&mx6_ddr_iomux
->dram_sdqs7
, 0x7000);
499 /* Re-enable SDE (chip selects) if they were set initially */
500 if (cs1_enable_initial
)
502 setbits_le32(&mmdc0
->mdctl
, 1 << 30);
504 if (cs0_enable_initial
)
506 setbits_le32(&mmdc0
->mdctl
, 1 << 31);
508 /* Re-enable to auto refresh */
509 writel(temp_ref
, &mmdc0
->mdref
);
511 /* Clear the MDSCR (including the con_req bit) */
512 writel(0x0, &mmdc0
->mdscr
); /* CS0 */
514 /* Poll to make sure the con_ack bit is clear */
515 wait_for_bit("MMDC", &mmdc0
->mdscr
, 1 << 14, 0, 100, 0);
518 * Print out the registers that were updated as a result
519 * of the calibration process.
521 debug("MMDC registers updated from calibration\n");
522 debug("Read DQS gating calibration:\n");
523 debug("\tMPDGCTRL0 PHY0 = 0x%08X\n", readl(&mmdc0
->mpdgctrl0
));
524 debug("\tMPDGCTRL1 PHY0 = 0x%08X\n", readl(&mmdc0
->mpdgctrl1
));
525 debug("\tMPDGCTRL0 PHY1 = 0x%08X\n", readl(&mmdc1
->mpdgctrl0
));
526 debug("\tMPDGCTRL1 PHY1 = 0x%08X\n", readl(&mmdc1
->mpdgctrl1
));
527 debug("Read calibration:\n");
528 debug("\tMPRDDLCTL PHY0 = 0x%08X\n", readl(&mmdc0
->mprddlctl
));
529 debug("\tMPRDDLCTL PHY1 = 0x%08X\n", readl(&mmdc1
->mprddlctl
));
530 debug("Write calibration:\n");
531 debug("\tMPWRDLCTL PHY0 = 0x%08X\n", readl(&mmdc0
->mpwrdlctl
));
532 debug("\tMPWRDLCTL PHY1 = 0x%08X\n", readl(&mmdc1
->mpwrdlctl
));
535 * Registers below are for debugging purposes. These print out
536 * the upper and lower boundaries captured during
537 * read DQS gating calibration.
539 debug("Status registers bounds for read DQS gating:\n");
540 debug("\tMPDGHWST0 PHY0 = 0x%08x\n", readl(&mmdc0
->mpdghwst0
));
541 debug("\tMPDGHWST1 PHY0 = 0x%08x\n", readl(&mmdc0
->mpdghwst1
));
542 debug("\tMPDGHWST2 PHY0 = 0x%08x\n", readl(&mmdc0
->mpdghwst2
));
543 debug("\tMPDGHWST3 PHY0 = 0x%08x\n", readl(&mmdc0
->mpdghwst3
));
544 debug("\tMPDGHWST0 PHY1 = 0x%08x\n", readl(&mmdc1
->mpdghwst0
));
545 debug("\tMPDGHWST1 PHY1 = 0x%08x\n", readl(&mmdc1
->mpdghwst1
));
546 debug("\tMPDGHWST2 PHY1 = 0x%08x\n", readl(&mmdc1
->mpdghwst2
));
547 debug("\tMPDGHWST3 PHY1 = 0x%08x\n", readl(&mmdc1
->mpdghwst3
));
549 debug("Final do_dqs_calibration error mask: 0x%x\n", errors
);
#if defined(CONFIG_MX6SX)
/* Configure MX6SX mmdc iomux */
557 void mx6sx_dram_iocfg(unsigned width
,
558 const struct mx6sx_iomux_ddr_regs
*ddr
,
559 const struct mx6sx_iomux_grp_regs
*grp
)
561 struct mx6sx_iomux_ddr_regs
*mx6_ddr_iomux
;
562 struct mx6sx_iomux_grp_regs
*mx6_grp_iomux
;
564 mx6_ddr_iomux
= (struct mx6sx_iomux_ddr_regs
*)MX6SX_IOM_DDR_BASE
;
565 mx6_grp_iomux
= (struct mx6sx_iomux_grp_regs
*)MX6SX_IOM_GRP_BASE
;
568 writel(grp
->grp_ddr_type
, &mx6_grp_iomux
->grp_ddr_type
);
569 writel(grp
->grp_ddrpke
, &mx6_grp_iomux
->grp_ddrpke
);
572 writel(ddr
->dram_sdclk_0
, &mx6_ddr_iomux
->dram_sdclk_0
);
575 writel(ddr
->dram_cas
, &mx6_ddr_iomux
->dram_cas
);
576 writel(ddr
->dram_ras
, &mx6_ddr_iomux
->dram_ras
);
577 writel(grp
->grp_addds
, &mx6_grp_iomux
->grp_addds
);
580 writel(ddr
->dram_reset
, &mx6_ddr_iomux
->dram_reset
);
581 writel(ddr
->dram_sdba2
, &mx6_ddr_iomux
->dram_sdba2
);
582 writel(ddr
->dram_sdcke0
, &mx6_ddr_iomux
->dram_sdcke0
);
583 writel(ddr
->dram_sdcke1
, &mx6_ddr_iomux
->dram_sdcke1
);
584 writel(ddr
->dram_odt0
, &mx6_ddr_iomux
->dram_odt0
);
585 writel(ddr
->dram_odt1
, &mx6_ddr_iomux
->dram_odt1
);
586 writel(grp
->grp_ctlds
, &mx6_grp_iomux
->grp_ctlds
);
589 writel(grp
->grp_ddrmode_ctl
, &mx6_grp_iomux
->grp_ddrmode_ctl
);
590 writel(ddr
->dram_sdqs0
, &mx6_ddr_iomux
->dram_sdqs0
);
591 writel(ddr
->dram_sdqs1
, &mx6_ddr_iomux
->dram_sdqs1
);
593 writel(ddr
->dram_sdqs2
, &mx6_ddr_iomux
->dram_sdqs2
);
594 writel(ddr
->dram_sdqs3
, &mx6_ddr_iomux
->dram_sdqs3
);
598 writel(grp
->grp_ddrmode
, &mx6_grp_iomux
->grp_ddrmode
);
599 writel(grp
->grp_b0ds
, &mx6_grp_iomux
->grp_b0ds
);
600 writel(grp
->grp_b1ds
, &mx6_grp_iomux
->grp_b1ds
);
602 writel(grp
->grp_b2ds
, &mx6_grp_iomux
->grp_b2ds
);
603 writel(grp
->grp_b3ds
, &mx6_grp_iomux
->grp_b3ds
);
605 writel(ddr
->dram_dqm0
, &mx6_ddr_iomux
->dram_dqm0
);
606 writel(ddr
->dram_dqm1
, &mx6_ddr_iomux
->dram_dqm1
);
608 writel(ddr
->dram_dqm2
, &mx6_ddr_iomux
->dram_dqm2
);
609 writel(ddr
->dram_dqm3
, &mx6_ddr_iomux
->dram_dqm3
);
615 void mx6ul_dram_iocfg(unsigned width
,
616 const struct mx6ul_iomux_ddr_regs
*ddr
,
617 const struct mx6ul_iomux_grp_regs
*grp
)
619 struct mx6ul_iomux_ddr_regs
*mx6_ddr_iomux
;
620 struct mx6ul_iomux_grp_regs
*mx6_grp_iomux
;
622 mx6_ddr_iomux
= (struct mx6ul_iomux_ddr_regs
*)MX6UL_IOM_DDR_BASE
;
623 mx6_grp_iomux
= (struct mx6ul_iomux_grp_regs
*)MX6UL_IOM_GRP_BASE
;
626 writel(grp
->grp_ddr_type
, &mx6_grp_iomux
->grp_ddr_type
);
627 writel(grp
->grp_ddrpke
, &mx6_grp_iomux
->grp_ddrpke
);
630 writel(ddr
->dram_sdclk_0
, &mx6_ddr_iomux
->dram_sdclk_0
);
633 writel(ddr
->dram_cas
, &mx6_ddr_iomux
->dram_cas
);
634 writel(ddr
->dram_ras
, &mx6_ddr_iomux
->dram_ras
);
635 writel(grp
->grp_addds
, &mx6_grp_iomux
->grp_addds
);
638 writel(ddr
->dram_reset
, &mx6_ddr_iomux
->dram_reset
);
639 writel(ddr
->dram_sdba2
, &mx6_ddr_iomux
->dram_sdba2
);
640 writel(ddr
->dram_odt0
, &mx6_ddr_iomux
->dram_odt0
);
641 writel(ddr
->dram_odt1
, &mx6_ddr_iomux
->dram_odt1
);
642 writel(grp
->grp_ctlds
, &mx6_grp_iomux
->grp_ctlds
);
645 writel(grp
->grp_ddrmode_ctl
, &mx6_grp_iomux
->grp_ddrmode_ctl
);
646 writel(ddr
->dram_sdqs0
, &mx6_ddr_iomux
->dram_sdqs0
);
647 writel(ddr
->dram_sdqs1
, &mx6_ddr_iomux
->dram_sdqs1
);
650 writel(grp
->grp_ddrmode
, &mx6_grp_iomux
->grp_ddrmode
);
651 writel(grp
->grp_b0ds
, &mx6_grp_iomux
->grp_b0ds
);
652 writel(grp
->grp_b1ds
, &mx6_grp_iomux
->grp_b1ds
);
653 writel(ddr
->dram_dqm0
, &mx6_ddr_iomux
->dram_dqm0
);
654 writel(ddr
->dram_dqm1
, &mx6_ddr_iomux
->dram_dqm1
);
#endif

#if defined(CONFIG_MX6SL)
/* Configure MX6SL mmdc iomux */
659 void mx6sl_dram_iocfg(unsigned width
,
660 const struct mx6sl_iomux_ddr_regs
*ddr
,
661 const struct mx6sl_iomux_grp_regs
*grp
)
663 struct mx6sl_iomux_ddr_regs
*mx6_ddr_iomux
;
664 struct mx6sl_iomux_grp_regs
*mx6_grp_iomux
;
666 mx6_ddr_iomux
= (struct mx6sl_iomux_ddr_regs
*)MX6SL_IOM_DDR_BASE
;
667 mx6_grp_iomux
= (struct mx6sl_iomux_grp_regs
*)MX6SL_IOM_GRP_BASE
;
670 mx6_grp_iomux
->grp_ddr_type
= grp
->grp_ddr_type
;
671 mx6_grp_iomux
->grp_ddrpke
= grp
->grp_ddrpke
;
674 mx6_ddr_iomux
->dram_sdclk_0
= ddr
->dram_sdclk_0
;
677 mx6_ddr_iomux
->dram_cas
= ddr
->dram_cas
;
678 mx6_ddr_iomux
->dram_ras
= ddr
->dram_ras
;
679 mx6_grp_iomux
->grp_addds
= grp
->grp_addds
;
682 mx6_ddr_iomux
->dram_reset
= ddr
->dram_reset
;
683 mx6_ddr_iomux
->dram_sdba2
= ddr
->dram_sdba2
;
684 mx6_grp_iomux
->grp_ctlds
= grp
->grp_ctlds
;
687 mx6_grp_iomux
->grp_ddrmode_ctl
= grp
->grp_ddrmode_ctl
;
688 mx6_ddr_iomux
->dram_sdqs0
= ddr
->dram_sdqs0
;
689 mx6_ddr_iomux
->dram_sdqs1
= ddr
->dram_sdqs1
;
691 mx6_ddr_iomux
->dram_sdqs2
= ddr
->dram_sdqs2
;
692 mx6_ddr_iomux
->dram_sdqs3
= ddr
->dram_sdqs3
;
696 mx6_grp_iomux
->grp_ddrmode
= grp
->grp_ddrmode
;
697 mx6_grp_iomux
->grp_b0ds
= grp
->grp_b0ds
;
698 mx6_grp_iomux
->grp_b1ds
= grp
->grp_b1ds
;
700 mx6_grp_iomux
->grp_b2ds
= grp
->grp_b2ds
;
701 mx6_grp_iomux
->grp_b3ds
= grp
->grp_b3ds
;
704 mx6_ddr_iomux
->dram_dqm0
= ddr
->dram_dqm0
;
705 mx6_ddr_iomux
->dram_dqm1
= ddr
->dram_dqm1
;
707 mx6_ddr_iomux
->dram_dqm2
= ddr
->dram_dqm2
;
708 mx6_ddr_iomux
->dram_dqm3
= ddr
->dram_dqm3
;
#endif

#if defined(CONFIG_MX6QDL) || defined(CONFIG_MX6Q) || defined(CONFIG_MX6D)
/* Configure MX6DQ mmdc iomux */
715 void mx6dq_dram_iocfg(unsigned width
,
716 const struct mx6dq_iomux_ddr_regs
*ddr
,
717 const struct mx6dq_iomux_grp_regs
*grp
)
719 volatile struct mx6dq_iomux_ddr_regs
*mx6_ddr_iomux
;
720 volatile struct mx6dq_iomux_grp_regs
*mx6_grp_iomux
;
722 mx6_ddr_iomux
= (struct mx6dq_iomux_ddr_regs
*)MX6DQ_IOM_DDR_BASE
;
723 mx6_grp_iomux
= (struct mx6dq_iomux_grp_regs
*)MX6DQ_IOM_GRP_BASE
;
726 mx6_grp_iomux
->grp_ddr_type
= grp
->grp_ddr_type
;
727 mx6_grp_iomux
->grp_ddrpke
= grp
->grp_ddrpke
;
730 mx6_ddr_iomux
->dram_sdclk_0
= ddr
->dram_sdclk_0
;
731 mx6_ddr_iomux
->dram_sdclk_1
= ddr
->dram_sdclk_1
;
734 mx6_ddr_iomux
->dram_cas
= ddr
->dram_cas
;
735 mx6_ddr_iomux
->dram_ras
= ddr
->dram_ras
;
736 mx6_grp_iomux
->grp_addds
= grp
->grp_addds
;
739 mx6_ddr_iomux
->dram_reset
= ddr
->dram_reset
;
740 mx6_ddr_iomux
->dram_sdcke0
= ddr
->dram_sdcke0
;
741 mx6_ddr_iomux
->dram_sdcke1
= ddr
->dram_sdcke1
;
742 mx6_ddr_iomux
->dram_sdba2
= ddr
->dram_sdba2
;
743 mx6_ddr_iomux
->dram_sdodt0
= ddr
->dram_sdodt0
;
744 mx6_ddr_iomux
->dram_sdodt1
= ddr
->dram_sdodt1
;
745 mx6_grp_iomux
->grp_ctlds
= grp
->grp_ctlds
;
748 mx6_grp_iomux
->grp_ddrmode_ctl
= grp
->grp_ddrmode_ctl
;
749 mx6_ddr_iomux
->dram_sdqs0
= ddr
->dram_sdqs0
;
750 mx6_ddr_iomux
->dram_sdqs1
= ddr
->dram_sdqs1
;
752 mx6_ddr_iomux
->dram_sdqs2
= ddr
->dram_sdqs2
;
753 mx6_ddr_iomux
->dram_sdqs3
= ddr
->dram_sdqs3
;
756 mx6_ddr_iomux
->dram_sdqs4
= ddr
->dram_sdqs4
;
757 mx6_ddr_iomux
->dram_sdqs5
= ddr
->dram_sdqs5
;
758 mx6_ddr_iomux
->dram_sdqs6
= ddr
->dram_sdqs6
;
759 mx6_ddr_iomux
->dram_sdqs7
= ddr
->dram_sdqs7
;
763 mx6_grp_iomux
->grp_ddrmode
= grp
->grp_ddrmode
;
764 mx6_grp_iomux
->grp_b0ds
= grp
->grp_b0ds
;
765 mx6_grp_iomux
->grp_b1ds
= grp
->grp_b1ds
;
767 mx6_grp_iomux
->grp_b2ds
= grp
->grp_b2ds
;
768 mx6_grp_iomux
->grp_b3ds
= grp
->grp_b3ds
;
771 mx6_grp_iomux
->grp_b4ds
= grp
->grp_b4ds
;
772 mx6_grp_iomux
->grp_b5ds
= grp
->grp_b5ds
;
773 mx6_grp_iomux
->grp_b6ds
= grp
->grp_b6ds
;
774 mx6_grp_iomux
->grp_b7ds
= grp
->grp_b7ds
;
776 mx6_ddr_iomux
->dram_dqm0
= ddr
->dram_dqm0
;
777 mx6_ddr_iomux
->dram_dqm1
= ddr
->dram_dqm1
;
779 mx6_ddr_iomux
->dram_dqm2
= ddr
->dram_dqm2
;
780 mx6_ddr_iomux
->dram_dqm3
= ddr
->dram_dqm3
;
783 mx6_ddr_iomux
->dram_dqm4
= ddr
->dram_dqm4
;
784 mx6_ddr_iomux
->dram_dqm5
= ddr
->dram_dqm5
;
785 mx6_ddr_iomux
->dram_dqm6
= ddr
->dram_dqm6
;
786 mx6_ddr_iomux
->dram_dqm7
= ddr
->dram_dqm7
;
#endif

#if defined(CONFIG_MX6QDL) || defined(CONFIG_MX6DL) || defined(CONFIG_MX6S)
/* Configure MX6SDL mmdc iomux */
793 void mx6sdl_dram_iocfg(unsigned width
,
794 const struct mx6sdl_iomux_ddr_regs
*ddr
,
795 const struct mx6sdl_iomux_grp_regs
*grp
)
797 volatile struct mx6sdl_iomux_ddr_regs
*mx6_ddr_iomux
;
798 volatile struct mx6sdl_iomux_grp_regs
*mx6_grp_iomux
;
800 mx6_ddr_iomux
= (struct mx6sdl_iomux_ddr_regs
*)MX6SDL_IOM_DDR_BASE
;
801 mx6_grp_iomux
= (struct mx6sdl_iomux_grp_regs
*)MX6SDL_IOM_GRP_BASE
;
804 mx6_grp_iomux
->grp_ddr_type
= grp
->grp_ddr_type
;
805 mx6_grp_iomux
->grp_ddrpke
= grp
->grp_ddrpke
;
808 mx6_ddr_iomux
->dram_sdclk_0
= ddr
->dram_sdclk_0
;
809 mx6_ddr_iomux
->dram_sdclk_1
= ddr
->dram_sdclk_1
;
812 mx6_ddr_iomux
->dram_cas
= ddr
->dram_cas
;
813 mx6_ddr_iomux
->dram_ras
= ddr
->dram_ras
;
814 mx6_grp_iomux
->grp_addds
= grp
->grp_addds
;
817 mx6_ddr_iomux
->dram_reset
= ddr
->dram_reset
;
818 mx6_ddr_iomux
->dram_sdcke0
= ddr
->dram_sdcke0
;
819 mx6_ddr_iomux
->dram_sdcke1
= ddr
->dram_sdcke1
;
820 mx6_ddr_iomux
->dram_sdba2
= ddr
->dram_sdba2
;
821 mx6_ddr_iomux
->dram_sdodt0
= ddr
->dram_sdodt0
;
822 mx6_ddr_iomux
->dram_sdodt1
= ddr
->dram_sdodt1
;
823 mx6_grp_iomux
->grp_ctlds
= grp
->grp_ctlds
;
826 mx6_grp_iomux
->grp_ddrmode_ctl
= grp
->grp_ddrmode_ctl
;
827 mx6_ddr_iomux
->dram_sdqs0
= ddr
->dram_sdqs0
;
828 mx6_ddr_iomux
->dram_sdqs1
= ddr
->dram_sdqs1
;
830 mx6_ddr_iomux
->dram_sdqs2
= ddr
->dram_sdqs2
;
831 mx6_ddr_iomux
->dram_sdqs3
= ddr
->dram_sdqs3
;
834 mx6_ddr_iomux
->dram_sdqs4
= ddr
->dram_sdqs4
;
835 mx6_ddr_iomux
->dram_sdqs5
= ddr
->dram_sdqs5
;
836 mx6_ddr_iomux
->dram_sdqs6
= ddr
->dram_sdqs6
;
837 mx6_ddr_iomux
->dram_sdqs7
= ddr
->dram_sdqs7
;
841 mx6_grp_iomux
->grp_ddrmode
= grp
->grp_ddrmode
;
842 mx6_grp_iomux
->grp_b0ds
= grp
->grp_b0ds
;
843 mx6_grp_iomux
->grp_b1ds
= grp
->grp_b1ds
;
845 mx6_grp_iomux
->grp_b2ds
= grp
->grp_b2ds
;
846 mx6_grp_iomux
->grp_b3ds
= grp
->grp_b3ds
;
849 mx6_grp_iomux
->grp_b4ds
= grp
->grp_b4ds
;
850 mx6_grp_iomux
->grp_b5ds
= grp
->grp_b5ds
;
851 mx6_grp_iomux
->grp_b6ds
= grp
->grp_b6ds
;
852 mx6_grp_iomux
->grp_b7ds
= grp
->grp_b7ds
;
854 mx6_ddr_iomux
->dram_dqm0
= ddr
->dram_dqm0
;
855 mx6_ddr_iomux
->dram_dqm1
= ddr
->dram_dqm1
;
857 mx6_ddr_iomux
->dram_dqm2
= ddr
->dram_dqm2
;
858 mx6_ddr_iomux
->dram_dqm3
= ddr
->dram_dqm3
;
861 mx6_ddr_iomux
->dram_dqm4
= ddr
->dram_dqm4
;
862 mx6_ddr_iomux
->dram_dqm5
= ddr
->dram_dqm5
;
863 mx6_ddr_iomux
->dram_dqm6
= ddr
->dram_dqm6
;
864 mx6_ddr_iomux
->dram_dqm7
= ddr
->dram_dqm7
;
#endif

/*
 * Configure mx6 mmdc registers based on:
 *  - board-specific memory configuration
 *  - board-specific calibration data
 *  - ddr3/lpddr2 chip details
 *
 * The various calculations here are derived from the Freescale
 * 1. i.Mx6DQSDL DDR3 Script Aid spreadsheet (DOC-94917) designed to generate
 *    MMDC configuration registers based on memory system and memory chip
 *    parameters.
 * 2. i.Mx6SL LPDDR2 Script Aid spreadsheet V0.04 designed to generate MMDC
 *    configuration registers based on memory system and memory chip
 *    parameters.
 *
 * The defaults here are those which were specified in the spreadsheet.
 * For details on each register, refer to the IMX6DQRM and/or IMX6SDLRM
 * and/or IMX6SLRM section titled MMDC initialization.
 */
/* Build an MDSCR Load-Mode-Register command word (CON_REQ always set). */
#define MR(val, ba, cmd, cs1) \
	((val << 16) | (1 << 15) | (cmd << 4) | (cs1 << 3) | ba)
/*
 * Write to the second MMDC port only on parts that actually have one
 * (i.MX6SX/UL/SL are single-port). Extraction lost the do/while
 * terminator; restored here.
 */
#define MMDC1(entry, value) do {				  \
	if (!is_mx6sx() && !is_mx6ul() && !is_mx6sl())		  \
		mmdc1->entry = value;				  \
	} while (0)
896 * According JESD209-2B-LPDDR2: Table 103
899 static int lpddr2_wl(uint32_t mem_speed
)
914 puts("invalid memory speed\n");
922 * According JESD209-2B-LPDDR2: Table 103
925 static int lpddr2_rl(uint32_t mem_speed
)
942 puts("invalid memory speed\n");
949 void mx6_lpddr2_cfg(const struct mx6_ddr_sysinfo
*sysinfo
,
950 const struct mx6_mmdc_calibration
*calib
,
951 const struct mx6_lpddr2_cfg
*lpddr2_cfg
)
953 volatile struct mmdc_p_regs
*mmdc0
;
955 u8 tcke
, tcksrx
, tcksre
, trrd
;
956 u8 twl
, txp
, tfaw
, tcl
;
957 u16 tras
, twr
, tmrd
, trtp
, twtr
, trfc
, txsr
;
958 u16 trcd_lp
, trppb_lp
, trpab_lp
, trc_lp
;
961 int clkper
; /* clock period in picoseconds */
962 int clock
; /* clock freq in mHz */
965 /* only support 16/32 bits */
966 if (sysinfo
->dsize
> 1)
969 mmdc0
= (struct mmdc_p_regs
*)MMDC_P0_BASE_ADDR
;
971 clock
= mxc_get_clock(MXC_DDR_CLK
) / 1000000U;
972 clkper
= (1000 * 1000) / clock
; /* pico seconds */
974 twl
= lpddr2_wl(lpddr2_cfg
->mem_speed
) - 1;
976 /* LPDDR2-S2 and LPDDR2-S4 have the same tRFC value. */
977 switch (lpddr2_cfg
->density
) {
981 trfc
= DIV_ROUND_UP(130000, clkper
) - 1;
982 txsr
= DIV_ROUND_UP(140000, clkper
) - 1;
985 trfc
= DIV_ROUND_UP(210000, clkper
) - 1;
986 txsr
= DIV_ROUND_UP(220000, clkper
) - 1;
990 * 64Mb, 128Mb, 256Mb, 512Mb are not supported currently.
996 * txpdll, txpr, taonpd and taofpd are not relevant in LPDDR2 mode,
998 txp
= DIV_ROUND_UP(7500, clkper
) - 1;
1000 if (lpddr2_cfg
->mem_speed
== 333)
1001 tfaw
= DIV_ROUND_UP(60000, clkper
) - 1;
1003 tfaw
= DIV_ROUND_UP(50000, clkper
) - 1;
1004 trrd
= DIV_ROUND_UP(10000, clkper
) - 1;
1006 /* tckesr for LPDDR2 */
1007 tcksre
= DIV_ROUND_UP(15000, clkper
);
1009 twr
= DIV_ROUND_UP(15000, clkper
) - 1;
1012 * tMRD should be set to max(tMRR, tMRW)
1015 tras
= DIV_ROUND_UP(lpddr2_cfg
->trasmin
, clkper
/ 10) - 1;
1016 /* LPDDR2 mode use tRCD_LP filed in MDCFG3. */
1017 trcd_lp
= DIV_ROUND_UP(lpddr2_cfg
->trcd_lp
, clkper
/ 10) - 1;
1018 trc_lp
= DIV_ROUND_UP(lpddr2_cfg
->trasmin
+ lpddr2_cfg
->trppb_lp
,
1020 trppb_lp
= DIV_ROUND_UP(lpddr2_cfg
->trppb_lp
, clkper
/ 10) - 1;
1021 trpab_lp
= DIV_ROUND_UP(lpddr2_cfg
->trpab_lp
, clkper
/ 10) - 1;
1022 /* To LPDDR2, CL in MDCFG0 refers to RL */
1023 tcl
= lpddr2_rl(lpddr2_cfg
->mem_speed
) - 3;
1024 twtr
= DIV_ROUND_UP(7500, clkper
) - 1;
1025 trtp
= DIV_ROUND_UP(7500, clkper
) - 1;
1027 cs0_end
= 4 * sysinfo
->cs_density
- 1;
1029 debug("density:%d Gb (%d Gb per chip)\n",
1030 sysinfo
->cs_density
, lpddr2_cfg
->density
);
1031 debug("clock: %dMHz (%d ps)\n", clock
, clkper
);
1032 debug("memspd:%d\n", lpddr2_cfg
->mem_speed
);
1033 debug("trcd_lp=%d\n", trcd_lp
);
1034 debug("trppb_lp=%d\n", trppb_lp
);
1035 debug("trpab_lp=%d\n", trpab_lp
);
1036 debug("trc_lp=%d\n", trc_lp
);
1037 debug("tcke=%d\n", tcke
);
1038 debug("tcksrx=%d\n", tcksrx
);
1039 debug("tcksre=%d\n", tcksre
);
1040 debug("trfc=%d\n", trfc
);
1041 debug("txsr=%d\n", txsr
);
1042 debug("txp=%d\n", txp
);
1043 debug("tfaw=%d\n", tfaw
);
1044 debug("tcl=%d\n", tcl
);
1045 debug("tras=%d\n", tras
);
1046 debug("twr=%d\n", twr
);
1047 debug("tmrd=%d\n", tmrd
);
1048 debug("twl=%d\n", twl
);
1049 debug("trtp=%d\n", trtp
);
1050 debug("twtr=%d\n", twtr
);
1051 debug("trrd=%d\n", trrd
);
1052 debug("cs0_end=%d\n", cs0_end
);
1053 debug("ncs=%d\n", sysinfo
->ncs
);
1056 * board-specific configuration:
1057 * These values are determined empirically and vary per board layout
1059 mmdc0
->mpwldectrl0
= calib
->p0_mpwldectrl0
;
1060 mmdc0
->mpwldectrl1
= calib
->p0_mpwldectrl1
;
1061 mmdc0
->mpdgctrl0
= calib
->p0_mpdgctrl0
;
1062 mmdc0
->mpdgctrl1
= calib
->p0_mpdgctrl1
;
1063 mmdc0
->mprddlctl
= calib
->p0_mprddlctl
;
1064 mmdc0
->mpwrdlctl
= calib
->p0_mpwrdlctl
;
1065 mmdc0
->mpzqlp2ctl
= calib
->mpzqlp2ctl
;
1067 /* Read data DQ Byte0-3 delay */
1068 mmdc0
->mprddqby0dl
= 0x33333333;
1069 mmdc0
->mprddqby1dl
= 0x33333333;
1070 if (sysinfo
->dsize
> 0) {
1071 mmdc0
->mprddqby2dl
= 0x33333333;
1072 mmdc0
->mprddqby3dl
= 0x33333333;
1075 /* Write data DQ Byte0-3 delay */
1076 mmdc0
->mpwrdqby0dl
= 0xf3333333;
1077 mmdc0
->mpwrdqby1dl
= 0xf3333333;
1078 if (sysinfo
->dsize
> 0) {
1079 mmdc0
->mpwrdqby2dl
= 0xf3333333;
1080 mmdc0
->mpwrdqby3dl
= 0xf3333333;
1084 * In LPDDR2 mode this register should be cleared,
1085 * so no termination will be activated.
1087 mmdc0
->mpodtctrl
= 0;
1089 /* complete calibration */
1090 val
= (1 << 11); /* Force measurement on delay-lines */
1091 mmdc0
->mpmur0
= val
;
1093 /* Step 1: configuration request */
1094 mmdc0
->mdscr
= (u32
)(1 << 15); /* config request */
1096 /* Step 2: Timing configuration */
1097 mmdc0
->mdcfg0
= (trfc
<< 24) | (txsr
<< 16) | (txp
<< 13) |
1099 mmdc0
->mdcfg1
= (tras
<< 16) | (twr
<< 9) | (tmrd
<< 5) | twl
;
1100 mmdc0
->mdcfg2
= (trtp
<< 6) | (twtr
<< 3) | trrd
;
1101 mmdc0
->mdcfg3lp
= (trc_lp
<< 16) | (trcd_lp
<< 8) |
1102 (trppb_lp
<< 4) | trpab_lp
;
1105 mmdc0
->mdasp
= cs0_end
; /* CS addressing */
1107 /* Step 3: Configure DDR type */
1108 mmdc0
->mdmisc
= (sysinfo
->cs1_mirror
<< 19) | (sysinfo
->walat
<< 16) |
1109 (sysinfo
->bi_on
<< 12) | (sysinfo
->mif3_mode
<< 9) |
1110 (sysinfo
->ralat
<< 6) | (1 << 3);
1112 /* Step 4: Configure delay while leaving reset */
1113 mmdc0
->mdor
= (sysinfo
->sde_to_rst
<< 8) |
1114 (sysinfo
->rst_to_cke
<< 0);
1116 /* Step 5: Configure DDR physical parameters (density and burst len) */
1117 coladdr
= lpddr2_cfg
->coladdr
;
1118 if (lpddr2_cfg
->coladdr
== 8) /* 8-bit COL is 0x3 */
1120 else if (lpddr2_cfg
->coladdr
== 12) /* 12-bit COL is 0x4 */
1122 mmdc0
->mdctl
= (lpddr2_cfg
->rowaddr
- 11) << 24 | /* ROW */
1123 (coladdr
- 9) << 20 | /* COL */
1124 (0 << 19) | /* Burst Length = 4 for LPDDR2 */
1125 (sysinfo
->dsize
<< 16); /* DDR data bus size */
1127 /* Step 6: Perform ZQ calibration */
1128 val
= 0xa1390003; /* one-time HW ZQ calib */
1129 mmdc0
->mpzqhwctrl
= val
;
1131 /* Step 7: Enable MMDC with desired chip select */
1132 mmdc0
->mdctl
|= (1 << 31) | /* SDE_0 for CS0 */
1133 ((sysinfo
->ncs
== 2) ? 1 : 0) << 30; /* SDE_1 for CS1 */
1135 /* Step 8: Write Mode Registers to Init LPDDR2 devices */
1136 for (cs
= 0; cs
< sysinfo
->ncs
; cs
++) {
1138 mmdc0
->mdscr
= MR(63, 0, 3, cs
);
1139 /* MR10: calibration,
1140 * 0xff is calibration command after intilization.
1142 val
= 0xA | (0xff << 8);
1143 mmdc0
->mdscr
= MR(val
, 0, 3, cs
);
1145 val
= 0x1 | (0x82 << 8);
1146 mmdc0
->mdscr
= MR(val
, 0, 3, cs
);
1148 val
= 0x2 | (0x04 << 8);
1149 mmdc0
->mdscr
= MR(val
, 0, 3, cs
);
1151 val
= 0x3 | (0x02 << 8);
1152 mmdc0
->mdscr
= MR(val
, 0, 3, cs
);
1155 /* Step 10: Power down control and self-refresh */
1156 mmdc0
->mdpdc
= (tcke
& 0x7) << 16 |
1157 5 << 12 | /* PWDT_1: 256 cycles */
1158 5 << 8 | /* PWDT_0: 256 cycles */
1159 1 << 6 | /* BOTH_CS_PD */
1160 (tcksrx
& 0x7) << 3 |
1162 mmdc0
->mapsr
= 0x00001006; /* ADOPT power down enabled */
1164 /* Step 11: Configure ZQ calibration: one-time and periodic 1ms */
1166 mmdc0
->mpzqhwctrl
= val
;
1168 /* Step 12: Configure and activate periodic refresh */
1169 mmdc0
->mdref
= (sysinfo
->refsel
<< 14) | (sysinfo
->refr
<< 11);
1171 /* Step 13: Deassert config request - init complete */
1172 mmdc0
->mdscr
= 0x00000000;
1174 /* wait for auto-ZQ calibration to complete */
1178 void mx6_ddr3_cfg(const struct mx6_ddr_sysinfo
*sysinfo
,
1179 const struct mx6_mmdc_calibration
*calib
,
1180 const struct mx6_ddr3_cfg
*ddr3_cfg
)
1182 volatile struct mmdc_p_regs
*mmdc0
;
1183 volatile struct mmdc_p_regs
*mmdc1
;
1185 u8 tcke
, tcksrx
, tcksre
, txpdll
, taofpd
, taonpd
, trrd
;
1186 u8 todtlon
, taxpd
, tanpd
, tcwl
, txp
, tfaw
, tcl
;
1187 u8 todt_idle_off
= 0x4; /* from DDR3 Script Aid spreadsheet */
1188 u16 trcd
, trc
, tras
, twr
, tmrd
, trtp
, trp
, twtr
, trfc
, txs
, txpr
;
1190 u16 tdllk
= 0x1ff; /* DLL locking time: 512 cycles (JEDEC DDR3) */
1192 int clkper
; /* clock period in picoseconds */
1193 int clock
; /* clock freq in MHz */
1195 u16 mem_speed
= ddr3_cfg
->mem_speed
;
1197 mmdc0
= (struct mmdc_p_regs
*)MMDC_P0_BASE_ADDR
;
1198 if (!is_mx6sx() && !is_mx6ul() && !is_mx6sl())
1199 mmdc1
= (struct mmdc_p_regs
*)MMDC_P1_BASE_ADDR
;
1201 /* Limit mem_speed for MX6D/MX6Q */
1202 if (is_mx6dq() || is_mx6dqp()) {
1203 if (mem_speed
> 1066)
1204 mem_speed
= 1066; /* 1066 MT/s */
1208 /* Limit mem_speed for MX6S/MX6DL */
1210 if (mem_speed
> 800)
1211 mem_speed
= 800; /* 800 MT/s */
1216 clock
= mem_speed
/ 2;
1218 * Data rate of 1066 MT/s requires 533 MHz DDR3 clock, but MX6D/Q supports
1219 * up to 528 MHz, so reduce the clock to fit chip specs
1221 if (is_mx6dq() || is_mx6dqp()) {
1223 clock
= 528; /* 528 MHz */
1226 clkper
= (1000 * 1000) / clock
; /* pico seconds */
1231 switch (ddr3_cfg
->density
) {
1232 case 1: /* 1Gb per chip */
1233 trfc
= DIV_ROUND_UP(110000, clkper
) - 1;
1234 txs
= DIV_ROUND_UP(120000, clkper
) - 1;
1236 case 2: /* 2Gb per chip */
1237 trfc
= DIV_ROUND_UP(160000, clkper
) - 1;
1238 txs
= DIV_ROUND_UP(170000, clkper
) - 1;
1240 case 4: /* 4Gb per chip */
1241 trfc
= DIV_ROUND_UP(260000, clkper
) - 1;
1242 txs
= DIV_ROUND_UP(270000, clkper
) - 1;
1244 case 8: /* 8Gb per chip */
1245 trfc
= DIV_ROUND_UP(350000, clkper
) - 1;
1246 txs
= DIV_ROUND_UP(360000, clkper
) - 1;
1249 /* invalid density */
1250 puts("invalid chip density\n");
1256 switch (mem_speed
) {
1258 txp
= DIV_ROUND_UP(max(3 * clkper
, 7500), clkper
) - 1;
1259 tcke
= DIV_ROUND_UP(max(3 * clkper
, 7500), clkper
) - 1;
1260 if (ddr3_cfg
->pagesz
== 1) {
1261 tfaw
= DIV_ROUND_UP(40000, clkper
) - 1;
1262 trrd
= DIV_ROUND_UP(max(4 * clkper
, 10000), clkper
) - 1;
1264 tfaw
= DIV_ROUND_UP(50000, clkper
) - 1;
1265 trrd
= DIV_ROUND_UP(max(4 * clkper
, 10000), clkper
) - 1;
1269 txp
= DIV_ROUND_UP(max(3 * clkper
, 7500), clkper
) - 1;
1270 tcke
= DIV_ROUND_UP(max(3 * clkper
, 5625), clkper
) - 1;
1271 if (ddr3_cfg
->pagesz
== 1) {
1272 tfaw
= DIV_ROUND_UP(37500, clkper
) - 1;
1273 trrd
= DIV_ROUND_UP(max(4 * clkper
, 7500), clkper
) - 1;
1275 tfaw
= DIV_ROUND_UP(50000, clkper
) - 1;
1276 trrd
= DIV_ROUND_UP(max(4 * clkper
, 10000), clkper
) - 1;
1280 puts("invalid memory speed\n");
1284 txpdll
= DIV_ROUND_UP(max(10 * clkper
, 24000), clkper
) - 1;
1285 tcksre
= DIV_ROUND_UP(max(5 * clkper
, 10000), clkper
);
1286 taonpd
= DIV_ROUND_UP(2000, clkper
) - 1;
1289 twr
= DIV_ROUND_UP(15000, clkper
) - 1;
1290 tmrd
= DIV_ROUND_UP(max(12 * clkper
, 15000), clkper
) - 1;
1291 trc
= DIV_ROUND_UP(ddr3_cfg
->trcmin
, clkper
/ 10) - 1;
1292 tras
= DIV_ROUND_UP(ddr3_cfg
->trasmin
, clkper
/ 10) - 1;
1293 tcl
= DIV_ROUND_UP(ddr3_cfg
->trcd
, clkper
/ 10) - 3;
1294 trp
= DIV_ROUND_UP(ddr3_cfg
->trcd
, clkper
/ 10) - 1;
1295 twtr
= ROUND(max(4 * clkper
, 7500) / clkper
, 1) - 1;
1298 cs0_end
= 4 * sysinfo
->cs_density
- 1;
1300 debug("density:%d Gb (%d Gb per chip)\n",
1301 sysinfo
->cs_density
, ddr3_cfg
->density
);
1302 debug("clock: %dMHz (%d ps)\n", clock
, clkper
);
1303 debug("memspd:%d\n", mem_speed
);
1304 debug("tcke=%d\n", tcke
);
1305 debug("tcksrx=%d\n", tcksrx
);
1306 debug("tcksre=%d\n", tcksre
);
1307 debug("taofpd=%d\n", taofpd
);
1308 debug("taonpd=%d\n", taonpd
);
1309 debug("todtlon=%d\n", todtlon
);
1310 debug("tanpd=%d\n", tanpd
);
1311 debug("taxpd=%d\n", taxpd
);
1312 debug("trfc=%d\n", trfc
);
1313 debug("txs=%d\n", txs
);
1314 debug("txp=%d\n", txp
);
1315 debug("txpdll=%d\n", txpdll
);
1316 debug("tfaw=%d\n", tfaw
);
1317 debug("tcl=%d\n", tcl
);
1318 debug("trcd=%d\n", trcd
);
1319 debug("trp=%d\n", trp
);
1320 debug("trc=%d\n", trc
);
1321 debug("tras=%d\n", tras
);
1322 debug("twr=%d\n", twr
);
1323 debug("tmrd=%d\n", tmrd
);
1324 debug("tcwl=%d\n", tcwl
);
1325 debug("tdllk=%d\n", tdllk
);
1326 debug("trtp=%d\n", trtp
);
1327 debug("twtr=%d\n", twtr
);
1328 debug("trrd=%d\n", trrd
);
1329 debug("txpr=%d\n", txpr
);
1330 debug("cs0_end=%d\n", cs0_end
);
1331 debug("ncs=%d\n", sysinfo
->ncs
);
1332 debug("Rtt_wr=%d\n", sysinfo
->rtt_wr
);
1333 debug("Rtt_nom=%d\n", sysinfo
->rtt_nom
);
1334 debug("SRT=%d\n", ddr3_cfg
->SRT
);
1335 debug("twr=%d\n", twr
);
1338 * board-specific configuration:
1339 * These values are determined empirically and vary per board layout
1341 * appnote, ddr3 spreadsheet
1343 mmdc0
->mpwldectrl0
= calib
->p0_mpwldectrl0
;
1344 mmdc0
->mpwldectrl1
= calib
->p0_mpwldectrl1
;
1345 mmdc0
->mpdgctrl0
= calib
->p0_mpdgctrl0
;
1346 mmdc0
->mpdgctrl1
= calib
->p0_mpdgctrl1
;
1347 mmdc0
->mprddlctl
= calib
->p0_mprddlctl
;
1348 mmdc0
->mpwrdlctl
= calib
->p0_mpwrdlctl
;
1349 if (sysinfo
->dsize
> 1) {
1350 MMDC1(mpwldectrl0
, calib
->p1_mpwldectrl0
);
1351 MMDC1(mpwldectrl1
, calib
->p1_mpwldectrl1
);
1352 MMDC1(mpdgctrl0
, calib
->p1_mpdgctrl0
);
1353 MMDC1(mpdgctrl1
, calib
->p1_mpdgctrl1
);
1354 MMDC1(mprddlctl
, calib
->p1_mprddlctl
);
1355 MMDC1(mpwrdlctl
, calib
->p1_mpwrdlctl
);
1358 /* Read data DQ Byte0-3 delay */
1359 mmdc0
->mprddqby0dl
= 0x33333333;
1360 mmdc0
->mprddqby1dl
= 0x33333333;
1361 if (sysinfo
->dsize
> 0) {
1362 mmdc0
->mprddqby2dl
= 0x33333333;
1363 mmdc0
->mprddqby3dl
= 0x33333333;
1366 if (sysinfo
->dsize
> 1) {
1367 MMDC1(mprddqby0dl
, 0x33333333);
1368 MMDC1(mprddqby1dl
, 0x33333333);
1369 MMDC1(mprddqby2dl
, 0x33333333);
1370 MMDC1(mprddqby3dl
, 0x33333333);
1373 /* MMDC Termination: rtt_nom:2 RZQ/2(120ohm), rtt_nom:1 RZQ/4(60ohm) */
1374 val
= (sysinfo
->rtt_nom
== 2) ? 0x00011117 : 0x00022227;
1375 mmdc0
->mpodtctrl
= val
;
1376 if (sysinfo
->dsize
> 1)
1377 MMDC1(mpodtctrl
, val
);
1379 /* complete calibration */
1380 val
= (1 << 11); /* Force measurement on delay-lines */
1381 mmdc0
->mpmur0
= val
;
1382 if (sysinfo
->dsize
> 1)
1385 /* Step 1: configuration request */
1386 mmdc0
->mdscr
= (u32
)(1 << 15); /* config request */
1388 /* Step 2: Timing configuration */
1389 mmdc0
->mdcfg0
= (trfc
<< 24) | (txs
<< 16) | (txp
<< 13) |
1390 (txpdll
<< 9) | (tfaw
<< 4) | tcl
;
1391 mmdc0
->mdcfg1
= (trcd
<< 29) | (trp
<< 26) | (trc
<< 21) |
1392 (tras
<< 16) | (1 << 15) /* trpa */ |
1393 (twr
<< 9) | (tmrd
<< 5) | tcwl
;
1394 mmdc0
->mdcfg2
= (tdllk
<< 16) | (trtp
<< 6) | (twtr
<< 3) | trrd
;
1395 mmdc0
->mdotc
= (taofpd
<< 27) | (taonpd
<< 24) | (tanpd
<< 20) |
1396 (taxpd
<< 16) | (todtlon
<< 12) | (todt_idle_off
<< 4);
1397 mmdc0
->mdasp
= cs0_end
; /* CS addressing */
1399 /* Step 3: Configure DDR type */
1400 mmdc0
->mdmisc
= (sysinfo
->cs1_mirror
<< 19) | (sysinfo
->walat
<< 16) |
1401 (sysinfo
->bi_on
<< 12) | (sysinfo
->mif3_mode
<< 9) |
1402 (sysinfo
->ralat
<< 6);
1404 /* Step 4: Configure delay while leaving reset */
1405 mmdc0
->mdor
= (txpr
<< 16) | (sysinfo
->sde_to_rst
<< 8) |
1406 (sysinfo
->rst_to_cke
<< 0);
1408 /* Step 5: Configure DDR physical parameters (density and burst len) */
1409 coladdr
= ddr3_cfg
->coladdr
;
1410 if (ddr3_cfg
->coladdr
== 8) /* 8-bit COL is 0x3 */
1412 else if (ddr3_cfg
->coladdr
== 12) /* 12-bit COL is 0x4 */
1414 mmdc0
->mdctl
= (ddr3_cfg
->rowaddr
- 11) << 24 | /* ROW */
1415 (coladdr
- 9) << 20 | /* COL */
1416 (1 << 19) | /* Burst Length = 8 for DDR3 */
1417 (sysinfo
->dsize
<< 16); /* DDR data bus size */
1419 /* Step 6: Perform ZQ calibration */
1420 val
= 0xa1390001; /* one-time HW ZQ calib */
1421 mmdc0
->mpzqhwctrl
= val
;
1422 if (sysinfo
->dsize
> 1)
1423 MMDC1(mpzqhwctrl
, val
);
1425 /* Step 7: Enable MMDC with desired chip select */
1426 mmdc0
->mdctl
|= (1 << 31) | /* SDE_0 for CS0 */
1427 ((sysinfo
->ncs
== 2) ? 1 : 0) << 30; /* SDE_1 for CS1 */
1429 /* Step 8: Write Mode Registers to Init DDR3 devices */
1430 for (cs
= 0; cs
< sysinfo
->ncs
; cs
++) {
1432 val
= (sysinfo
->rtt_wr
& 3) << 9 | (ddr3_cfg
->SRT
& 1) << 7 |
1433 ((tcwl
- 3) & 3) << 3;
1434 debug("MR2 CS%d: 0x%08x\n", cs
, (u32
)MR(val
, 2, 3, cs
));
1435 mmdc0
->mdscr
= MR(val
, 2, 3, cs
);
1437 debug("MR3 CS%d: 0x%08x\n", cs
, (u32
)MR(0, 3, 3, cs
));
1438 mmdc0
->mdscr
= MR(0, 3, 3, cs
);
1440 val
= ((sysinfo
->rtt_nom
& 1) ? 1 : 0) << 2 |
1441 ((sysinfo
->rtt_nom
& 2) ? 1 : 0) << 6;
1442 debug("MR1 CS%d: 0x%08x\n", cs
, (u32
)MR(val
, 1, 3, cs
));
1443 mmdc0
->mdscr
= MR(val
, 1, 3, cs
);
1445 val
= ((tcl
- 1) << 4) | /* CAS */
1446 (1 << 8) | /* DLL Reset */
1447 ((twr
- 3) << 9) | /* Write Recovery */
1448 (sysinfo
->pd_fast_exit
<< 12); /* Precharge PD PLL on */
1449 debug("MR0 CS%d: 0x%08x\n", cs
, (u32
)MR(val
, 0, 3, cs
));
1450 mmdc0
->mdscr
= MR(val
, 0, 3, cs
);
1451 /* ZQ calibration */
1453 mmdc0
->mdscr
= MR(val
, 0, 4, cs
);
1456 /* Step 10: Power down control and self-refresh */
1457 mmdc0
->mdpdc
= (tcke
& 0x7) << 16 |
1458 5 << 12 | /* PWDT_1: 256 cycles */
1459 5 << 8 | /* PWDT_0: 256 cycles */
1460 1 << 6 | /* BOTH_CS_PD */
1461 (tcksrx
& 0x7) << 3 |
1463 if (!sysinfo
->pd_fast_exit
)
1464 mmdc0
->mdpdc
|= (1 << 7); /* SLOW_PD */
1465 mmdc0
->mapsr
= 0x00001006; /* ADOPT power down enabled */
1467 /* Step 11: Configure ZQ calibration: one-time and periodic 1ms */
1469 mmdc0
->mpzqhwctrl
= val
;
1470 if (sysinfo
->dsize
> 1)
1471 MMDC1(mpzqhwctrl
, val
);
1473 /* Step 12: Configure and activate periodic refresh */
1474 mmdc0
->mdref
= (sysinfo
->refsel
<< 14) | (sysinfo
->refr
<< 11);
1476 /* Step 13: Deassert config request - init complete */
1477 mmdc0
->mdscr
= 0x00000000;
1479 /* wait for auto-ZQ calibration to complete */
1483 void mx6_dram_cfg(const struct mx6_ddr_sysinfo
*sysinfo
,
1484 const struct mx6_mmdc_calibration
*calib
,
1485 const void *ddr_cfg
)
1487 if (sysinfo
->ddr_type
== DDR_TYPE_DDR3
) {
1488 mx6_ddr3_cfg(sysinfo
, calib
, ddr_cfg
);
1489 } else if (sysinfo
->ddr_type
== DDR_TYPE_LPDDR2
) {
1490 mx6_lpddr2_cfg(sysinfo
, calib
, ddr_cfg
);
1492 puts("Unsupported ddr type\n");