/*
 * DDR3 mem setup file for board based on EXYNOS5
 *
 * Copyright (C) 2012 Samsung Electronics
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */
12 #include <asm/arch/clock.h>
13 #include <asm/arch/cpu.h>
14 #include <asm/arch/dmc.h>
15 #include <asm/arch/power.h>
16 #include "common_setup.h"
17 #include "exynos5_setup.h"
18 #include "clock_init.h"
20 #define TIMEOUT_US 10000
21 #define NUM_BYTE_LANES 4
/*
 * All four byte lanes packed with the default DQS delay, one byte per lane.
 * This must use bitwise OR: with logical || the whole expression collapses
 * to 1 instead of 0x08080808, corrupting the packed per-lane read offset
 * that test_shifts()/dmc_set_read_offset_value() write into PHY_CON4.
 */
#define DEFAULT_DQS_X4		((DEFAULT_DQS << 24) | (DEFAULT_DQS << 16) \
				| (DEFAULT_DQS << 8) | (DEFAULT_DQS << 0))
26 #ifdef CONFIG_EXYNOS5250
27 static void reset_phy_ctrl(void)
29 struct exynos5_clock
*clk
=
30 (struct exynos5_clock
*)samsung_get_base_clock();
32 writel(DDR3PHY_CTRL_PHY_RESET_OFF
, &clk
->lpddr3phy_ctrl
);
33 writel(DDR3PHY_CTRL_PHY_RESET
, &clk
->lpddr3phy_ctrl
);
36 int ddr3_mem_ctrl_init(struct mem_timings
*mem
, int reset
)
39 struct exynos5_phy_control
*phy0_ctrl
, *phy1_ctrl
;
40 struct exynos5_dmc
*dmc
;
43 phy0_ctrl
= (struct exynos5_phy_control
*)samsung_get_base_dmc_phy();
44 phy1_ctrl
= (struct exynos5_phy_control
*)(samsung_get_base_dmc_phy()
46 dmc
= (struct exynos5_dmc
*)samsung_get_base_dmc_ctrl();
51 /* Set Impedance Output Driver */
52 val
= (mem
->impedance
<< CA_CK_DRVR_DS_OFFSET
) |
53 (mem
->impedance
<< CA_CKE_DRVR_DS_OFFSET
) |
54 (mem
->impedance
<< CA_CS_DRVR_DS_OFFSET
) |
55 (mem
->impedance
<< CA_ADR_DRVR_DS_OFFSET
);
56 writel(val
, &phy0_ctrl
->phy_con39
);
57 writel(val
, &phy1_ctrl
->phy_con39
);
59 /* Set Read Latency and Burst Length for PHY0 and PHY1 */
60 val
= (mem
->ctrl_bstlen
<< PHY_CON42_CTRL_BSTLEN_SHIFT
) |
61 (mem
->ctrl_rdlat
<< PHY_CON42_CTRL_RDLAT_SHIFT
);
62 writel(val
, &phy0_ctrl
->phy_con42
);
63 writel(val
, &phy1_ctrl
->phy_con42
);
66 if (dmc_config_zq(mem
, &phy0_ctrl
->phy_con16
, &phy1_ctrl
->phy_con16
,
67 &phy0_ctrl
->phy_con17
, &phy1_ctrl
->phy_con17
))
68 return SETUP_ERR_ZQ_CALIBRATION_FAILURE
;
71 writel(mem
->phy0_pulld_dqs
, &phy0_ctrl
->phy_con14
);
72 writel(mem
->phy1_pulld_dqs
, &phy1_ctrl
->phy_con14
);
74 writel(mem
->concontrol
| (mem
->rd_fetch
<< CONCONTROL_RD_FETCH_SHIFT
)
75 | (mem
->dfi_init_start
<< CONCONTROL_DFI_INIT_START_SHIFT
),
78 update_reset_dll(&dmc
->phycontrol0
, DDR_MODE_DDR3
);
81 writel(mem
->phy0_dqs
, &phy0_ctrl
->phy_con4
);
82 writel(mem
->phy1_dqs
, &phy1_ctrl
->phy_con4
);
84 writel(mem
->phy0_dq
, &phy0_ctrl
->phy_con6
);
85 writel(mem
->phy1_dq
, &phy1_ctrl
->phy_con6
);
87 writel(mem
->phy0_tFS
, &phy0_ctrl
->phy_con10
);
88 writel(mem
->phy1_tFS
, &phy1_ctrl
->phy_con10
);
90 val
= (mem
->ctrl_start_point
<< PHY_CON12_CTRL_START_POINT_SHIFT
) |
91 (mem
->ctrl_inc
<< PHY_CON12_CTRL_INC_SHIFT
) |
92 (mem
->ctrl_dll_on
<< PHY_CON12_CTRL_DLL_ON_SHIFT
) |
93 (mem
->ctrl_ref
<< PHY_CON12_CTRL_REF_SHIFT
);
94 writel(val
, &phy0_ctrl
->phy_con12
);
95 writel(val
, &phy1_ctrl
->phy_con12
);
97 /* Start DLL locking */
98 writel(val
| (mem
->ctrl_start
<< PHY_CON12_CTRL_START_SHIFT
),
99 &phy0_ctrl
->phy_con12
);
100 writel(val
| (mem
->ctrl_start
<< PHY_CON12_CTRL_START_SHIFT
),
101 &phy1_ctrl
->phy_con12
);
103 update_reset_dll(&dmc
->phycontrol0
, DDR_MODE_DDR3
);
105 writel(mem
->concontrol
| (mem
->rd_fetch
<< CONCONTROL_RD_FETCH_SHIFT
),
108 /* Memory Channel Inteleaving Size */
109 writel(mem
->iv_size
, &dmc
->ivcontrol
);
111 writel(mem
->memconfig
, &dmc
->memconfig0
);
112 writel(mem
->memconfig
, &dmc
->memconfig1
);
113 writel(mem
->membaseconfig0
, &dmc
->membaseconfig0
);
114 writel(mem
->membaseconfig1
, &dmc
->membaseconfig1
);
116 /* Precharge Configuration */
117 writel(mem
->prechconfig_tp_cnt
<< PRECHCONFIG_TP_CNT_SHIFT
,
120 /* Power Down mode Configuration */
121 writel(mem
->dpwrdn_cyc
<< PWRDNCONFIG_DPWRDN_CYC_SHIFT
|
122 mem
->dsref_cyc
<< PWRDNCONFIG_DSREF_CYC_SHIFT
,
125 /* TimingRow, TimingData, TimingPower and Timingaref
126 * values as per Memory AC parameters
128 writel(mem
->timing_ref
, &dmc
->timingref
);
129 writel(mem
->timing_row
, &dmc
->timingrow
);
130 writel(mem
->timing_data
, &dmc
->timingdata
);
131 writel(mem
->timing_power
, &dmc
->timingpower
);
133 /* Send PALL command */
134 dmc_config_prech(mem
, &dmc
->directcmd
);
136 /* Send NOP, MRS and ZQINIT commands */
137 dmc_config_mrs(mem
, &dmc
->directcmd
);
139 if (mem
->gate_leveling_enable
) {
140 val
= PHY_CON0_RESET_VAL
;
142 writel(val
, &phy0_ctrl
->phy_con0
);
143 writel(val
, &phy1_ctrl
->phy_con0
);
145 val
= PHY_CON2_RESET_VAL
;
146 val
|= INIT_DESKEW_EN
;
147 writel(val
, &phy0_ctrl
->phy_con2
);
148 writel(val
, &phy1_ctrl
->phy_con2
);
150 val
= PHY_CON0_RESET_VAL
;
152 val
|= BYTE_RDLVL_EN
;
153 writel(val
, &phy0_ctrl
->phy_con0
);
154 writel(val
, &phy1_ctrl
->phy_con0
);
156 val
= (mem
->ctrl_start_point
<<
157 PHY_CON12_CTRL_START_POINT_SHIFT
) |
158 (mem
->ctrl_inc
<< PHY_CON12_CTRL_INC_SHIFT
) |
159 (mem
->ctrl_force
<< PHY_CON12_CTRL_FORCE_SHIFT
) |
160 (mem
->ctrl_start
<< PHY_CON12_CTRL_START_SHIFT
) |
161 (mem
->ctrl_ref
<< PHY_CON12_CTRL_REF_SHIFT
);
162 writel(val
, &phy0_ctrl
->phy_con12
);
163 writel(val
, &phy1_ctrl
->phy_con12
);
165 val
= PHY_CON2_RESET_VAL
;
166 val
|= INIT_DESKEW_EN
;
167 val
|= RDLVL_GATE_EN
;
168 writel(val
, &phy0_ctrl
->phy_con2
);
169 writel(val
, &phy1_ctrl
->phy_con2
);
171 val
= PHY_CON0_RESET_VAL
;
173 val
|= BYTE_RDLVL_EN
;
175 writel(val
, &phy0_ctrl
->phy_con0
);
176 writel(val
, &phy1_ctrl
->phy_con0
);
178 val
= PHY_CON1_RESET_VAL
;
179 val
&= ~(CTRL_GATEDURADJ_MASK
);
180 writel(val
, &phy0_ctrl
->phy_con1
);
181 writel(val
, &phy1_ctrl
->phy_con1
);
183 writel(CTRL_RDLVL_GATE_ENABLE
, &dmc
->rdlvl_config
);
185 while ((readl(&dmc
->phystatus
) &
186 (RDLVL_COMPLETE_CHO
| RDLVL_COMPLETE_CH1
)) !=
187 (RDLVL_COMPLETE_CHO
| RDLVL_COMPLETE_CH1
) && i
> 0) {
189 * TODO(waihong): Comment on how long this take to
196 return SETUP_ERR_RDLV_COMPLETE_TIMEOUT
;
197 writel(CTRL_RDLVL_GATE_DISABLE
, &dmc
->rdlvl_config
);
199 writel(0, &phy0_ctrl
->phy_con14
);
200 writel(0, &phy1_ctrl
->phy_con14
);
202 val
= (mem
->ctrl_start_point
<<
203 PHY_CON12_CTRL_START_POINT_SHIFT
) |
204 (mem
->ctrl_inc
<< PHY_CON12_CTRL_INC_SHIFT
) |
205 (mem
->ctrl_force
<< PHY_CON12_CTRL_FORCE_SHIFT
) |
206 (mem
->ctrl_start
<< PHY_CON12_CTRL_START_SHIFT
) |
207 (mem
->ctrl_dll_on
<< PHY_CON12_CTRL_DLL_ON_SHIFT
) |
208 (mem
->ctrl_ref
<< PHY_CON12_CTRL_REF_SHIFT
);
209 writel(val
, &phy0_ctrl
->phy_con12
);
210 writel(val
, &phy1_ctrl
->phy_con12
);
212 update_reset_dll(&dmc
->phycontrol0
, DDR_MODE_DDR3
);
215 /* Send PALL command */
216 dmc_config_prech(mem
, &dmc
->directcmd
);
218 writel(mem
->memcontrol
, &dmc
->memcontrol
);
220 /* Set DMC Concontrol and enable auto-refresh counter */
221 writel(mem
->concontrol
| (mem
->rd_fetch
<< CONCONTROL_RD_FETCH_SHIFT
)
222 | (mem
->aref_en
<< CONCONTROL_AREF_EN_SHIFT
), &dmc
->concontrol
);
227 #ifdef CONFIG_EXYNOS5420
/*
 * RAM address to use in the test.
 *
 * We'll use 4 words at this address and 4 at this address + 0x80 (Ares
 * interleaves channels every 128 bytes). This will allow us to evaluate all of
 * the chips in a 1 chip per channel (2GB) system and half the chips in a 2
 * chip per channel (4GB) system. We can't test the 2nd chip since we need to
 * do tests before the 2nd chip is enabled. Looking at the 2nd chip isn't
 * critical because the 1st and 2nd chip have very similar timings (they'd
 * better have similar timings, since there's only a single adjustment that is
 * shared by both chips).
 */
240 const unsigned int test_addr
= CONFIG_SYS_SDRAM_BASE
;
242 /* Test pattern with which RAM will be tested */
243 static const unsigned int test_pattern
[] = {
/*
 * NOTE(review): the initializer values (original lines 244-249) were lost
 * during extraction -- restore them from the upstream file. The leveling code
 * only requires a deterministic pattern: ARRAY_SIZE(test_pattern) words are
 * written by software_find_read_offset() and compared back word-by-word in
 * dmc_valid_window_test_vector().
 */
251 * This function is a test vector for sw read leveling,
252 * it compares the read data with the written data.
254 * @param ch DMC channel number
255 * @param byte_lane which DQS byte offset,
256 * possible values are 0,1,2,3
257 * @return TRUE if memory was good, FALSE if not.
259 static bool dmc_valid_window_test_vector(int ch
, int byte_lane
)
261 unsigned int read_data
;
265 mask
= 0xFF << (8 * byte_lane
);
267 for (i
= 0; i
< ARRAY_SIZE(test_pattern
); i
++) {
268 read_data
= readl(test_addr
+ i
* 4 + ch
* 0x80);
269 if ((read_data
& mask
) != (test_pattern
[i
] & mask
))
277 * This function returns current read offset value.
279 * @param phy_ctrl pointer to the current phy controller
281 static unsigned int dmc_get_read_offset_value(struct exynos5420_phy_control
284 return readl(&phy_ctrl
->phy_con4
);
288 * This function performs resync, so that slave DLL is updated.
290 * @param phy_ctrl pointer to the current phy controller
292 static void ddr_phy_set_do_resync(struct exynos5420_phy_control
*phy_ctrl
)
294 setbits_le32(&phy_ctrl
->phy_con10
, PHY_CON10_CTRL_OFFSETR3
);
295 clrbits_le32(&phy_ctrl
->phy_con10
, PHY_CON10_CTRL_OFFSETR3
);
299 * This function sets read offset value register with 'offset'.
301 * ...we also call call ddr_phy_set_do_resync().
303 * @param phy_ctrl pointer to the current phy controller
304 * @param offset offset to read DQS
306 static void dmc_set_read_offset_value(struct exynos5420_phy_control
*phy_ctrl
,
309 writel(offset
, &phy_ctrl
->phy_con4
);
310 ddr_phy_set_do_resync(phy_ctrl
);
/*
 * Convert a 2s complement byte to a byte with a sign bit.
 *
 * NOTE: you shouldn't use normal math on the number returned by this function.
 * As an example, -10 = 0xf6. After this function -10 = 0x8a. If you wanted
 * to do math and get the average of 10 and -10 (should be 0):
 * 0x8a + 0xa = 0x94 (-108)
 * 0x94 / 2 = 0xca (-54)
 * ...and 0xca = sign bit plus 0x4a, or -74
 *
 * Also note that you lose the ability to represent -128 since there are two
 * representations of 0.
 *
 * (Body reconstructed from the contract above: the original body was lost
 * in extraction.)
 *
 * @param b	The byte to convert in two's complement.
 * @return	The 7-bit value + sign bit.
 */
unsigned char make_signed_byte(signed char b)
{
	if (b < 0)
		return 0x80 | -b;	/* sign bit plus magnitude */
	else
		return b;
}
339 * Test various shifts starting at 'start' and going to 'end'.
341 * For each byte lane, we'll walk through shift starting at 'start' and going
342 * to 'end' (inclusive). When we are finally able to read the test pattern
343 * we'll store the value in the results array.
345 * @param phy_ctrl pointer to the current phy controller
346 * @param ch channel number
347 * @param start the start shift. -127 to 127
348 * @param end the end shift. -127 to 127
349 * @param results we'll store results for each byte lane.
352 void test_shifts(struct exynos5420_phy_control
*phy_ctrl
, int ch
,
353 int start
, int end
, int results
[NUM_BYTE_LANES
])
355 int incr
= (start
< end
) ? 1 : -1;
358 for (byte_lane
= 0; byte_lane
< NUM_BYTE_LANES
; byte_lane
++) {
361 dmc_set_read_offset_value(phy_ctrl
, DEFAULT_DQS_X4
);
362 results
[byte_lane
] = DEFAULT_DQS
;
364 for (shift
= start
; shift
!= (end
+ incr
); shift
+= incr
) {
365 unsigned int byte_offsetr
;
366 unsigned int offsetr
;
368 byte_offsetr
= make_signed_byte(shift
);
370 offsetr
= dmc_get_read_offset_value(phy_ctrl
);
371 offsetr
&= ~(0xFF << (8 * byte_lane
));
372 offsetr
|= (byte_offsetr
<< (8 * byte_lane
));
373 dmc_set_read_offset_value(phy_ctrl
, offsetr
);
375 if (dmc_valid_window_test_vector(ch
, byte_lane
)) {
376 results
[byte_lane
] = shift
;
384 * This function performs SW read leveling to compensate DQ-DQS skew at
385 * receiver it first finds the optimal read offset value on each DQS
386 * then applies the value to PHY.
388 * Read offset value has its min margin and max margin. If read offset
389 * value exceeds its min or max margin, read data will have corruption.
390 * To avoid this we are doing sw read leveling.
392 * SW read leveling is:
393 * 1> Finding offset value's left_limit and right_limit
394 * 2> and calculate its center value
395 * 3> finally programs that center value to PHY
396 * 4> then PHY gets its optimal offset value.
398 * @param phy_ctrl pointer to the current phy controller
399 * @param ch channel number
400 * @param coarse_lock_val The coarse lock value read from PHY_CON13.
403 static void software_find_read_offset(struct exynos5420_phy_control
*phy_ctrl
,
404 int ch
, unsigned int coarse_lock_val
)
406 unsigned int offsetr_cent
;
410 int left
[NUM_BYTE_LANES
];
411 int right
[NUM_BYTE_LANES
];
414 /* Fill the memory with test patterns */
415 for (i
= 0; i
< ARRAY_SIZE(test_pattern
); i
++)
416 writel(test_pattern
[i
], test_addr
+ i
* 4 + ch
* 0x80);
418 /* Figure out the limits we'll test with; keep -127 < limit < 127 */
419 left_limit
= DEFAULT_DQS
- coarse_lock_val
;
420 right_limit
= DEFAULT_DQS
+ coarse_lock_val
;
421 if (right_limit
> 127)
424 /* Fill in the location where reads were OK from left and right */
425 test_shifts(phy_ctrl
, ch
, left_limit
, right_limit
, left
);
426 test_shifts(phy_ctrl
, ch
, right_limit
, left_limit
, right
);
428 /* Make a final value by taking the center between the left and right */
430 for (byte_lane
= 0; byte_lane
< NUM_BYTE_LANES
; byte_lane
++) {
434 temp_center
= (left
[byte_lane
] + right
[byte_lane
]) / 2;
435 vmwc
= make_signed_byte(temp_center
);
436 offsetr_cent
|= vmwc
<< (8 * byte_lane
);
438 dmc_set_read_offset_value(phy_ctrl
, offsetr_cent
);
441 int ddr3_mem_ctrl_init(struct mem_timings
*mem
, int reset
)
443 struct exynos5420_clock
*clk
=
444 (struct exynos5420_clock
*)samsung_get_base_clock();
445 struct exynos5420_power
*power
=
446 (struct exynos5420_power
*)samsung_get_base_power();
447 struct exynos5420_phy_control
*phy0_ctrl
, *phy1_ctrl
;
448 struct exynos5420_dmc
*drex0
, *drex1
;
449 struct exynos5420_tzasc
*tzasc0
, *tzasc1
;
450 struct exynos5_power
*pmu
;
451 uint32_t val
, n_lock_r
, n_lock_w_phy0
, n_lock_w_phy1
;
452 uint32_t lock0_info
, lock1_info
;
456 phy0_ctrl
= (struct exynos5420_phy_control
*)samsung_get_base_dmc_phy();
457 phy1_ctrl
= (struct exynos5420_phy_control
*)(samsung_get_base_dmc_phy()
459 drex0
= (struct exynos5420_dmc
*)samsung_get_base_dmc_ctrl();
460 drex1
= (struct exynos5420_dmc
*)(samsung_get_base_dmc_ctrl()
462 tzasc0
= (struct exynos5420_tzasc
*)samsung_get_base_dmc_tzasc();
463 tzasc1
= (struct exynos5420_tzasc
*)(samsung_get_base_dmc_tzasc()
465 pmu
= (struct exynos5_power
*)EXYNOS5420_POWER_BASE
;
467 if (CONFIG_NR_DRAM_BANKS
> 4) {
468 /* Need both controllers. */
469 mem
->memcontrol
|= DMC_MEMCONTROL_NUM_CHIP_2
;
470 mem
->chips_per_channel
= 2;
471 mem
->chips_to_configure
= 2;
473 /* 2GB requires a single controller */
474 mem
->memcontrol
|= DMC_MEMCONTROL_NUM_CHIP_1
;
477 /* Enable PAUSE for DREX */
478 setbits_le32(&clk
->pause
, ENABLE_BIT
);
480 /* Enable BYPASS mode */
481 setbits_le32(&clk
->bpll_con1
, BYPASS_EN
);
483 writel(MUX_BPLL_SEL_FOUTBPLL
, &clk
->src_cdrex
);
485 val
= readl(&clk
->mux_stat_cdrex
);
486 val
&= BPLL_SEL_MASK
;
487 } while (val
!= FOUTBPLL
);
489 clrbits_le32(&clk
->bpll_con1
, BYPASS_EN
);
491 /* Specify the DDR memory type as DDR3 */
492 val
= readl(&phy0_ctrl
->phy_con0
);
493 val
&= ~(PHY_CON0_CTRL_DDR_MODE_MASK
<< PHY_CON0_CTRL_DDR_MODE_SHIFT
);
494 val
|= (DDR_MODE_DDR3
<< PHY_CON0_CTRL_DDR_MODE_SHIFT
);
495 writel(val
, &phy0_ctrl
->phy_con0
);
497 val
= readl(&phy1_ctrl
->phy_con0
);
498 val
&= ~(PHY_CON0_CTRL_DDR_MODE_MASK
<< PHY_CON0_CTRL_DDR_MODE_SHIFT
);
499 val
|= (DDR_MODE_DDR3
<< PHY_CON0_CTRL_DDR_MODE_SHIFT
);
500 writel(val
, &phy1_ctrl
->phy_con0
);
502 /* Set Read Latency and Burst Length for PHY0 and PHY1 */
503 val
= (mem
->ctrl_bstlen
<< PHY_CON42_CTRL_BSTLEN_SHIFT
) |
504 (mem
->ctrl_rdlat
<< PHY_CON42_CTRL_RDLAT_SHIFT
);
505 writel(val
, &phy0_ctrl
->phy_con42
);
506 writel(val
, &phy1_ctrl
->phy_con42
);
508 val
= readl(&phy0_ctrl
->phy_con26
);
509 val
&= ~(T_WRDATA_EN_MASK
<< T_WRDATA_EN_OFFSET
);
510 val
|= (T_WRDATA_EN_DDR3
<< T_WRDATA_EN_OFFSET
);
511 writel(val
, &phy0_ctrl
->phy_con26
);
513 val
= readl(&phy1_ctrl
->phy_con26
);
514 val
&= ~(T_WRDATA_EN_MASK
<< T_WRDATA_EN_OFFSET
);
515 val
|= (T_WRDATA_EN_DDR3
<< T_WRDATA_EN_OFFSET
);
516 writel(val
, &phy1_ctrl
->phy_con26
);
519 * Set Driver strength for CK, CKE, CS & CA to 0x7
520 * Set Driver strength for Data Slice 0~3 to 0x7
522 val
= (0x7 << CA_CK_DRVR_DS_OFFSET
) | (0x7 << CA_CKE_DRVR_DS_OFFSET
) |
523 (0x7 << CA_CS_DRVR_DS_OFFSET
) | (0x7 << CA_ADR_DRVR_DS_OFFSET
);
524 val
|= (0x7 << DA_3_DS_OFFSET
) | (0x7 << DA_2_DS_OFFSET
) |
525 (0x7 << DA_1_DS_OFFSET
) | (0x7 << DA_0_DS_OFFSET
);
526 writel(val
, &phy0_ctrl
->phy_con39
);
527 writel(val
, &phy1_ctrl
->phy_con39
);
530 if (dmc_config_zq(mem
, &phy0_ctrl
->phy_con16
, &phy1_ctrl
->phy_con16
,
531 &phy0_ctrl
->phy_con17
, &phy1_ctrl
->phy_con17
))
532 return SETUP_ERR_ZQ_CALIBRATION_FAILURE
;
534 clrbits_le32(&phy0_ctrl
->phy_con16
, ZQ_CLK_DIV_EN
);
535 clrbits_le32(&phy1_ctrl
->phy_con16
, ZQ_CLK_DIV_EN
);
538 val
= readl(&phy0_ctrl
->phy_con14
);
539 val
|= mem
->phy0_pulld_dqs
;
540 writel(val
, &phy0_ctrl
->phy_con14
);
541 val
= readl(&phy1_ctrl
->phy_con14
);
542 val
|= mem
->phy1_pulld_dqs
;
543 writel(val
, &phy1_ctrl
->phy_con14
);
545 val
= MEM_TERM_EN
| PHY_TERM_EN
;
546 writel(val
, &drex0
->phycontrol0
);
547 writel(val
, &drex1
->phycontrol0
);
549 writel(mem
->concontrol
|
550 (mem
->dfi_init_start
<< CONCONTROL_DFI_INIT_START_SHIFT
) |
551 (mem
->rd_fetch
<< CONCONTROL_RD_FETCH_SHIFT
),
553 writel(mem
->concontrol
|
554 (mem
->dfi_init_start
<< CONCONTROL_DFI_INIT_START_SHIFT
) |
555 (mem
->rd_fetch
<< CONCONTROL_RD_FETCH_SHIFT
),
559 val
= readl(&drex0
->phystatus
);
560 } while ((val
& DFI_INIT_COMPLETE
) != DFI_INIT_COMPLETE
);
562 val
= readl(&drex1
->phystatus
);
563 } while ((val
& DFI_INIT_COMPLETE
) != DFI_INIT_COMPLETE
);
565 clrbits_le32(&drex0
->concontrol
, DFI_INIT_START
);
566 clrbits_le32(&drex1
->concontrol
, DFI_INIT_START
);
568 update_reset_dll(&drex0
->phycontrol0
, DDR_MODE_DDR3
);
569 update_reset_dll(&drex1
->phycontrol0
, DDR_MODE_DDR3
);
573 * 0x2000_0000 ~ 0x5FFF_FFFF
574 * 0x6000_0000 ~ 0x9FFF_FFFF
577 val
= DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_0
) |
578 DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK
);
579 writel(val
, &tzasc0
->membaseconfig0
);
580 writel(val
, &tzasc1
->membaseconfig0
);
583 val
= DMC_MEMBASECONFIGX_CHIP_BASE(DMC_CHIP_BASE_1
) |
584 DMC_MEMBASECONFIGX_CHIP_MASK(DMC_CHIP_MASK
);
585 writel(val
, &tzasc0
->membaseconfig1
);
586 writel(val
, &tzasc1
->membaseconfig1
);
589 * Memory Channel Inteleaving Size
590 * Ares Channel interleaving = 128 bytes
593 writel(mem
->memconfig
, &tzasc0
->memconfig0
);
594 writel(mem
->memconfig
, &tzasc1
->memconfig0
);
595 writel(mem
->memconfig
, &tzasc0
->memconfig1
);
596 writel(mem
->memconfig
, &tzasc1
->memconfig1
);
598 /* Precharge Configuration */
599 writel(mem
->prechconfig_tp_cnt
<< PRECHCONFIG_TP_CNT_SHIFT
,
600 &drex0
->prechconfig0
);
601 writel(mem
->prechconfig_tp_cnt
<< PRECHCONFIG_TP_CNT_SHIFT
,
602 &drex1
->prechconfig0
);
605 * TimingRow, TimingData, TimingPower and Timingaref
606 * values as per Memory AC parameters
608 writel(mem
->timing_ref
, &drex0
->timingref
);
609 writel(mem
->timing_ref
, &drex1
->timingref
);
610 writel(mem
->timing_row
, &drex0
->timingrow0
);
611 writel(mem
->timing_row
, &drex1
->timingrow0
);
612 writel(mem
->timing_data
, &drex0
->timingdata0
);
613 writel(mem
->timing_data
, &drex1
->timingdata0
);
614 writel(mem
->timing_power
, &drex0
->timingpower0
);
615 writel(mem
->timing_power
, &drex1
->timingpower0
);
619 * Send NOP, MRS and ZQINIT commands
620 * Sending MRS command will reset the DRAM. We should not be
621 * reseting the DRAM after resume, this will lead to memory
622 * corruption as DRAM content is lost after DRAM reset
624 dmc_config_mrs(mem
, &drex0
->directcmd
);
625 dmc_config_mrs(mem
, &drex1
->directcmd
);
629 * Get PHY_CON13 from both phys. Gate CLKM around reading since
630 * PHY_CON13 is glitchy when CLKM is running. We're paranoid and
631 * wait until we get a "fine lock", though a coarse lock is probably
632 * OK (we only use the coarse numbers below). We try to gate the
633 * clock for as short a time as possible in case SDRAM is somehow
634 * sensitive. sdelay(10) in the loop is arbitrary to make sure
635 * there is some time for PHY_CON13 to get updated. In practice
636 * no delay appears to be needed.
638 val
= readl(&clk
->gate_bus_cdrex
);
640 writel(val
& ~0x1, &clk
->gate_bus_cdrex
);
641 lock0_info
= readl(&phy0_ctrl
->phy_con13
);
642 writel(val
, &clk
->gate_bus_cdrex
);
644 if ((lock0_info
& CTRL_FINE_LOCKED
) == CTRL_FINE_LOCKED
)
650 writel(val
& ~0x2, &clk
->gate_bus_cdrex
);
651 lock1_info
= readl(&phy1_ctrl
->phy_con13
);
652 writel(val
, &clk
->gate_bus_cdrex
);
654 if ((lock1_info
& CTRL_FINE_LOCKED
) == CTRL_FINE_LOCKED
)
662 * During Suspend-Resume & S/W-Reset, as soon as PMU releases
663 * pad retention, CKE goes high. This causes memory contents
664 * not to be retained during DRAM initialization. Therfore,
665 * there is a new control register(0x100431e8[28]) which lets us
666 * release pad retention and retain the memory content until the
667 * initialization is complete.
669 writel(PAD_RETENTION_DRAM_COREBLK_VAL
,
670 &power
->pad_retention_dram_coreblk_option
);
672 val
= readl(&power
->pad_retention_dram_status
);
673 } while (val
!= 0x1);
676 * CKE PAD retention disables DRAM self-refresh mode.
677 * Send auto refresh command for DRAM refresh.
679 for (i
= 0; i
< 128; i
++) {
680 for (chip
= 0; chip
< mem
->chips_to_configure
; chip
++) {
681 writel(DIRECT_CMD_REFA
|
682 (chip
<< DIRECT_CMD_CHIP_SHIFT
),
684 writel(DIRECT_CMD_REFA
|
685 (chip
<< DIRECT_CMD_CHIP_SHIFT
),
691 if (mem
->gate_leveling_enable
) {
692 writel(PHY_CON0_RESET_VAL
, &phy0_ctrl
->phy_con0
);
693 writel(PHY_CON0_RESET_VAL
, &phy1_ctrl
->phy_con0
);
695 setbits_le32(&phy0_ctrl
->phy_con0
, P0_CMD_EN
);
696 setbits_le32(&phy1_ctrl
->phy_con0
, P0_CMD_EN
);
698 val
= PHY_CON2_RESET_VAL
;
699 val
|= INIT_DESKEW_EN
;
700 writel(val
, &phy0_ctrl
->phy_con2
);
701 writel(val
, &phy1_ctrl
->phy_con2
);
703 val
= readl(&phy0_ctrl
->phy_con1
);
704 val
|= (RDLVL_PASS_ADJ_VAL
<< RDLVL_PASS_ADJ_OFFSET
);
705 writel(val
, &phy0_ctrl
->phy_con1
);
707 val
= readl(&phy1_ctrl
->phy_con1
);
708 val
|= (RDLVL_PASS_ADJ_VAL
<< RDLVL_PASS_ADJ_OFFSET
);
709 writel(val
, &phy1_ctrl
->phy_con1
);
711 n_lock_w_phy0
= (lock0_info
& CTRL_LOCK_COARSE_MASK
) >> 2;
712 n_lock_r
= readl(&phy0_ctrl
->phy_con12
);
713 n_lock_r
&= ~CTRL_DLL_ON
;
714 n_lock_r
|= n_lock_w_phy0
;
715 writel(n_lock_r
, &phy0_ctrl
->phy_con12
);
717 n_lock_w_phy1
= (lock1_info
& CTRL_LOCK_COARSE_MASK
) >> 2;
718 n_lock_r
= readl(&phy1_ctrl
->phy_con12
);
719 n_lock_r
&= ~CTRL_DLL_ON
;
720 n_lock_r
|= n_lock_w_phy1
;
721 writel(n_lock_r
, &phy1_ctrl
->phy_con12
);
723 val
= (0x3 << DIRECT_CMD_BANK_SHIFT
) | 0x4;
724 for (chip
= 0; chip
< mem
->chips_to_configure
; chip
++) {
725 writel(val
| (chip
<< DIRECT_CMD_CHIP_SHIFT
),
727 writel(val
| (chip
<< DIRECT_CMD_CHIP_SHIFT
),
731 setbits_le32(&phy0_ctrl
->phy_con2
, RDLVL_GATE_EN
);
732 setbits_le32(&phy1_ctrl
->phy_con2
, RDLVL_GATE_EN
);
734 setbits_le32(&phy0_ctrl
->phy_con0
, CTRL_SHGATE
);
735 setbits_le32(&phy1_ctrl
->phy_con0
, CTRL_SHGATE
);
737 val
= readl(&phy0_ctrl
->phy_con1
);
738 val
&= ~(CTRL_GATEDURADJ_MASK
);
739 writel(val
, &phy0_ctrl
->phy_con1
);
741 val
= readl(&phy1_ctrl
->phy_con1
);
742 val
&= ~(CTRL_GATEDURADJ_MASK
);
743 writel(val
, &phy1_ctrl
->phy_con1
);
745 writel(CTRL_RDLVL_GATE_ENABLE
, &drex0
->rdlvl_config
);
747 while (((readl(&drex0
->phystatus
) & RDLVL_COMPLETE_CHO
) !=
748 RDLVL_COMPLETE_CHO
) && (i
> 0)) {
750 * TODO(waihong): Comment on how long this take to
757 return SETUP_ERR_RDLV_COMPLETE_TIMEOUT
;
758 writel(CTRL_RDLVL_GATE_DISABLE
, &drex0
->rdlvl_config
);
760 writel(CTRL_RDLVL_GATE_ENABLE
, &drex1
->rdlvl_config
);
762 while (((readl(&drex1
->phystatus
) & RDLVL_COMPLETE_CHO
) !=
763 RDLVL_COMPLETE_CHO
) && (i
> 0)) {
765 * TODO(waihong): Comment on how long this take to
772 return SETUP_ERR_RDLV_COMPLETE_TIMEOUT
;
773 writel(CTRL_RDLVL_GATE_DISABLE
, &drex1
->rdlvl_config
);
775 writel(0, &phy0_ctrl
->phy_con14
);
776 writel(0, &phy1_ctrl
->phy_con14
);
778 val
= (0x3 << DIRECT_CMD_BANK_SHIFT
);
779 for (chip
= 0; chip
< mem
->chips_to_configure
; chip
++) {
780 writel(val
| (chip
<< DIRECT_CMD_CHIP_SHIFT
),
782 writel(val
| (chip
<< DIRECT_CMD_CHIP_SHIFT
),
786 /* Common Settings for Leveling */
787 val
= PHY_CON12_RESET_VAL
;
788 writel((val
+ n_lock_w_phy0
), &phy0_ctrl
->phy_con12
);
789 writel((val
+ n_lock_w_phy1
), &phy1_ctrl
->phy_con12
);
791 setbits_le32(&phy0_ctrl
->phy_con2
, DLL_DESKEW_EN
);
792 setbits_le32(&phy1_ctrl
->phy_con2
, DLL_DESKEW_EN
);
796 * Do software read leveling
798 * Do this before we turn on auto refresh since the auto refresh can
799 * be in conflict with the resync operation that's part of setting
803 /* restore calibrated value after resume */
804 dmc_set_read_offset_value(phy0_ctrl
, readl(&pmu
->pmu_spare1
));
805 dmc_set_read_offset_value(phy1_ctrl
, readl(&pmu
->pmu_spare2
));
807 software_find_read_offset(phy0_ctrl
, 0,
808 CTRL_LOCK_COARSE(lock0_info
));
809 software_find_read_offset(phy1_ctrl
, 1,
810 CTRL_LOCK_COARSE(lock1_info
));
811 /* save calibrated value to restore after resume */
812 writel(dmc_get_read_offset_value(phy0_ctrl
), &pmu
->pmu_spare1
);
813 writel(dmc_get_read_offset_value(phy1_ctrl
), &pmu
->pmu_spare2
);
816 /* Send PALL command */
817 dmc_config_prech(mem
, &drex0
->directcmd
);
818 dmc_config_prech(mem
, &drex1
->directcmd
);
820 writel(mem
->memcontrol
, &drex0
->memcontrol
);
821 writel(mem
->memcontrol
, &drex1
->memcontrol
);
824 * Set DMC Concontrol: Enable auto-refresh counter, provide
825 * read data fetch cycles and enable DREX auto set powerdown
826 * for input buffer of I/O in none read memory state.
828 writel(mem
->concontrol
| (mem
->aref_en
<< CONCONTROL_AREF_EN_SHIFT
) |
829 (mem
->rd_fetch
<< CONCONTROL_RD_FETCH_SHIFT
)|
830 DMC_CONCONTROL_IO_PD_CON(0x2),
832 writel(mem
->concontrol
| (mem
->aref_en
<< CONCONTROL_AREF_EN_SHIFT
) |
833 (mem
->rd_fetch
<< CONCONTROL_RD_FETCH_SHIFT
)|
834 DMC_CONCONTROL_IO_PD_CON(0x2),
838 * Enable Clock Gating Control for DMC
839 * this saves around 25 mw dmc power as compared to the power
840 * consumption without these bits enabled
842 setbits_le32(&drex0
->cgcontrol
, DMC_INTERNAL_CG
);
843 setbits_le32(&drex1
->cgcontrol
, DMC_INTERNAL_CG
);
846 * As per Exynos5800 UM ver 0.00 section 17.13.2.1
847 * CONCONTROL register bit 3 [update_mode], Exynos5800 does not
848 * support the PHY initiated update. And it is recommended to set
849 * this field to 1'b1 during initialization
851 * When we apply PHY-initiated mode, DLL lock value is determined
852 * once at DMC init time and not updated later when we change the MIF
853 * voltage based on ASV group in kernel. Applying MC-initiated mode
854 * makes sure that DLL tracing is ON so that silicon is able to
855 * compensate the voltage variation.
857 val
= readl(&drex0
->concontrol
);
858 val
|= CONCONTROL_UPDATE_MODE
;
859 writel(val
, &drex0
->concontrol
);
860 val
= readl(&drex1
->concontrol
);
861 val
|= CONCONTROL_UPDATE_MODE
;
862 writel(val
, &drex1
->concontrol
);