/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */
11 #include <asm/arch/cpu.h>
12 #include <asm/arch/soc.h>
14 #include "ddr3_init.h"
15 #include "ddr3_hw_training.h"
19 #include "ddr3_patterns_64bit.h"
21 #include "ddr3_patterns_16bit.h"
22 #if defined(MV88F672X)
23 #include "ddr3_patterns_16bit.h"
31 #define DEBUG_MAIN_C(s, d, l) \
32 DEBUG_MAIN_S(s); DEBUG_MAIN_D(d, l); DEBUG_MAIN_S("\n")
33 #define DEBUG_MAIN_FULL_C(s, d, l) \
34 DEBUG_MAIN_FULL_S(s); DEBUG_MAIN_FULL_D(d, l); DEBUG_MAIN_FULL_S("\n")
37 #define DEBUG_MAIN_S(s) puts(s)
38 #define DEBUG_MAIN_D(d, l) printf("%x", d)
40 #define DEBUG_MAIN_S(s)
41 #define DEBUG_MAIN_D(d, l)
44 #ifdef MV_DEBUG_MAIN_FULL
45 #define DEBUG_MAIN_FULL_S(s) puts(s)
46 #define DEBUG_MAIN_FULL_D(d, l) printf("%x", d)
48 #define DEBUG_MAIN_FULL_S(s)
49 #define DEBUG_MAIN_FULL_D(d, l)
52 #ifdef MV_DEBUG_SUSPEND_RESUME
53 #define DEBUG_SUSPEND_RESUME_S(s) puts(s)
54 #define DEBUG_SUSPEND_RESUME_D(d, l) printf("%x", d)
56 #define DEBUG_SUSPEND_RESUME_S(s)
57 #define DEBUG_SUSPEND_RESUME_D(d, l)
60 static u32 ddr3_sw_wl_rl_debug
;
61 static u32 ddr3_run_pbs
= 1;
/*
 * Emit the training-code version banner.
 * No trailing newline is printed here; presumably the caller completes
 * the line with the remaining revision digits - confirm at the call site.
 */
void ddr3_print_version(void)
{
	puts("DDR3 Training Sequence - Ver 5.7.");
}
68 void ddr3_set_sw_wl_rl_debug(u32 val
)
70 ddr3_sw_wl_rl_debug
= val
;
73 void ddr3_set_pbs(u32 val
)
78 int ddr3_hw_training(u32 target_freq
, u32 ddr_width
, int xor_bypass
,
79 u32 scrub_offs
, u32 scrub_size
, int dqs_clk_aligned
,
80 int debug_mode
, int reg_dimm_skip_wl
)
82 /* A370 has no PBS mechanism */
83 __maybe_unused u32 first_loop_flag
= 0;
85 MV_DRAM_INFO dram_info
;
91 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 1\n");
93 memset(&dram_info
, 0, sizeof(dram_info
));
94 dram_info
.num_cs
= ddr3_get_cs_num_from_reg();
95 dram_info
.cs_ena
= ddr3_get_cs_ena_from_reg();
96 dram_info
.target_frequency
= target_freq
;
97 dram_info
.ddr_width
= ddr_width
;
98 dram_info
.num_of_std_pups
= ddr_width
/ PUP_SIZE
;
99 dram_info
.rl400_bug
= 0;
100 dram_info
.multi_cs_mr_support
= 0;
102 dram_info
.rl400_bug
= 1;
105 /* Ignore ECC errors - if ECC is enabled */
106 reg
= reg_read(REG_SDRAM_CONFIG_ADDR
);
107 if (reg
& (1 << REG_SDRAM_CONFIG_ECC_OFFS
)) {
108 dram_info
.ecc_ena
= 1;
109 reg
|= (1 << REG_SDRAM_CONFIG_IERR_OFFS
);
110 reg_write(REG_SDRAM_CONFIG_ADDR
, reg
);
112 dram_info
.ecc_ena
= 0;
115 reg
= reg_read(REG_SDRAM_CONFIG_ADDR
);
116 if (reg
& (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS
))
117 dram_info
.reg_dimm
= 1;
119 dram_info
.reg_dimm
= 0;
121 dram_info
.num_of_total_pups
= ddr_width
/ PUP_SIZE
+ dram_info
.ecc_ena
;
123 /* Get target 2T value */
124 reg
= reg_read(REG_DUNIT_CTRL_LOW_ADDR
);
125 dram_info
.mode_2t
= (reg
>> REG_DUNIT_CTRL_LOW_2T_OFFS
) &
126 REG_DUNIT_CTRL_LOW_2T_MASK
;
128 /* Get target CL value */
130 reg
= reg_read(REG_DDR3_MR0_ADDR
) >> 2;
132 reg
= reg_read(REG_DDR3_MR0_CS_ADDR
) >> 2;
135 reg
= (((reg
>> 1) & 0xE) | (reg
& 0x1)) & 0xF;
136 dram_info
.cl
= ddr3_valid_cl_to_cl(reg
);
138 /* Get target CWL value */
140 reg
= reg_read(REG_DDR3_MR2_ADDR
) >> REG_DDR3_MR2_CWL_OFFS
;
142 reg
= reg_read(REG_DDR3_MR2_CS_ADDR
) >> REG_DDR3_MR2_CWL_OFFS
;
145 reg
&= REG_DDR3_MR2_CWL_MASK
;
147 #if !defined(MV88F67XX)
148 /* A370 has no PBS mechanism */
149 #if defined(MV88F78X60)
150 if ((dram_info
.target_frequency
> DDR_400
) && (ddr3_run_pbs
))
153 /* first_loop_flag = 1; skip mid freq at ALP/A375 */
154 if ((dram_info
.target_frequency
> DDR_400
) && (ddr3_run_pbs
) &&
155 (mv_ctrl_revision_get() >= UMC_A0
))
162 freq
= dram_info
.target_frequency
;
164 /* Set ODT to always on */
165 ddr3_odt_activate(1);
168 mv_sys_xor_init(&dram_info
);
170 /* Get DRAM/HCLK ratio */
171 if (reg_read(REG_DDR_IO_ADDR
) & (1 << REG_DDR_IO_CLK_RATIO_OFFS
))
175 * Xor Bypass - ECC support in AXP is currently available for 1:1
176 * modes frequency modes.
177 * Not all frequency modes support the ddr3 training sequence
179 * Xor Bypass allows using the Xor initializations and scrubbing
180 * inside the ddr3 training sequence without running the training
183 if (xor_bypass
== 0) {
185 DEBUG_MAIN_S("DDR3 Training Sequence - Run with PBS.\n");
187 DEBUG_MAIN_S("DDR3 Training Sequence - Run without PBS.\n");
190 if (dram_info
.target_frequency
> DFS_MARGIN
) {
194 if (dram_info
.reg_dimm
== 1)
197 if (MV_OK
!= ddr3_dfs_high_2_low(freq
, &dram_info
)) {
198 /* Set low - 100Mhz DDR Frequency by HW */
199 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs High2Low)\n");
200 return MV_DDR3_TRAINING_ERR_DFS_H2L
;
203 if ((dram_info
.reg_dimm
== 1) &&
204 (reg_dimm_skip_wl
== 0)) {
206 ddr3_write_leveling_hw_reg_dimm(freq
,
208 DEBUG_MAIN_S("DDR3 Training Sequence - Registered DIMM Low WL - SKIP\n");
211 if (ddr3_get_log_level() >= MV_LOG_LEVEL_1
)
212 ddr3_print_freq(freq
);
215 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 2\n");
217 if (!dqs_clk_aligned
) {
220 * If running training sequence without DFS,
221 * we must run Write leveling before writing
226 * ODT - Multi CS system use SW WL,
227 * Single CS System use HW WL
229 if (dram_info
.cs_ena
> 1) {
231 ddr3_write_leveling_sw(
234 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
235 return MV_DDR3_TRAINING_ERR_WR_LVL_SW
;
239 ddr3_write_leveling_hw(freq
,
241 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
242 return MV_DDR3_TRAINING_ERR_WR_LVL_HW
;
246 if (MV_OK
!= ddr3_write_leveling_hw(
248 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
249 if (ddr3_sw_wl_rl_debug
) {
251 ddr3_write_leveling_sw(
254 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
255 return MV_DDR3_TRAINING_ERR_WR_LVL_SW
;
258 return MV_DDR3_TRAINING_ERR_WR_LVL_HW
;
265 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 3\n");
268 if (MV_OK
!= ddr3_load_patterns(&dram_info
, 0)) {
269 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Loading Patterns)\n");
270 return MV_DDR3_TRAINING_ERR_LOAD_PATTERNS
;
275 * The mainline U-Boot port of the bin_hdr DDR training code
276 * needs a delay of minimum 20ms here (10ms is a bit too short
277 * and the CPU hangs). The bin_hdr code doesn't have this delay.
	 * To be safe here, let's add a delay of 50ms here.
280 * Tested on the Marvell DB-MV784MP-GP board
285 freq
= dram_info
.target_frequency
;
286 tmp_ratio
= ratio_2to1
;
287 DEBUG_MAIN_FULL_S("DDR3 Training Sequence - DEBUG - 4\n");
289 #if defined(MV88F78X60)
291 * There is a difference on the DFS frequency at the
292 * first iteration of this loop
294 if (first_loop_flag
) {
300 if (MV_OK
!= ddr3_dfs_low_2_high(freq
, tmp_ratio
,
302 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs Low2High)\n");
303 return MV_DDR3_TRAINING_ERR_DFS_H2L
;
306 if (ddr3_get_log_level() >= MV_LOG_LEVEL_1
) {
307 ddr3_print_freq(freq
);
311 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 5\n");
314 if (!dqs_clk_aligned
) {
317 * ODT - Multi CS system that not support Multi
318 * CS MRS commands must use SW WL
320 if (dram_info
.cs_ena
> 1) {
321 if (MV_OK
!= ddr3_write_leveling_sw(
322 freq
, tmp_ratio
, &dram_info
)) {
323 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
324 return MV_DDR3_TRAINING_ERR_WR_LVL_SW
;
327 if (MV_OK
!= ddr3_write_leveling_hw(
329 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
330 return MV_DDR3_TRAINING_ERR_WR_LVL_HW
;
334 if ((dram_info
.reg_dimm
== 1) &&
336 if (reg_dimm_skip_wl
== 0) {
337 if (MV_OK
!= ddr3_write_leveling_hw_reg_dimm(
339 DEBUG_MAIN_S("DDR3 Training Sequence - Registered DIMM WL - SKIP\n");
342 if (MV_OK
!= ddr3_write_leveling_hw(
344 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
345 if (ddr3_sw_wl_rl_debug
) {
346 if (MV_OK
!= ddr3_write_leveling_sw(
347 freq
, tmp_ratio
, &dram_info
)) {
348 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
349 return MV_DDR3_TRAINING_ERR_WR_LVL_SW
;
352 return MV_DDR3_TRAINING_ERR_WR_LVL_HW
;
359 ("DDR3 Training Sequence - DEBUG - 6\n");
364 * Armada 370 - Support for HCLK @ 400MHZ - must use
367 if (freq
== DDR_400
&& dram_info
.rl400_bug
) {
368 status
= ddr3_read_leveling_sw(freq
, tmp_ratio
,
370 if (MV_OK
!= status
) {
372 ("DDR3 Training Sequence - FAILED (Read Leveling Sw)\n");
376 if (MV_OK
!= ddr3_read_leveling_hw(
378 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Hw)\n");
379 if (ddr3_sw_wl_rl_debug
) {
380 if (MV_OK
!= ddr3_read_leveling_sw(
383 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Sw)\n");
384 return MV_DDR3_TRAINING_ERR_WR_LVL_SW
;
387 return MV_DDR3_TRAINING_ERR_WR_LVL_HW
;
393 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 7\n");
395 if (MV_OK
!= ddr3_wl_supplement(&dram_info
)) {
396 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hi-Freq Sup)\n");
397 return MV_DDR3_TRAINING_ERR_WR_LVL_HI_FREQ
;
401 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 8\n");
402 #if !defined(MV88F67XX)
403 /* A370 has no PBS mechanism */
404 #if defined(MV88F78X60) || defined(MV88F672X)
405 if (first_loop_flag
== 1) {
409 status
= ddr3_pbs_rx(&dram_info
);
410 if (MV_OK
!= status
) {
411 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (PBS RX)\n");
416 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 9\n");
418 status
= ddr3_pbs_tx(&dram_info
);
419 if (MV_OK
!= status
) {
420 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (PBS TX)\n");
425 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 10\n");
429 } while (freq
!= dram_info
.target_frequency
);
431 status
= ddr3_dqs_centralization_rx(&dram_info
);
432 if (MV_OK
!= status
) {
433 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (DQS Centralization RX)\n");
438 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 11\n");
440 status
= ddr3_dqs_centralization_tx(&dram_info
);
441 if (MV_OK
!= status
) {
442 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (DQS Centralization TX)\n");
447 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 12\n");
450 ddr3_set_performance_params(&dram_info
);
452 if (dram_info
.ecc_ena
) {
453 /* Need to SCRUB the DRAM memory area to load U-boot */
455 dram_info
.num_cs
= 1;
456 dram_info
.cs_ena
= 1;
457 mv_sys_xor_init(&dram_info
);
458 mv_xor_mem_init(0, scrub_offs
, scrub_size
, 0xdeadbeef,
461 /* Wait for previous transfer completion */
462 while (mv_xor_state_get(0) != MV_IDLE
)
466 DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 13\n");
469 /* Return XOR State */
472 #if defined(MV88F78X60)
	/* Save training results in memory for resume state */
474 ddr3_save_training(&dram_info
);
476 /* Clear ODT always on */
477 ddr3_odt_activate(0);
479 /* Configure Dynamic read ODT */
480 ddr3_odt_read_dynamic_config(&dram_info
);
485 void ddr3_set_performance_params(MV_DRAM_INFO
*dram_info
)
487 u32 twr2wr
, trd2rd
, trd2wr_wr2rd
;
490 DEBUG_MAIN_FULL_C("Max WL Phase: ", dram_info
->wl_max_phase
, 2);
491 DEBUG_MAIN_FULL_C("Min WL Phase: ", dram_info
->wl_min_phase
, 2);
492 DEBUG_MAIN_FULL_C("Max RL Phase: ", dram_info
->rl_max_phase
, 2);
493 DEBUG_MAIN_FULL_C("Min RL Phase: ", dram_info
->rl_min_phase
, 2);
495 if (dram_info
->wl_max_phase
< 2)
500 trd2rd
= 0x1 + (dram_info
->rl_max_phase
+ 1) / 2 +
501 (dram_info
->rl_max_phase
+ 1) % 2;
503 tmp1
= (dram_info
->rl_max_phase
- dram_info
->wl_min_phase
) / 2 +
504 (((dram_info
->rl_max_phase
- dram_info
->wl_min_phase
) % 2) >
506 tmp2
= (dram_info
->wl_max_phase
- dram_info
->rl_min_phase
) / 2 +
507 ((dram_info
->wl_max_phase
- dram_info
->rl_min_phase
) % 2 >
509 trd2wr_wr2rd
= (tmp1
>= tmp2
) ? tmp1
: tmp2
;
515 DEBUG_MAIN_FULL_C("WR 2 WR: ", twr2wr
, 2);
516 DEBUG_MAIN_FULL_C("RD 2 RD: ", trd2rd
, 2);
517 DEBUG_MAIN_FULL_C("RD 2 WR / WR 2 RD: ", trd2wr_wr2rd
, 2);
519 reg
= reg_read(REG_SDRAM_TIMING_HIGH_ADDR
);
521 reg
&= ~(REG_SDRAM_TIMING_H_W2W_MASK
<< REG_SDRAM_TIMING_H_W2W_OFFS
);
522 reg
|= ((twr2wr
& REG_SDRAM_TIMING_H_W2W_MASK
) <<
523 REG_SDRAM_TIMING_H_W2W_OFFS
);
525 reg
&= ~(REG_SDRAM_TIMING_H_R2R_MASK
<< REG_SDRAM_TIMING_H_R2R_OFFS
);
526 reg
&= ~(REG_SDRAM_TIMING_H_R2R_H_MASK
<<
527 REG_SDRAM_TIMING_H_R2R_H_OFFS
);
528 reg
|= ((trd2rd
& REG_SDRAM_TIMING_H_R2R_MASK
) <<
529 REG_SDRAM_TIMING_H_R2R_OFFS
);
530 reg
|= (((trd2rd
>> 2) & REG_SDRAM_TIMING_H_R2R_H_MASK
) <<
531 REG_SDRAM_TIMING_H_R2R_H_OFFS
);
533 reg
&= ~(REG_SDRAM_TIMING_H_R2W_W2R_MASK
<<
534 REG_SDRAM_TIMING_H_R2W_W2R_OFFS
);
535 reg
&= ~(REG_SDRAM_TIMING_H_R2W_W2R_H_MASK
<<
536 REG_SDRAM_TIMING_H_R2W_W2R_H_OFFS
);
537 reg
|= ((trd2wr_wr2rd
& REG_SDRAM_TIMING_H_R2W_W2R_MASK
) <<
538 REG_SDRAM_TIMING_H_R2W_W2R_OFFS
);
539 reg
|= (((trd2wr_wr2rd
>> 2) & REG_SDRAM_TIMING_H_R2W_W2R_H_MASK
) <<
540 REG_SDRAM_TIMING_H_R2W_W2R_H_OFFS
);
542 reg_write(REG_SDRAM_TIMING_HIGH_ADDR
, reg
);
546 * Perform DDR3 PUP Indirect Write
548 void ddr3_write_pup_reg(u32 mode
, u32 cs
, u32 pup
, u32 phase
, u32 delay
)
553 reg
|= (1 << REG_PHY_BC_OFFS
);
555 reg
|= (pup
<< REG_PHY_PUP_OFFS
);
557 reg
|= ((0x4 * cs
+ mode
) << REG_PHY_CS_OFFS
);
558 reg
|= (phase
<< REG_PHY_PHASE_OFFS
) | delay
;
560 if (mode
== PUP_WL_MODE
)
561 reg
|= ((INIT_WL_DELAY
+ delay
) << REG_PHY_DQS_REF_DLY_OFFS
);
563 reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
, reg
); /* 0x16A0 */
564 reg
|= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR
;
565 reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
, reg
); /* 0x16A0 */
568 reg
= reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
) &
569 REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE
;
570 } while (reg
); /* Wait for '0' to mark the end of the transaction */
572 /* If read Leveling mode - need to write to register 3 separetly */
573 if (mode
== PUP_RL_MODE
) {
577 reg
|= (1 << REG_PHY_BC_OFFS
);
579 reg
|= (pup
<< REG_PHY_PUP_OFFS
);
581 reg
|= ((0x4 * cs
+ mode
+ 1) << REG_PHY_CS_OFFS
);
582 reg
|= (INIT_RL_DELAY
);
584 reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
, reg
); /* 0x16A0 */
585 reg
|= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR
;
586 reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
, reg
); /* 0x16A0 */
589 reg
= reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
) &
590 REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE
;
596 * Perform DDR3 PUP Indirect Read
598 u32
ddr3_read_pup_reg(u32 mode
, u32 cs
, u32 pup
)
602 reg
= (pup
<< REG_PHY_PUP_OFFS
) |
603 ((0x4 * cs
+ mode
) << REG_PHY_CS_OFFS
);
604 reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
, reg
); /* 0x16A0 */
606 reg
|= REG_PHY_REGISTRY_FILE_ACCESS_OP_RD
;
607 reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
, reg
); /* 0x16A0 */
610 reg
= reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
) &
611 REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE
;
612 } while (reg
); /* Wait for '0' to mark the end of the transaction */
614 return reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
); /* 0x16A0 */
618 * Set training patterns
620 int ddr3_load_patterns(MV_DRAM_INFO
*dram_info
, int resume
)
624 /* Enable SW override - Required for the ECC Pup */
625 reg
= reg_read(REG_DRAM_TRAINING_2_ADDR
) |
626 (1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS
);
628 /* [0] = 1 - Enable SW override */
629 /* 0x15B8 - Training SW 2 Register */
630 reg_write(REG_DRAM_TRAINING_2_ADDR
, reg
);
632 reg
= (1 << REG_DRAM_TRAINING_AUTO_OFFS
);
633 reg_write(REG_DRAM_TRAINING_ADDR
, reg
); /* 0x15B0 - Training Register */
636 #if defined(MV88F78X60) || defined(MV88F672X)
637 ddr3_load_pbs_patterns(dram_info
);
639 ddr3_load_dqs_patterns(dram_info
);
642 /* Disable SW override - Must be in a different stage */
643 /* [0]=0 - Enable SW override */
644 reg
= reg_read(REG_DRAM_TRAINING_2_ADDR
);
645 reg
&= ~(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS
);
646 /* 0x15B8 - Training SW 2 Register */
647 reg_write(REG_DRAM_TRAINING_2_ADDR
, reg
);
649 reg
= reg_read(REG_DRAM_TRAINING_1_ADDR
) |
650 (1 << REG_DRAM_TRAINING_1_TRNBPOINT_OFFS
);
651 reg_write(REG_DRAM_TRAINING_1_ADDR
, reg
);
654 #if defined(MV88F67XX)
655 reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR
, 0);
658 reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR
, 0);
660 reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR
,
661 RESUME_RL_PATTERNS_ADDR
);
666 reg
= (dram_info
->cs_ena
<< REG_DRAM_TRAINING_CS_OFFS
) |
667 (1 << REG_DRAM_TRAINING_PATTERNS_OFFS
);
669 reg
= (0x1 << REG_DRAM_TRAINING_CS_OFFS
) |
670 (1 << REG_DRAM_TRAINING_PATTERNS_OFFS
);
673 reg
|= (1 << REG_DRAM_TRAINING_AUTO_OFFS
);
675 reg_write(REG_DRAM_TRAINING_ADDR
, reg
);
679 /* Check if Successful */
680 if (reg_read(REG_DRAM_TRAINING_ADDR
) &
681 (1 << REG_DRAM_TRAINING_ERROR_OFFS
))
687 #if !defined(MV88F67XX)
689 * Name: ddr3_save_training(MV_DRAM_INFO *dram_info)
 * Desc:	saves the training results to memory (RL,WL,PBS,Rx/Tx
692 * Args: MV_DRAM_INFO *dram_info
696 void ddr3_save_training(MV_DRAM_INFO
*dram_info
)
698 u32 val
, pup
, tmp_cs
, cs
, i
, dq
;
701 u32
*sdram_offset
= (u32
*)RESUME_TRAINING_VALUES_ADDR
;
702 u32 mode_config
[MAX_TRAINING_MODE
];
704 mode_config
[DQS_WR_MODE
] = PUP_DQS_WR
;
705 mode_config
[WL_MODE_
] = PUP_WL_MODE
;
706 mode_config
[RL_MODE_
] = PUP_RL_MODE
;
707 mode_config
[DQS_RD_MODE
] = PUP_DQS_RD
;
708 mode_config
[PBS_TX_DM_MODE
] = PUP_PBS_TX_DM
;
709 mode_config
[PBS_TX_MODE
] = PUP_PBS_TX
;
710 mode_config
[PBS_RX_MODE
] = PUP_PBS_RX
;
712 /* num of training modes */
713 for (i
= 0; i
< MAX_TRAINING_MODE
; i
++) {
714 tmp_cs
= dram_info
->cs_ena
;
716 for (cs
= 0; cs
< MAX_CS
; cs
++) {
717 if (tmp_cs
& (1 << cs
)) {
719 for (pup
= 0; pup
< dram_info
->num_of_total_pups
;
721 if (pup
== dram_info
->num_of_std_pups
&&
724 if (i
== PBS_TX_DM_MODE
) {
726 * Change CS bitmask because
727 * PBS works only with CS0
730 val
= ddr3_read_pup_reg(
731 mode_config
[i
], CS0
, pup
);
732 } else if (i
== PBS_TX_MODE
||
735 * Change CS bitmask because
736 * PBS works only with CS0
739 for (dq
= 0; dq
<= DQ_NUM
;
741 val
= ddr3_read_pup_reg(
745 (*sdram_offset
) = val
;
746 crc
+= *sdram_offset
;
752 val
= ddr3_read_pup_reg(
753 mode_config
[i
], cs
, pup
);
757 crc
+= *sdram_offset
;
765 *sdram_offset
= reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR
);
766 crc
+= *sdram_offset
;
769 *sdram_offset
= reg_read(REG_READ_DATA_READY_DELAYS_ADDR
);
770 crc
+= *sdram_offset
;
773 sdram_offset
= (u32
*)NUM_OF_REGISTER_ADDR
;
774 *sdram_offset
= regs
;
775 DEBUG_SUSPEND_RESUME_S("Training Results CheckSum write= ");
776 DEBUG_SUSPEND_RESUME_D(crc
, 8);
777 DEBUG_SUSPEND_RESUME_S("\n");
778 sdram_offset
= (u32
*)CHECKSUM_RESULT_ADDR
;
783 * Name: ddr3_read_training_results()
 * Desc:	Reads the training results from memory (RL,WL,PBS,Rx/Tx
786 * and writes them to the relevant registers
787 * Args: MV_DRAM_INFO *dram_info
791 int ddr3_read_training_results(void)
793 u32 val
, reg
, idx
, dqs_wr_idx
= 0, crc
= 0;
794 u32
*sdram_offset
= (u32
*)RESUME_TRAINING_VALUES_ADDR
;
795 u32 training_val
[RESUME_TRAINING_VALUES_MAX
] = { 0 };
796 u32 regs
= *((u32
*)NUM_OF_REGISTER_ADDR
);
799 * Read Training results & Dunit registers from memory and write
802 for (idx
= 0; idx
< regs
; idx
++) {
803 training_val
[idx
] = *sdram_offset
;
804 crc
+= *sdram_offset
;
808 sdram_offset
= (u32
*)CHECKSUM_RESULT_ADDR
;
810 if ((*sdram_offset
) == crc
) {
811 DEBUG_SUSPEND_RESUME_S("Training Results CheckSum read PASS= ");
812 DEBUG_SUSPEND_RESUME_D(crc
, 8);
813 DEBUG_SUSPEND_RESUME_S("\n");
815 DEBUG_MAIN_S("Wrong Training Results CheckSum\n");
820 * We iterate through all the registers except for the last 2 since
821 * they are Dunit registers (and not PHY registers)
823 for (idx
= 0; idx
< (regs
- 2); idx
++) {
824 val
= training_val
[idx
];
825 reg
= (val
>> REG_PHY_CS_OFFS
) & 0x3F; /*read the phy address */
827 /* Check if the values belongs to the DQS WR */
828 if (reg
== PUP_WL_MODE
) {
829 /* bit[5:0] in DQS_WR are delay */
830 val
= (training_val
[dqs_wr_idx
++] & 0x3F);
832 * bit[15:10] are DQS_WR delay & bit[9:0] are
835 val
= (val
<< REG_PHY_DQS_REF_DLY_OFFS
) |
836 (training_val
[idx
] & 0x3C003FF);
837 /* Add Request pending and write operation bits */
838 val
|= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR
;
839 } else if (reg
== PUP_DQS_WR
) {
841 * Do nothing since DQS_WR will be done in PUP_WL_MODE
846 val
|= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR
;
847 reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
, val
);
849 val
= (reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR
)) &
850 REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE
;
851 } while (val
); /* Wait for '0' to mark the end of the transaction */
854 /* write last 2 Dunit configurations */
855 val
= training_val
[idx
];
856 reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR
, val
); /* reg 0x1538 */
857 val
= training_val
[idx
+ 1];
858 reg_write(REG_READ_DATA_READY_DELAYS_ADDR
, val
); /* reg 0x153c */
864 * Name: ddr3_check_if_resume_mode()
865 * Desc: Reads the address (0x3000) of the Resume Magic word (0xDEADB002)
866 * Args: MV_DRAM_INFO *dram_info
868 * Returns: return (magic_word == SUSPEND_MAGIC_WORD)
870 int ddr3_check_if_resume_mode(MV_DRAM_INFO
*dram_info
, u32 freq
)
873 u32
*sdram_offset
= (u32
*)BOOT_INFO_ADDR
;
875 if (dram_info
->reg_dimm
!= 1) {
		 * Perform write leveling in order to initiate the phy with
880 if (MV_OK
!= ddr3_write_leveling_hw(freq
, dram_info
)) {
881 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
882 return MV_DDR3_TRAINING_ERR_WR_LVL_HW
;
886 if (MV_OK
!= ddr3_load_patterns(dram_info
, 1)) {
887 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Loading Patterns)\n");
888 return MV_DDR3_TRAINING_ERR_LOAD_PATTERNS
;
891 /* Enable CS0 only for RL */
892 dram_info
->cs_ena
= 0x1;
	/* Perform read leveling in order to get stable memory */
895 if (MV_OK
!= ddr3_read_leveling_hw(freq
, dram_info
)) {
896 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Hw)\n");
897 return MV_DDR3_TRAINING_ERR_WR_LVL_HW
;
900 /* Back to relevant CS */
901 dram_info
->cs_ena
= ddr3_get_cs_ena_from_reg();
903 magic_word
= *sdram_offset
;
904 return magic_word
== SUSPEND_MAGIC_WORD
;
908 * Name: ddr3_training_suspend_resume()
909 * Desc: Execute the Resume state
910 * Args: MV_DRAM_INFO *dram_info
912 * Returns: return (magic_word == SUSPEND_MAGIC_WORD)
914 int ddr3_training_suspend_resume(MV_DRAM_INFO
*dram_info
)
920 if (MV_OK
!= ddr3_read_training_results())
923 /* Reset read FIFO */
924 reg
= reg_read(REG_DRAM_TRAINING_ADDR
);
926 /* Start Auto Read Leveling procedure */
927 reg
|= (1 << REG_DRAM_TRAINING_RL_OFFS
);
928 reg_write(REG_DRAM_TRAINING_ADDR
, reg
); /* 0x15B0 - Training Register */
930 reg
= reg_read(REG_DRAM_TRAINING_2_ADDR
);
931 reg
|= ((1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS
) +
932 (1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS
));
934 /* [0] = 1 - Enable SW override, [4] = 1 - FIFO reset */
935 /* 0x15B8 - Training SW 2 Register */
936 reg_write(REG_DRAM_TRAINING_2_ADDR
, reg
);
940 reg
= reg_read(REG_DRAM_TRAINING_ADDR
);
941 /* Clear Auto Read Leveling procedure */
942 reg
&= ~(1 << REG_DRAM_TRAINING_RL_OFFS
);
943 reg_write(REG_DRAM_TRAINING_ADDR
, reg
); /* 0x15B0 - Training Register */
945 /* Return to target frequency */
946 freq
= dram_info
->target_frequency
;
948 if (MV_OK
!= ddr3_dfs_low_2_high(freq
, tmp_ratio
, dram_info
)) {
949 DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs Low2High)\n");
950 return MV_DDR3_TRAINING_ERR_DFS_H2L
;
953 if (dram_info
->ecc_ena
) {
		/* Scrubbing the RL area pattern and the training area */
956 dram_info
->num_cs
= 1;
957 dram_info
->cs_ena
= 1;
958 mv_sys_xor_init(dram_info
);
959 mv_xor_mem_init(0, RESUME_RL_PATTERNS_ADDR
,
960 RESUME_RL_PATTERNS_SIZE
, 0xFFFFFFFF, 0xFFFFFFFF);
962 /* Wait for previous transfer completion */
964 while (mv_xor_state_get(0) != MV_IDLE
)
967 /* Return XOR State */
975 void ddr3_print_freq(u32 freq
)
1017 printf("Current frequency is: %dMHz\n", tmp_freq
);
1020 int ddr3_get_min_max_read_sample_delay(u32 cs_enable
, u32 reg
, u32
*min
,
1021 u32
*max
, u32
*cs_max
)
1028 for (cs
= 0; cs
< MAX_CS
; cs
++) {
1029 if ((cs_enable
& (1 << cs
)) == 0)
1032 delay
= ((reg
>> (cs
* 8)) & 0x1F);
1046 int ddr3_get_min_max_rl_phase(MV_DRAM_INFO
*dram_info
, u32
*min
, u32
*max
,
1049 u32 pup
, reg
, phase
;
1054 for (pup
= 0; pup
< dram_info
->num_of_total_pups
; pup
++) {
1055 reg
= ddr3_read_pup_reg(PUP_RL_MODE
, cs
, pup
);
1056 phase
= ((reg
>> 8) & 0x7);
1068 int ddr3_odt_activate(int activate
)
1072 mask
= (1 << REG_DUNIT_ODT_CTRL_OVRD_OFFS
) |
1073 (1 << REG_DUNIT_ODT_CTRL_OVRD_VAL_OFFS
);
1074 /* {0x0000149C} - DDR Dunit ODT Control Register */
1075 reg
= reg_read(REG_DUNIT_ODT_CTRL_ADDR
);
1081 reg_write(REG_DUNIT_ODT_CTRL_ADDR
, reg
);
1086 int ddr3_odt_read_dynamic_config(MV_DRAM_INFO
*dram_info
)
1088 u32 min_read_sample_delay
, max_read_sample_delay
, max_rl_phase
;
1089 u32 min
, max
, cs_max
;
1092 reg
= reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR
);
1093 cs_ena
= ddr3_get_cs_ena_from_reg();
1095 /* Get minimum and maximum of read sample delay of all CS */
1096 ddr3_get_min_max_read_sample_delay(cs_ena
, reg
, &min_read_sample_delay
,
1097 &max_read_sample_delay
, &cs_max
);
1100 * Get minimum and maximum read leveling phase which belongs to the
1101 * maximal read sample delay
1103 ddr3_get_min_max_rl_phase(dram_info
, &min
, &max
, cs_max
);
1106 /* DDR ODT Timing (Low) Register calculation */
1107 reg
= reg_read(REG_ODT_TIME_LOW_ADDR
);
1108 reg
&= ~(0x1FF << REG_ODT_ON_CTL_RD_OFFS
);
1109 reg
|= (((min_read_sample_delay
- 1) & 0xF) << REG_ODT_ON_CTL_RD_OFFS
);
1110 reg
|= (((max_read_sample_delay
+ 4 + (((max_rl_phase
+ 1) / 2) + 1)) &
1111 0x1F) << REG_ODT_OFF_CTL_RD_OFFS
);
1112 reg_write(REG_ODT_TIME_LOW_ADDR
, reg
);