/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */
10 #include <asm/arch/cpu.h>
11 #include <asm/arch/soc.h>
13 #include "ddr3_init.h"
/* alternating 01/10 data words used as basic training test patterns */
#define PATTERN_1 0x55555555
#define PATTERN_2 0xaaaaaaaa
/*
 * A training window [e1, e2] is accepted when it is wider than 33 taps
 * and its low edge e1 starts below tap 67.
 * NOTE(review): function-like macro; e1 and e2 are each evaluated more
 * than once - do not pass expressions with side effects.
 */
#define VALIDATE_TRAINING_LIMIT(e1, e2) \
	((((e2) - (e1) + 1) > 33) && ((e1) < 67))
/*
 * Backup storage for PHY register values, one slot per
 * interface / bus (pup) / bit.
 * NOTE(review): no reader/writer is visible in this chunk - presumably
 * saved before and restored after a training stage; confirm against the
 * rest of the file.
 */
u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
/*
 * Flat training-result buffer: one u32 per bit, per bus, per interface,
 * for each search direction. ddr3_tip_get_buf_ptr() computes offsets
 * into this array as
 *   search * (MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS)
 *   + interface_num * (MAX_BUS_NUM * BUS_WIDTH_IN_BITS).
 */
u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
		 HWS_SEARCH_DIR_LIMIT];
26 u16 mask_results_dq_reg_map
[] = {
27 RESULT_CONTROL_PUP_0_BIT_0_REG
, RESULT_CONTROL_PUP_0_BIT_1_REG
,
28 RESULT_CONTROL_PUP_0_BIT_2_REG
, RESULT_CONTROL_PUP_0_BIT_3_REG
,
29 RESULT_CONTROL_PUP_0_BIT_4_REG
, RESULT_CONTROL_PUP_0_BIT_5_REG
,
30 RESULT_CONTROL_PUP_0_BIT_6_REG
, RESULT_CONTROL_PUP_0_BIT_7_REG
,
31 RESULT_CONTROL_PUP_1_BIT_0_REG
, RESULT_CONTROL_PUP_1_BIT_1_REG
,
32 RESULT_CONTROL_PUP_1_BIT_2_REG
, RESULT_CONTROL_PUP_1_BIT_3_REG
,
33 RESULT_CONTROL_PUP_1_BIT_4_REG
, RESULT_CONTROL_PUP_1_BIT_5_REG
,
34 RESULT_CONTROL_PUP_1_BIT_6_REG
, RESULT_CONTROL_PUP_1_BIT_7_REG
,
35 RESULT_CONTROL_PUP_2_BIT_0_REG
, RESULT_CONTROL_PUP_2_BIT_1_REG
,
36 RESULT_CONTROL_PUP_2_BIT_2_REG
, RESULT_CONTROL_PUP_2_BIT_3_REG
,
37 RESULT_CONTROL_PUP_2_BIT_4_REG
, RESULT_CONTROL_PUP_2_BIT_5_REG
,
38 RESULT_CONTROL_PUP_2_BIT_6_REG
, RESULT_CONTROL_PUP_2_BIT_7_REG
,
39 RESULT_CONTROL_PUP_3_BIT_0_REG
, RESULT_CONTROL_PUP_3_BIT_1_REG
,
40 RESULT_CONTROL_PUP_3_BIT_2_REG
, RESULT_CONTROL_PUP_3_BIT_3_REG
,
41 RESULT_CONTROL_PUP_3_BIT_4_REG
, RESULT_CONTROL_PUP_3_BIT_5_REG
,
42 RESULT_CONTROL_PUP_3_BIT_6_REG
, RESULT_CONTROL_PUP_3_BIT_7_REG
,
43 RESULT_CONTROL_PUP_4_BIT_0_REG
, RESULT_CONTROL_PUP_4_BIT_1_REG
,
44 RESULT_CONTROL_PUP_4_BIT_2_REG
, RESULT_CONTROL_PUP_4_BIT_3_REG
,
45 RESULT_CONTROL_PUP_4_BIT_4_REG
, RESULT_CONTROL_PUP_4_BIT_5_REG
,
46 RESULT_CONTROL_PUP_4_BIT_6_REG
, RESULT_CONTROL_PUP_4_BIT_7_REG
,
49 u16 mask_results_pup_reg_map
[] = {
50 RESULT_CONTROL_BYTE_PUP_0_REG
, RESULT_CONTROL_BYTE_PUP_1_REG
,
51 RESULT_CONTROL_BYTE_PUP_2_REG
, RESULT_CONTROL_BYTE_PUP_3_REG
,
52 RESULT_CONTROL_BYTE_PUP_4_REG
55 u16 mask_results_dq_reg_map_pup3_ecc
[] = {
56 RESULT_CONTROL_PUP_0_BIT_0_REG
, RESULT_CONTROL_PUP_0_BIT_1_REG
,
57 RESULT_CONTROL_PUP_0_BIT_2_REG
, RESULT_CONTROL_PUP_0_BIT_3_REG
,
58 RESULT_CONTROL_PUP_0_BIT_4_REG
, RESULT_CONTROL_PUP_0_BIT_5_REG
,
59 RESULT_CONTROL_PUP_0_BIT_6_REG
, RESULT_CONTROL_PUP_0_BIT_7_REG
,
60 RESULT_CONTROL_PUP_1_BIT_0_REG
, RESULT_CONTROL_PUP_1_BIT_1_REG
,
61 RESULT_CONTROL_PUP_1_BIT_2_REG
, RESULT_CONTROL_PUP_1_BIT_3_REG
,
62 RESULT_CONTROL_PUP_1_BIT_4_REG
, RESULT_CONTROL_PUP_1_BIT_5_REG
,
63 RESULT_CONTROL_PUP_1_BIT_6_REG
, RESULT_CONTROL_PUP_1_BIT_7_REG
,
64 RESULT_CONTROL_PUP_2_BIT_0_REG
, RESULT_CONTROL_PUP_2_BIT_1_REG
,
65 RESULT_CONTROL_PUP_2_BIT_2_REG
, RESULT_CONTROL_PUP_2_BIT_3_REG
,
66 RESULT_CONTROL_PUP_2_BIT_4_REG
, RESULT_CONTROL_PUP_2_BIT_5_REG
,
67 RESULT_CONTROL_PUP_2_BIT_6_REG
, RESULT_CONTROL_PUP_2_BIT_7_REG
,
68 RESULT_CONTROL_PUP_4_BIT_0_REG
, RESULT_CONTROL_PUP_4_BIT_1_REG
,
69 RESULT_CONTROL_PUP_4_BIT_2_REG
, RESULT_CONTROL_PUP_4_BIT_3_REG
,
70 RESULT_CONTROL_PUP_4_BIT_4_REG
, RESULT_CONTROL_PUP_4_BIT_5_REG
,
71 RESULT_CONTROL_PUP_4_BIT_6_REG
, RESULT_CONTROL_PUP_4_BIT_7_REG
,
72 RESULT_CONTROL_PUP_4_BIT_0_REG
, RESULT_CONTROL_PUP_4_BIT_1_REG
,
73 RESULT_CONTROL_PUP_4_BIT_2_REG
, RESULT_CONTROL_PUP_4_BIT_3_REG
,
74 RESULT_CONTROL_PUP_4_BIT_4_REG
, RESULT_CONTROL_PUP_4_BIT_5_REG
,
75 RESULT_CONTROL_PUP_4_BIT_6_REG
, RESULT_CONTROL_PUP_4_BIT_7_REG
,
78 u16 mask_results_pup_reg_map_pup3_ecc
[] = {
79 RESULT_CONTROL_BYTE_PUP_0_REG
, RESULT_CONTROL_BYTE_PUP_1_REG
,
80 RESULT_CONTROL_BYTE_PUP_2_REG
, RESULT_CONTROL_BYTE_PUP_4_REG
,
81 RESULT_CONTROL_BYTE_PUP_4_REG
84 struct pattern_info pattern_table_16
[] = {
86 * num tx phases, tx burst, delay between, rx pattern,
87 * start_address, pattern_len
89 {1, 1, 2, 1, 0x0080, 2}, /* PATTERN_PBS1 */
90 {1, 1, 2, 1, 0x00c0, 2}, /* PATTERN_PBS2 */
91 {1, 1, 2, 1, 0x0100, 2}, /* PATTERN_RL */
92 {0xf, 0x7, 2, 0x7, 0x0140, 16}, /* PATTERN_STATIC_PBS */
93 {0xf, 0x7, 2, 0x7, 0x0190, 16}, /* PATTERN_KILLER_DQ0 */
94 {0xf, 0x7, 2, 0x7, 0x01d0, 16}, /* PATTERN_KILLER_DQ1 */
95 {0xf, 0x7, 2, 0x7, 0x0210, 16}, /* PATTERN_KILLER_DQ2 */
96 {0xf, 0x7, 2, 0x7, 0x0250, 16}, /* PATTERN_KILLER_DQ3 */
97 {0xf, 0x7, 2, 0x7, 0x0290, 16}, /* PATTERN_KILLER_DQ4 */
98 {0xf, 0x7, 2, 0x7, 0x02d0, 16}, /* PATTERN_KILLER_DQ5 */
99 {0xf, 0x7, 2, 0x7, 0x0310, 16}, /* PATTERN_KILLER_DQ6 */
100 {0xf, 0x7, 2, 0x7, 0x0350, 16}, /* PATTERN_KILLER_DQ7 */
101 {1, 1, 2, 1, 0x0380, 2}, /* PATTERN_PBS3 */
102 {1, 1, 2, 1, 0x0000, 2}, /* PATTERN_RL2 */
103 {1, 1, 2, 1, 0x0040, 2}, /* PATTERN_TEST */
104 {0xf, 0x7, 2, 0x7, 0x03c0, 16}, /* PATTERN_FULL_SSO_1T */
105 {0xf, 0x7, 2, 0x7, 0x0400, 16}, /* PATTERN_FULL_SSO_2T */
106 {0xf, 0x7, 2, 0x7, 0x0440, 16}, /* PATTERN_FULL_SSO_3T */
107 {0xf, 0x7, 2, 0x7, 0x0480, 16}, /* PATTERN_FULL_SSO_4T */
108 {0xf, 0x7, 2, 0x7, 0x04c0, 16} /* PATTERN_VREF */
109 /*Note: actual start_address is <<3 of defined addess */
112 struct pattern_info pattern_table_32
[] = {
114 * num tx phases, tx burst, delay between, rx pattern,
115 * start_address, pattern_len
117 {3, 3, 2, 3, 0x0080, 4}, /* PATTERN_PBS1 */
118 {3, 3, 2, 3, 0x00c0, 4}, /* PATTERN_PBS2 */
119 {3, 3, 2, 3, 0x0100, 4}, /* PATTERN_RL */
120 {0x1f, 0xf, 2, 0xf, 0x0140, 32}, /* PATTERN_STATIC_PBS */
121 {0x1f, 0xf, 2, 0xf, 0x0190, 32}, /* PATTERN_KILLER_DQ0 */
122 {0x1f, 0xf, 2, 0xf, 0x01d0, 32}, /* PATTERN_KILLER_DQ1 */
123 {0x1f, 0xf, 2, 0xf, 0x0210, 32}, /* PATTERN_KILLER_DQ2 */
124 {0x1f, 0xf, 2, 0xf, 0x0250, 32}, /* PATTERN_KILLER_DQ3 */
125 {0x1f, 0xf, 2, 0xf, 0x0290, 32}, /* PATTERN_KILLER_DQ4 */
126 {0x1f, 0xf, 2, 0xf, 0x02d0, 32}, /* PATTERN_KILLER_DQ5 */
127 {0x1f, 0xf, 2, 0xf, 0x0310, 32}, /* PATTERN_KILLER_DQ6 */
128 {0x1f, 0xf, 2, 0xf, 0x0350, 32}, /* PATTERN_KILLER_DQ7 */
129 {3, 3, 2, 3, 0x0380, 4}, /* PATTERN_PBS3 */
130 {3, 3, 2, 3, 0x0000, 4}, /* PATTERN_RL2 */
131 {3, 3, 2, 3, 0x0040, 4}, /* PATTERN_TEST */
132 {0x1f, 0xf, 2, 0xf, 0x03c0, 32}, /* PATTERN_FULL_SSO_1T */
133 {0x1f, 0xf, 2, 0xf, 0x0400, 32}, /* PATTERN_FULL_SSO_2T */
134 {0x1f, 0xf, 2, 0xf, 0x0440, 32}, /* PATTERN_FULL_SSO_3T */
135 {0x1f, 0xf, 2, 0xf, 0x0480, 32}, /* PATTERN_FULL_SSO_4T */
136 {0x1f, 0xf, 2, 0xf, 0x04c0, 32} /* PATTERN_VREF */
137 /*Note: actual start_address is <<3 of defined addess */
/*
 * File-scope training state. None of these (except max_polling_for_done)
 * is read within this part of the file - presumably they record the
 * parameters of the last training run for debug/trace purposes; confirm
 * against the rest of the driver.
 * NOTE(review): "traintrain_cs_type", "traine_search_dir" and
 * "train_if_acess" look like typos, but they are external globals, so
 * renaming would break other translation units.
 */
enum hws_ddr_cs traintrain_cs_type;
enum hws_training_result train_result_type;
enum hws_control_element train_control_element;
enum hws_search_dir traine_search_dir;
enum hws_dir train_direction;
u32 train_init_value;
u32 train_number_iterations;
enum hws_pattern train_pattern;
enum hws_edge_compare train_edge_compare;
u32 train_if_acess, train_if_id, train_pup_access;
/* upper bound for the "training done" polling loops below */
u32 max_polling_for_done = 1000000;
156 u32
*ddr3_tip_get_buf_ptr(u32 dev_num
, enum hws_search_dir search
,
157 enum hws_training_result result_type
,
162 buf_ptr
= &training_res
163 [MAX_INTERFACE_NUM
* MAX_BUS_NUM
* BUS_WIDTH_IN_BITS
* search
+
164 interface_num
* MAX_BUS_NUM
* BUS_WIDTH_IN_BITS
];
171 * Note: for one edge search only from fail to pass, else jitter can
172 * be be entered into solution.
174 int ddr3_tip_ip_training(u32 dev_num
, enum hws_access_type access_type
,
176 enum hws_access_type pup_access_type
,
177 u32 pup_num
, enum hws_training_result result_type
,
178 enum hws_control_element control_element
,
179 enum hws_search_dir search_dir
, enum hws_dir direction
,
180 u32 interface_mask
, u32 init_value
, u32 num_iter
,
181 enum hws_pattern pattern
,
182 enum hws_edge_compare edge_comp
,
183 enum hws_ddr_cs cs_type
, u32 cs_num
,
184 enum hws_training_ip_stat
*train_status
)
186 u32 mask_dq_num_of_regs
, mask_pup_num_of_regs
, index_cnt
, poll_cnt
,
189 u32 delay_between_burst
;
191 u32 read_data
[MAX_INTERFACE_NUM
];
192 struct pattern_info
*pattern_table
= ddr3_tip_get_pattern_table();
193 u16
*mask_results_pup_reg_map
= ddr3_tip_get_mask_results_pup_reg_map();
194 u16
*mask_results_dq_reg_map
= ddr3_tip_get_mask_results_dq_reg();
195 struct hws_topology_map
*tm
= ddr3_get_topology_map();
197 if (pup_num
>= tm
->num_of_bus_per_interface
) {
198 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
199 ("pup_num %d not valid\n", pup_num
));
201 if (interface_num
>= MAX_INTERFACE_NUM
) {
202 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
203 ("if_id %d not valid\n",
206 if (train_status
== NULL
) {
207 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
208 ("error param 4\n"));
213 if (cs_type
== CS_SINGLE
) {
215 CHECK_STATUS(ddr3_tip_if_write
216 (dev_num
, access_type
, interface_num
,
217 CS_ENABLE_REG
, 1 << 3, 1 << 3));
219 CHECK_STATUS(ddr3_tip_if_write
220 (dev_num
, access_type
, interface_num
,
221 ODPG_DATA_CONTROL_REG
,
222 (0x3 | (effective_cs
<< 26)), 0xc000003));
224 CHECK_STATUS(ddr3_tip_if_write
225 (dev_num
, access_type
, interface_num
,
226 CS_ENABLE_REG
, 0, 1 << 3));
228 CHECK_STATUS(ddr3_tip_if_write
229 (dev_num
, access_type
, interface_num
,
230 ODPG_DATA_CONTROL_REG
, 0x3 | cs_num
<< 26,
234 /* load pattern to ODPG */
235 ddr3_tip_load_pattern_to_odpg(dev_num
, access_type
, interface_num
,
237 pattern_table
[pattern
].start_addr
);
238 tx_burst_size
= (direction
== OPER_WRITE
) ?
239 pattern_table
[pattern
].tx_burst_size
: 0;
240 delay_between_burst
= (direction
== OPER_WRITE
) ? 2 : 0;
241 rd_mode
= (direction
== OPER_WRITE
) ? 1 : 0;
242 CHECK_STATUS(ddr3_tip_configure_odpg
243 (dev_num
, access_type
, interface_num
, direction
,
244 pattern_table
[pattern
].num_of_phases_tx
, tx_burst_size
,
245 pattern_table
[pattern
].num_of_phases_rx
,
246 delay_between_burst
, rd_mode
, effective_cs
, STRESS_NONE
,
248 reg_data
= (direction
== OPER_READ
) ? 0 : (0x3 << 30);
249 reg_data
|= (direction
== OPER_READ
) ? 0x60 : 0xfa;
250 CHECK_STATUS(ddr3_tip_if_write
251 (dev_num
, access_type
, interface_num
,
252 ODPG_WRITE_READ_MODE_ENABLE_REG
, reg_data
,
254 reg_data
= (edge_comp
== EDGE_PF
|| edge_comp
== EDGE_FP
) ? 0 : 1 << 6;
255 reg_data
|= (edge_comp
== EDGE_PF
|| edge_comp
== EDGE_PFP
) ?
258 /* change from Pass to Fail will lock the result */
259 if (pup_access_type
== ACCESS_TYPE_MULTICAST
)
260 reg_data
|= 0xe << 14;
262 reg_data
|= pup_num
<< 14;
264 if (edge_comp
== EDGE_FP
) {
265 /* don't search for readl edge change, only the state */
266 reg_data
|= (0 << 20);
267 } else if (edge_comp
== EDGE_FPF
) {
268 reg_data
|= (0 << 20);
270 reg_data
|= (3 << 20);
273 CHECK_STATUS(ddr3_tip_if_write
274 (dev_num
, access_type
, interface_num
,
275 ODPG_TRAINING_CONTROL_REG
,
276 reg_data
| (0x7 << 8) | (0x7 << 11),
277 (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
278 (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));
279 reg_data
= (search_dir
== HWS_LOW2HIGH
) ? 0 : (1 << 8);
280 CHECK_STATUS(ddr3_tip_if_write
281 (dev_num
, access_type
, interface_num
, ODPG_OBJ1_OPCODE_REG
,
282 1 | reg_data
| init_value
<< 9 | (1 << 25) | (1 << 26),
283 0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));
286 * Write2_dunit(0x10b4, Number_iteration , [15:0])
287 * Max number of iterations
289 CHECK_STATUS(ddr3_tip_if_write(dev_num
, access_type
, interface_num
,
290 ODPG_OBJ1_ITER_CNT_REG
, num_iter
,
292 if (control_element
== HWS_CONTROL_ELEMENT_DQ_SKEW
&&
293 direction
== OPER_READ
) {
295 * Write2_dunit(0x10c0, 0x5f , [7:0])
296 * MC PBS Reg Address at DDR PHY
299 effective_cs
* CALIBRATED_OBJECTS_REG_ADDR_OFFSET
;
300 } else if (control_element
== HWS_CONTROL_ELEMENT_DQ_SKEW
&&
301 direction
== OPER_WRITE
) {
303 effective_cs
* CALIBRATED_OBJECTS_REG_ADDR_OFFSET
;
304 } else if (control_element
== HWS_CONTROL_ELEMENT_ADLL
&&
305 direction
== OPER_WRITE
) {
307 * LOOP 0x00000001 + 4*n:
308 * where n (0-3) represents M_CS number
311 * Write2_dunit(0x10c0, 0x1 , [7:0])
312 * ADLL WR Reg Address at DDR PHY
314 reg_data
= 1 + effective_cs
* CS_REGISTER_ADDR_OFFSET
;
315 } else if (control_element
== HWS_CONTROL_ELEMENT_ADLL
&&
316 direction
== OPER_READ
) {
317 /* ADLL RD Reg Address at DDR PHY */
318 reg_data
= 3 + effective_cs
* CS_REGISTER_ADDR_OFFSET
;
319 } else if (control_element
== HWS_CONTROL_ELEMENT_DQS_SKEW
&&
320 direction
== OPER_WRITE
) {
321 /* TBD not defined in 0.5.0 requirement */
322 } else if (control_element
== HWS_CONTROL_ELEMENT_DQS_SKEW
&&
323 direction
== OPER_READ
) {
324 /* TBD not defined in 0.5.0 requirement */
327 reg_data
|= (0x6 << 28);
328 CHECK_STATUS(ddr3_tip_if_write
329 (dev_num
, access_type
, interface_num
, CALIB_OBJ_PRFA_REG
,
330 reg_data
| (init_value
<< 8),
331 0xff | (0xffff << 8) | (0xf << 24) | (u32
) (0xf << 28)));
333 mask_dq_num_of_regs
= tm
->num_of_bus_per_interface
* BUS_WIDTH_IN_BITS
;
334 mask_pup_num_of_regs
= tm
->num_of_bus_per_interface
;
336 if (result_type
== RESULT_PER_BIT
) {
337 for (index_cnt
= 0; index_cnt
< mask_dq_num_of_regs
;
339 CHECK_STATUS(ddr3_tip_if_write
340 (dev_num
, access_type
, interface_num
,
341 mask_results_dq_reg_map
[index_cnt
], 0,
345 /* Mask disabled buses */
346 for (pup_id
= 0; pup_id
< tm
->num_of_bus_per_interface
;
348 if (IS_ACTIVE(tm
->bus_act_mask
, pup_id
) == 1)
351 for (index_cnt
= (mask_dq_num_of_regs
- pup_id
* 8);
353 (mask_dq_num_of_regs
- (pup_id
+ 1) * 8);
355 CHECK_STATUS(ddr3_tip_if_write
356 (dev_num
, access_type
,
358 mask_results_dq_reg_map
359 [index_cnt
], (1 << 24), 1 << 24));
363 for (index_cnt
= 0; index_cnt
< mask_pup_num_of_regs
;
365 CHECK_STATUS(ddr3_tip_if_write
366 (dev_num
, access_type
, interface_num
,
367 mask_results_pup_reg_map
[index_cnt
],
368 (1 << 24), 1 << 24));
370 } else if (result_type
== RESULT_PER_BYTE
) {
372 for (index_cnt
= 0; index_cnt
< mask_pup_num_of_regs
;
374 CHECK_STATUS(ddr3_tip_if_write
375 (dev_num
, access_type
, interface_num
,
376 mask_results_pup_reg_map
[index_cnt
], 0,
379 for (index_cnt
= 0; index_cnt
< mask_dq_num_of_regs
;
381 CHECK_STATUS(ddr3_tip_if_write
382 (dev_num
, access_type
, interface_num
,
383 mask_results_dq_reg_map
[index_cnt
],
384 (1 << 24), (1 << 24)));
388 /* Start Training Trigger */
389 CHECK_STATUS(ddr3_tip_if_write(dev_num
, access_type
, interface_num
,
390 ODPG_TRAINING_TRIGGER_REG
, 1, 1));
391 /* wait for all RFU tests to finish (or timeout) */
392 /* WA for 16 bit mode, more investigation needed */
395 /* Training "Done ?" */
396 for (index_cnt
= 0; index_cnt
< MAX_INTERFACE_NUM
; index_cnt
++) {
397 if (IS_ACTIVE(tm
->if_act_mask
, index_cnt
) == 0)
400 if (interface_mask
& (1 << index_cnt
)) {
401 /* need to check results for this Dunit */
402 for (poll_cnt
= 0; poll_cnt
< max_polling_for_done
;
404 CHECK_STATUS(ddr3_tip_if_read
405 (dev_num
, ACCESS_TYPE_UNICAST
,
407 ODPG_TRAINING_STATUS_REG
,
408 ®_data
, MASK_ALL_BITS
));
409 if ((reg_data
& 0x2) != 0) {
411 train_status
[index_cnt
] =
412 HWS_TRAINING_IP_STATUS_SUCCESS
;
417 if (poll_cnt
== max_polling_for_done
) {
418 train_status
[index_cnt
] =
419 HWS_TRAINING_IP_STATUS_TIMEOUT
;
422 /* Be sure that ODPG done */
423 CHECK_STATUS(is_odpg_access_done(dev_num
, index_cnt
));
426 /* Write ODPG done in Dunit */
427 CHECK_STATUS(ddr3_tip_if_write
428 (dev_num
, ACCESS_TYPE_MULTICAST
, PARAM_NOT_CARE
,
429 ODPG_STATUS_DONE_REG
, 0, 0x1));
431 /* wait for all Dunit tests to finish (or timeout) */
432 /* Training "Done ?" */
433 /* Training "Pass ?" */
434 for (index_cnt
= 0; index_cnt
< MAX_INTERFACE_NUM
; index_cnt
++) {
435 if (IS_ACTIVE(tm
->if_act_mask
, index_cnt
) == 0)
438 if (interface_mask
& (1 << index_cnt
)) {
439 /* need to check results for this Dunit */
440 for (poll_cnt
= 0; poll_cnt
< max_polling_for_done
;
442 CHECK_STATUS(ddr3_tip_if_read
443 (dev_num
, ACCESS_TYPE_UNICAST
,
445 ODPG_TRAINING_TRIGGER_REG
,
446 read_data
, MASK_ALL_BITS
));
447 reg_data
= read_data
[index_cnt
];
448 if ((reg_data
& 0x2) != 0) {
450 if ((reg_data
& 0x4) == 0) {
451 train_status
[index_cnt
] =
452 HWS_TRAINING_IP_STATUS_SUCCESS
;
454 train_status
[index_cnt
] =
455 HWS_TRAINING_IP_STATUS_FAIL
;
461 if (poll_cnt
== max_polling_for_done
) {
462 train_status
[index_cnt
] =
463 HWS_TRAINING_IP_STATUS_TIMEOUT
;
468 CHECK_STATUS(ddr3_tip_if_write
469 (dev_num
, ACCESS_TYPE_MULTICAST
, PARAM_NOT_CARE
,
470 ODPG_DATA_CONTROL_REG
, 0, MASK_ALL_BITS
));
476 * Load expected Pattern to ODPG
478 int ddr3_tip_load_pattern_to_odpg(u32 dev_num
, enum hws_access_type access_type
,
479 u32 if_id
, enum hws_pattern pattern
,
482 u32 pattern_length_cnt
= 0;
483 struct pattern_info
*pattern_table
= ddr3_tip_get_pattern_table();
485 for (pattern_length_cnt
= 0;
486 pattern_length_cnt
< pattern_table
[pattern
].pattern_len
;
487 pattern_length_cnt
++) {
488 CHECK_STATUS(ddr3_tip_if_write
489 (dev_num
, access_type
, if_id
,
490 ODPG_PATTERN_DATA_LOW_REG
,
491 pattern_table_get_word(dev_num
, pattern
,
492 (u8
) (pattern_length_cnt
*
493 2)), MASK_ALL_BITS
));
494 CHECK_STATUS(ddr3_tip_if_write
495 (dev_num
, access_type
, if_id
,
496 ODPG_PATTERN_DATA_HI_REG
,
497 pattern_table_get_word(dev_num
, pattern
,
498 (u8
) (pattern_length_cnt
*
501 CHECK_STATUS(ddr3_tip_if_write
502 (dev_num
, access_type
, if_id
,
503 ODPG_PATTERN_ADDR_REG
, pattern_length_cnt
,
507 CHECK_STATUS(ddr3_tip_if_write
508 (dev_num
, access_type
, if_id
,
509 ODPG_PATTERN_ADDR_OFFSET_REG
, load_addr
, MASK_ALL_BITS
));
517 int ddr3_tip_configure_odpg(u32 dev_num
, enum hws_access_type access_type
,
518 u32 if_id
, enum hws_dir direction
, u32 tx_phases
,
519 u32 tx_burst_size
, u32 rx_phases
,
520 u32 delay_between_burst
, u32 rd_mode
, u32 cs_num
,
521 u32 addr_stress_jump
, u32 single_pattern
)
526 data_value
= ((single_pattern
<< 2) | (tx_phases
<< 5) |
527 (tx_burst_size
<< 11) | (delay_between_burst
<< 15) |
528 (rx_phases
<< 21) | (rd_mode
<< 25) | (cs_num
<< 26) |
529 (addr_stress_jump
<< 29));
530 ret
= ddr3_tip_if_write(dev_num
, access_type
, if_id
,
531 ODPG_DATA_CONTROL_REG
, data_value
, 0xaffffffc);
538 int ddr3_tip_process_result(u32
*ar_result
, enum hws_edge e_edge
,
539 enum hws_edge_search e_edge_search
,
543 int tap_val
, max_val
= -10000, min_val
= 10000;
544 int lock_success
= 1;
546 for (i
= 0; i
< BUS_WIDTH_IN_BITS
; i
++) {
547 res
= GET_LOCK_RESULT(ar_result
[i
]);
552 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
553 ("lock failed for bit %d\n", i
));
556 if (lock_success
== 1) {
557 for (i
= 0; i
< BUS_WIDTH_IN_BITS
; i
++) {
558 tap_val
= GET_TAP_RESULT(ar_result
[i
], e_edge
);
559 if (tap_val
> max_val
)
561 if (tap_val
< min_val
)
563 if (e_edge_search
== TRAINING_EDGE_MAX
)
564 *edge_result
= (u32
) max_val
;
566 *edge_result
= (u32
) min_val
;
568 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
569 ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d Edge_result %d\n",
570 i
, ar_result
[i
], tap_val
,
582 * Read training search result
584 int ddr3_tip_read_training_result(u32 dev_num
, u32 if_id
,
585 enum hws_access_type pup_access_type
,
586 u32 pup_num
, u32 bit_num
,
587 enum hws_search_dir search
,
588 enum hws_dir direction
,
589 enum hws_training_result result_type
,
590 enum hws_training_load_op operation
,
591 u32 cs_num_type
, u32
**load_res
,
592 int is_read_from_db
, u8 cons_tap
,
593 int is_check_result_validity
)
595 u32 reg_offset
, pup_cnt
, start_pup
, end_pup
, start_reg
, end_reg
;
596 u32
*interface_train_res
= NULL
;
597 u16
*reg_addr
= NULL
;
598 u32 read_data
[MAX_INTERFACE_NUM
];
599 u16
*mask_results_pup_reg_map
= ddr3_tip_get_mask_results_pup_reg_map();
600 u16
*mask_results_dq_reg_map
= ddr3_tip_get_mask_results_dq_reg();
601 struct hws_topology_map
*tm
= ddr3_get_topology_map();
604 * Agreed assumption: all CS mask contain same number of bits,
605 * i.e. in multi CS, the number of CS per memory is the same for
608 CHECK_STATUS(ddr3_tip_if_write
609 (dev_num
, ACCESS_TYPE_UNICAST
, if_id
, CS_ENABLE_REG
,
610 (cs_num_type
== 0) ? 1 << 3 : 0, (1 << 3)));
611 CHECK_STATUS(ddr3_tip_if_write
612 (dev_num
, ACCESS_TYPE_UNICAST
, if_id
,
613 ODPG_DATA_CONTROL_REG
, (cs_num_type
<< 26), (3 << 26)));
614 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE
,
615 ("Read_from_d_b %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
616 is_read_from_db
, cs_num_type
, operation
,
617 result_type
, direction
, search
, pup_num
,
618 if_id
, pup_access_type
));
620 if ((load_res
== NULL
) && (is_read_from_db
== 1)) {
621 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
622 ("ddr3_tip_read_training_result load_res = NULL"));
625 if (pup_num
>= tm
->num_of_bus_per_interface
) {
626 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
627 ("pup_num %d not valid\n", pup_num
));
629 if (if_id
>= MAX_INTERFACE_NUM
) {
630 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
631 ("if_id %d not valid\n", if_id
));
633 if (result_type
== RESULT_PER_BIT
)
634 reg_addr
= mask_results_dq_reg_map
;
636 reg_addr
= mask_results_pup_reg_map
;
637 if (pup_access_type
== ACCESS_TYPE_UNICAST
) {
640 } else { /*pup_access_type == ACCESS_TYPE_MULTICAST) */
643 end_pup
= tm
->num_of_bus_per_interface
- 1;
646 for (pup_cnt
= start_pup
; pup_cnt
<= end_pup
; pup_cnt
++) {
647 VALIDATE_ACTIVE(tm
->bus_act_mask
, pup_cnt
);
648 DEBUG_TRAINING_IP_ENGINE(
650 ("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
651 if_id
, start_pup
, end_pup
, pup_cnt
));
652 if (result_type
== RESULT_PER_BIT
) {
653 if (bit_num
== ALL_BITS_PER_PUP
) {
654 start_reg
= pup_cnt
* BUS_WIDTH_IN_BITS
;
655 end_reg
= (pup_cnt
+ 1) * BUS_WIDTH_IN_BITS
- 1;
658 pup_cnt
* BUS_WIDTH_IN_BITS
+ bit_num
;
659 end_reg
= pup_cnt
* BUS_WIDTH_IN_BITS
+ bit_num
;
666 interface_train_res
=
667 ddr3_tip_get_buf_ptr(dev_num
, search
, result_type
,
669 DEBUG_TRAINING_IP_ENGINE(
671 ("start_reg %d end_reg %d interface %p\n",
672 start_reg
, end_reg
, interface_train_res
));
673 if (interface_train_res
== NULL
) {
674 DEBUG_TRAINING_IP_ENGINE(
676 ("interface_train_res is NULL\n"));
680 for (reg_offset
= start_reg
; reg_offset
<= end_reg
;
682 if (operation
== TRAINING_LOAD_OPERATION_UNLOAD
) {
683 if (is_read_from_db
== 0) {
684 CHECK_STATUS(ddr3_tip_if_read
688 reg_addr
[reg_offset
],
691 if (is_check_result_validity
== 1) {
692 if ((read_data
[if_id
] &
706 interface_train_res
[reg_offset
]
710 DEBUG_TRAINING_IP_ENGINE
712 ("reg_offset %d value 0x%x addr %p\n",
720 &interface_train_res
[start_reg
];
721 DEBUG_TRAINING_IP_ENGINE
723 ("*load_res %p\n", *load_res
));
726 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE
,
727 ("not supported\n"));
736 * Load all pattern to memory using ODPG
738 int ddr3_tip_load_all_pattern_to_mem(u32 dev_num
)
740 u32 pattern
= 0, if_id
;
741 struct hws_topology_map
*tm
= ddr3_get_topology_map();
743 for (if_id
= 0; if_id
<= MAX_INTERFACE_NUM
- 1; if_id
++) {
744 VALIDATE_ACTIVE(tm
->if_act_mask
, if_id
);
745 training_result
[training_stage
][if_id
] = TEST_SUCCESS
;
748 for (if_id
= 0; if_id
<= MAX_INTERFACE_NUM
- 1; if_id
++) {
749 VALIDATE_ACTIVE(tm
->if_act_mask
, if_id
);
750 /* enable single cs */
751 CHECK_STATUS(ddr3_tip_if_write
752 (dev_num
, ACCESS_TYPE_UNICAST
, if_id
,
753 CS_ENABLE_REG
, (1 << 3), (1 << 3)));
756 for (pattern
= 0; pattern
< PATTERN_LIMIT
; pattern
++)
757 ddr3_tip_load_pattern_to_mem(dev_num
, pattern
);
763 * Wait till ODPG access is ready
765 int is_odpg_access_done(u32 dev_num
, u32 if_id
)
767 u32 poll_cnt
= 0, data_value
;
768 u32 read_data
[MAX_INTERFACE_NUM
];
770 for (poll_cnt
= 0; poll_cnt
< MAX_POLLING_ITERATIONS
; poll_cnt
++) {
771 CHECK_STATUS(ddr3_tip_if_read
772 (dev_num
, ACCESS_TYPE_UNICAST
, if_id
,
773 ODPG_BIST_DONE
, read_data
, MASK_ALL_BITS
));
774 data_value
= read_data
[if_id
];
775 if (((data_value
>> ODPG_BIST_DONE_BIT_OFFS
) & 0x1) ==
776 ODPG_BIST_DONE_BIT_VALUE
) {
777 data_value
= data_value
& 0xfffffffe;
778 CHECK_STATUS(ddr3_tip_if_write
779 (dev_num
, ACCESS_TYPE_UNICAST
,
780 if_id
, ODPG_BIST_DONE
, data_value
,
786 if (poll_cnt
>= MAX_POLLING_ITERATIONS
) {
787 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
788 ("Bist Activate: poll failure 2\n"));
796 * Load specific pattern to memory using ODPG
798 int ddr3_tip_load_pattern_to_mem(u32 dev_num
, enum hws_pattern pattern
)
801 struct pattern_info
*pattern_table
= ddr3_tip_get_pattern_table();
802 struct hws_topology_map
*tm
= ddr3_get_topology_map();
804 /* load pattern to memory */
806 * Write Tx mode, CS0, phases, Tx burst size, delay between burst,
810 0x1 | (pattern_table
[pattern
].num_of_phases_tx
<< 5) |
811 (pattern_table
[pattern
].tx_burst_size
<< 11) |
812 (pattern_table
[pattern
].delay_between_bursts
<< 15) |
813 (pattern_table
[pattern
].num_of_phases_rx
<< 21) | (0x1 << 25) |
814 (effective_cs
<< 26);
815 CHECK_STATUS(ddr3_tip_if_write
816 (dev_num
, ACCESS_TYPE_MULTICAST
, PARAM_NOT_CARE
,
817 ODPG_DATA_CONTROL_REG
, reg_data
, MASK_ALL_BITS
));
818 /* ODPG Write enable from BIST */
819 CHECK_STATUS(ddr3_tip_if_write
820 (dev_num
, ACCESS_TYPE_MULTICAST
, PARAM_NOT_CARE
,
821 ODPG_DATA_CONTROL_REG
, (0x1 | (effective_cs
<< 26)),
823 /* disable error injection */
824 CHECK_STATUS(ddr3_tip_if_write
825 (dev_num
, ACCESS_TYPE_MULTICAST
, PARAM_NOT_CARE
,
826 ODPG_WRITE_DATA_ERROR_REG
, 0, 0x1));
827 /* load pattern to ODPG */
828 ddr3_tip_load_pattern_to_odpg(dev_num
, ACCESS_TYPE_MULTICAST
,
829 PARAM_NOT_CARE
, pattern
,
830 pattern_table
[pattern
].start_addr
);
832 for (if_id
= 0; if_id
< MAX_INTERFACE_NUM
; if_id
++) {
833 if (IS_ACTIVE(tm
->if_act_mask
, if_id
) == 0)
836 CHECK_STATUS(ddr3_tip_if_write
837 (dev_num
, ACCESS_TYPE_UNICAST
, if_id
, 0x1498,
841 CHECK_STATUS(ddr3_tip_if_write
842 (dev_num
, ACCESS_TYPE_MULTICAST
, PARAM_NOT_CARE
,
843 ODPG_ENABLE_REG
, 0x1 << ODPG_ENABLE_OFFS
,
844 (0x1 << ODPG_ENABLE_OFFS
)));
848 for (if_id
= 0; if_id
<= MAX_INTERFACE_NUM
- 1; if_id
++) {
849 VALIDATE_ACTIVE(tm
->if_act_mask
, if_id
);
850 CHECK_STATUS(is_odpg_access_done(dev_num
, if_id
));
853 /* Disable ODPG and stop write to memory */
854 CHECK_STATUS(ddr3_tip_if_write
855 (dev_num
, ACCESS_TYPE_MULTICAST
, PARAM_NOT_CARE
,
856 ODPG_DATA_CONTROL_REG
, (0x1 << 30), (u32
) (0x3 << 30)));
858 /* return to default */
859 CHECK_STATUS(ddr3_tip_if_write
860 (dev_num
, ACCESS_TYPE_MULTICAST
, PARAM_NOT_CARE
,
861 ODPG_DATA_CONTROL_REG
, 0, MASK_ALL_BITS
));
863 /* Disable odt0 for CS0 training - need to adjust for multy CS */
864 CHECK_STATUS(ddr3_tip_if_write
865 (dev_num
, ACCESS_TYPE_MULTICAST
, PARAM_NOT_CARE
, 0x1498,
868 /* temporary added */
875 * Load specific pattern to memory using CPU
877 int ddr3_tip_load_pattern_to_mem_by_cpu(u32 dev_num
, enum hws_pattern pattern
,
885 * Training search routine
887 int ddr3_tip_ip_training_wrapper_int(u32 dev_num
,
888 enum hws_access_type access_type
,
890 enum hws_access_type pup_access_type
,
891 u32 pup_num
, u32 bit_num
,
892 enum hws_training_result result_type
,
893 enum hws_control_element control_element
,
894 enum hws_search_dir search_dir
,
895 enum hws_dir direction
,
896 u32 interface_mask
, u32 init_value_l2h
,
897 u32 init_value_h2l
, u32 num_iter
,
898 enum hws_pattern pattern
,
899 enum hws_edge_compare edge_comp
,
900 enum hws_ddr_cs train_cs_type
, u32 cs_num
,
901 enum hws_training_ip_stat
*train_status
)
903 u32 interface_num
= 0, start_if
, end_if
, init_value_used
;
904 enum hws_search_dir search_dir_id
, start_search
, end_search
;
905 enum hws_edge_compare edge_comp_used
;
906 u8 cons_tap
= (direction
== OPER_WRITE
) ? (64) : (0);
907 struct hws_topology_map
*tm
= ddr3_get_topology_map();
909 if (train_status
== NULL
) {
910 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
911 ("train_status is NULL\n"));
915 if ((train_cs_type
> CS_NON_SINGLE
) ||
916 (edge_comp
>= EDGE_PFP
) ||
917 (pattern
>= PATTERN_LIMIT
) ||
918 (direction
> OPER_WRITE_AND_READ
) ||
919 (search_dir
> HWS_HIGH2LOW
) ||
920 (control_element
> HWS_CONTROL_ELEMENT_DQS_SKEW
) ||
921 (result_type
> RESULT_PER_BYTE
) ||
922 (pup_num
>= tm
->num_of_bus_per_interface
) ||
923 (pup_access_type
> ACCESS_TYPE_MULTICAST
) ||
924 (if_id
> 11) || (access_type
> ACCESS_TYPE_MULTICAST
)) {
925 DEBUG_TRAINING_IP_ENGINE(
927 ("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
928 train_cs_type
, edge_comp
, pattern
, direction
,
929 search_dir
, control_element
, result_type
, pup_num
,
930 pup_access_type
, if_id
, access_type
));
934 if (edge_comp
== EDGE_FPF
) {
935 start_search
= HWS_LOW2HIGH
;
936 end_search
= HWS_HIGH2LOW
;
937 edge_comp_used
= EDGE_FP
;
939 start_search
= search_dir
;
940 end_search
= search_dir
;
941 edge_comp_used
= edge_comp
;
944 for (search_dir_id
= start_search
; search_dir_id
<= end_search
;
946 init_value_used
= (search_dir_id
== HWS_LOW2HIGH
) ?
947 init_value_l2h
: init_value_h2l
;
948 DEBUG_TRAINING_IP_ENGINE(
950 ("dev_num %d, access_type %d, if_id %d, pup_access_type %d,pup_num %d, result_type %d, control_element %d search_dir_id %d, direction %d, interface_mask %d,init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
951 dev_num
, access_type
, if_id
, pup_access_type
, pup_num
,
952 result_type
, control_element
, search_dir_id
,
953 direction
, interface_mask
, init_value_used
, num_iter
,
954 pattern
, edge_comp_used
, train_cs_type
, cs_num
));
956 ddr3_tip_ip_training(dev_num
, access_type
, if_id
,
957 pup_access_type
, pup_num
, result_type
,
958 control_element
, search_dir_id
, direction
,
959 interface_mask
, init_value_used
, num_iter
,
960 pattern
, edge_comp_used
, train_cs_type
,
961 cs_num
, train_status
);
962 if (access_type
== ACCESS_TYPE_MULTICAST
) {
964 end_if
= MAX_INTERFACE_NUM
- 1;
970 for (interface_num
= start_if
; interface_num
<= end_if
;
972 VALIDATE_ACTIVE(tm
->if_act_mask
, interface_num
);
974 CHECK_STATUS(ddr3_tip_read_training_result
975 (dev_num
, interface_num
, pup_access_type
,
976 pup_num
, bit_num
, search_dir_id
,
977 direction
, result_type
,
978 TRAINING_LOAD_OPERATION_UNLOAD
,
979 train_cs_type
, NULL
, 0, cons_tap
,
988 * Training search & read result routine
/*
 * NOTE(review): this block is a lossy extraction of
 * ddr3_tip_ip_training_wrapper() — tokens are split across lines and the
 * embedded original line numbers jump (1002->1005, 1029->1031, 1052->1055,
 * ...), so several statements (declarations of if_id/pup_num/pup_id, loop
 * headers/increments, closing braces, the early-return paths after the
 * parameter checks, and the final return) are missing from view.  Restore
 * the function from revision history before building; comments below
 * describe only what the surviving fragments show.
 */
/*
 * Training search & read-result wrapper: runs the training IP search via
 * ddr3_tip_ip_training_wrapper_int(), then — for multicast access — walks
 * every active interface/pup/bit, reads back the L2H and H2L edge taps,
 * marks bits whose window fails VALIDATE_TRAINING_LIMIT() as "problem
 * bits", and re-runs halved-iteration L2H and H2L searches to fix them.
 */
990 int ddr3_tip_ip_training_wrapper(u32 dev_num
, enum hws_access_type access_type
,
992 enum hws_access_type pup_access_type
,
994 enum hws_training_result result_type
,
995 enum hws_control_element control_element
,
996 enum hws_search_dir search_dir
,
997 enum hws_dir direction
, u32 interface_mask
,
998 u32 init_value_l2h
, u32 init_value_h2l
,
999 u32 num_iter
, enum hws_pattern pattern
,
1000 enum hws_edge_compare edge_comp
,
1001 enum hws_ddr_cs train_cs_type
, u32 cs_num
,
1002 enum hws_training_ip_stat
*train_status
)
/* Loop counters plus per-pup problem-bit bookkeeping. */
1005 u32 interface_cnt
, bit_id
, start_if
, end_if
, bit_end
= 0;
/* One result pointer per search direction (L2H / H2L). */
1006 u32
*result
[HWS_SEARCH_DIR_LIMIT
] = { 0 };
/*
 * cons_tap: extra tap offset applied for write (TX) direction only;
 * its consumer is in lines missing from this extract — TODO confirm.
 */
1007 u8 cons_tap
= (direction
== OPER_WRITE
) ? (64) : (0);
1008 u8 bit_bit_mask
[MAX_BUS_NUM
] = { 0 }, bit_bit_mask_active
= 0;
1010 struct hws_topology_map
*tm
= ddr3_get_topology_map();
/* Parameter validation; the error-return statements are missing here. */
1012 if (pup_num
>= tm
->num_of_bus_per_interface
) {
1013 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
1014 ("pup_num %d not valid\n", pup_num
));
1017 if (if_id
>= MAX_INTERFACE_NUM
) {
1018 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR
,
1019 ("if_id %d not valid\n", if_id
));
/* First pass: full training run through the internal wrapper. */
1022 CHECK_STATUS(ddr3_tip_ip_training_wrapper_int
1023 (dev_num
, access_type
, if_id
, pup_access_type
, pup_num
,
1024 ALL_BITS_PER_PUP
, result_type
, control_element
,
1025 search_dir
, direction
, interface_mask
, init_value_l2h
,
1026 init_value_h2l
, num_iter
, pattern
, edge_comp
,
1027 train_cs_type
, cs_num
, train_status
));
/*
 * Multicast: scan all interfaces; the unicast (else) branch that sets
 * start_if/end_if to if_id is missing from this extract.
 */
1029 if (access_type
== ACCESS_TYPE_MULTICAST
) {
1031 end_if
= MAX_INTERFACE_NUM
- 1;
1037 for (interface_cnt
= start_if
; interface_cnt
<= end_if
;
1039 VALIDATE_ACTIVE(tm
->if_act_mask
, interface_cnt
);
/* Per-pup loop; its `for (pup_id = 0;` header line is missing. */
1041 pup_id
<= (tm
->num_of_bus_per_interface
- 1); pup_id
++) {
1042 VALIDATE_ACTIVE(tm
->bus_act_mask
, pup_id
);
1043 if (result_type
== RESULT_PER_BIT
)
1044 bit_end
= BUS_WIDTH_IN_BITS
- 1;
1048 bit_bit_mask
[pup_id
] = 0;
1049 for (bit_id
= 0; bit_id
<= bit_end
; bit_id
++) {
1050 enum hws_search_dir search_dir_id
;
1051 for (search_dir_id
= HWS_LOW2HIGH
;
1052 search_dir_id
<= HWS_HIGH2LOW
;
/* Unload the per-bit result for this search direction. */
1055 (ddr3_tip_read_training_result
1056 (dev_num
, interface_cnt
,
1057 ACCESS_TYPE_UNICAST
, pup_id
,
1058 bit_id
, search_dir_id
,
1059 direction
, result_type
,
1060 TRAINING_LOAD_OPERATION_UNLOAD
,
1062 &result
[search_dir_id
],
/* e1/e2: edge taps extracted from the L2H and H2L results. */
1065 e1
= GET_TAP_RESULT(result
[HWS_LOW2HIGH
][0],
1067 e2
= GET_TAP_RESULT(result
[HWS_HIGH2LOW
][0],
1069 DEBUG_TRAINING_IP_ENGINE(
1071 ("wrapper if_id %d pup_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
1072 interface_cnt
, pup_id
, bit_id
,
1073 result
[HWS_LOW2HIGH
][0], e1
,
1074 result
[HWS_HIGH2LOW
][0], e2
));
1075 /* TBD validate is valid only for tx */
/*
 * NOTE(review): both GET_LOCK_RESULT() operands below read
 * result[HWS_LOW2HIGH][0]; the second was presumably meant to
 * check result[HWS_HIGH2LOW][0] (copy-paste) — confirm upstream.
 */
1076 if (VALIDATE_TRAINING_LIMIT(e1
, e2
) == 1 &&
1077 GET_LOCK_RESULT(result
[HWS_LOW2HIGH
][0]) &&
1078 GET_LOCK_RESULT(result
[HWS_LOW2HIGH
][0])) {
1079 /* Mark problem bits */
1080 bit_bit_mask
[pup_id
] |= 1 << bit_id
;
1081 bit_bit_mask_active
= 1;
1083 } /* For all bits */
1084 } /* For all PUPs */
1086 /* Fix problem bits */
1087 if (bit_bit_mask_active
!= 0) {
1088 u32
*l2h_if_train_res
= NULL
;
1089 u32
*h2l_if_train_res
= NULL
;
/*
 * Fetch the per-direction result buffers; the trailing
 * arguments/assignments of these calls are missing here.
 */
1091 ddr3_tip_get_buf_ptr(dev_num
, HWS_LOW2HIGH
,
1095 ddr3_tip_get_buf_ptr(dev_num
, HWS_HIGH2LOW
,
/* Re-run the L2H search with num_iter halved. */
1099 ddr3_tip_ip_training(dev_num
, ACCESS_TYPE_UNICAST
,
1101 ACCESS_TYPE_MULTICAST
,
1102 PARAM_NOT_CARE
, result_type
,
1103 control_element
, HWS_LOW2HIGH
,
1104 direction
, interface_mask
,
1105 num_iter
/ 2, num_iter
/ 2,
1106 pattern
, EDGE_FP
, train_cs_type
,
1107 cs_num
, train_status
);
/* Unload L2H results for the marked bits only (skip clean pups). */
1110 pup_id
<= (tm
->num_of_bus_per_interface
- 1);
1112 VALIDATE_ACTIVE(tm
->bus_act_mask
, pup_id
);
1114 if (bit_bit_mask
[pup_id
] == 0)
1117 for (bit_id
= 0; bit_id
<= bit_end
; bit_id
++) {
1118 if ((bit_bit_mask
[pup_id
] &
1119 (1 << bit_id
)) == 0)
1122 (ddr3_tip_read_training_result
1123 (dev_num
, interface_cnt
,
1124 ACCESS_TYPE_UNICAST
, pup_id
,
1125 bit_id
, HWS_LOW2HIGH
,
1128 TRAINING_LOAD_OPERATION_UNLOAD
,
1129 CS_SINGLE
, &l2h_if_train_res
,
/* Re-run the H2L search with num_iter halved, then unload it. */
1134 ddr3_tip_ip_training(dev_num
, ACCESS_TYPE_UNICAST
,
1136 ACCESS_TYPE_MULTICAST
,
1137 PARAM_NOT_CARE
, result_type
,
1138 control_element
, HWS_HIGH2LOW
,
1139 direction
, interface_mask
,
1140 num_iter
/ 2, num_iter
/ 2,
1141 pattern
, EDGE_FP
, train_cs_type
,
1142 cs_num
, train_status
);
1145 pup_id
<= (tm
->num_of_bus_per_interface
- 1);
1147 VALIDATE_ACTIVE(tm
->bus_act_mask
, pup_id
);
1149 if (bit_bit_mask
[pup_id
] == 0)
1152 for (bit_id
= 0; bit_id
<= bit_end
; bit_id
++) {
1153 if ((bit_bit_mask
[pup_id
] &
1154 (1 << bit_id
)) == 0)
1157 (ddr3_tip_read_training_result
1158 (dev_num
, interface_cnt
,
1159 ACCESS_TYPE_UNICAST
, pup_id
,
1160 bit_id
, HWS_HIGH2LOW
, direction
,
1162 TRAINING_LOAD_OPERATION_UNLOAD
,
1163 CS_SINGLE
, &h2l_if_train_res
,
1167 } /* if bit_bit_mask_active */
1168 } /* For all Interfacess */
/*
 * NOTE(review): lossy extraction — the original line numbers jump
 * (1183->1185, 1185->1187, 1196->1198, ...), so the bus-loop increment,
 * the branch selecting read-vs-write (presumably `if (b_load == 1)` /
 * `else` — TODO confirm against history), one PHY register name, several
 * closing parens/braces, and the final return are missing from view.
 */
/*
 * Save or restore per-interface/per-bus PHY centralization registers.
 * b_load != 0 appears to read the registers into the phy_reg_bk backup
 * array; b_load == 0 writes them back from it (see the read calls taking
 * &phy_reg_bk[...] vs the write calls passing phy_reg_bk[...]).
 */
1176 int ddr3_tip_load_phy_values(int b_load
)
1178 u32 bus_cnt
= 0, if_id
, dev_num
= 0;
1179 struct hws_topology_map
*tm
= ddr3_get_topology_map();
/* Walk every active interface and every active bus (pup) on it. */
1181 for (if_id
= 0; if_id
<= MAX_INTERFACE_NUM
- 1; if_id
++) {
1182 VALIDATE_ACTIVE(tm
->if_act_mask
, if_id
);
1183 for (bus_cnt
= 0; bus_cnt
< GET_TOPOLOGY_NUM_OF_BUSES();
1185 VALIDATE_ACTIVE(tm
->bus_act_mask
, bus_cnt
);
/* Backup path: read three PHY regs into phy_reg_bk[if][bus]. */
1187 CHECK_STATUS(ddr3_tip_bus_read
1189 ACCESS_TYPE_UNICAST
, bus_cnt
,
1191 WRITE_CENTRALIZATION_PHY_REG
+
1193 CS_REGISTER_ADDR_OFFSET
),
1194 &phy_reg_bk
[if_id
][bus_cnt
]
/* Second backed-up register; its name is on a missing line. */
1196 CHECK_STATUS(ddr3_tip_bus_read
1198 ACCESS_TYPE_UNICAST
, bus_cnt
,
1202 CS_REGISTER_ADDR_OFFSET
),
1203 &phy_reg_bk
[if_id
][bus_cnt
]
1205 CHECK_STATUS(ddr3_tip_bus_read
1207 ACCESS_TYPE_UNICAST
, bus_cnt
,
1209 READ_CENTRALIZATION_PHY_REG
+
1211 CS_REGISTER_ADDR_OFFSET
),
1212 &phy_reg_bk
[if_id
][bus_cnt
]
/* Restore path: write the same three regs back from the backup. */
1215 CHECK_STATUS(ddr3_tip_bus_write
1216 (dev_num
, ACCESS_TYPE_UNICAST
,
1217 if_id
, ACCESS_TYPE_UNICAST
,
1218 bus_cnt
, DDR_PHY_DATA
,
1219 WRITE_CENTRALIZATION_PHY_REG
+
1221 CS_REGISTER_ADDR_OFFSET
),
1222 phy_reg_bk
[if_id
][bus_cnt
]
1224 CHECK_STATUS(ddr3_tip_bus_write
1225 (dev_num
, ACCESS_TYPE_UNICAST
,
1226 if_id
, ACCESS_TYPE_UNICAST
,
1227 bus_cnt
, DDR_PHY_DATA
,
1230 CS_REGISTER_ADDR_OFFSET
),
1231 phy_reg_bk
[if_id
][bus_cnt
]
1233 CHECK_STATUS(ddr3_tip_bus_write
1234 (dev_num
, ACCESS_TYPE_UNICAST
,
1235 if_id
, ACCESS_TYPE_UNICAST
,
1236 bus_cnt
, DDR_PHY_DATA
,
1237 READ_CENTRALIZATION_PHY_REG
+
1239 CS_REGISTER_ADDR_OFFSET
),
1240 phy_reg_bk
[if_id
][bus_cnt
]
/*
 * NOTE(review): lossy extraction — the embedded line numbers jump
 * (1276->1281, 1297->1299, 1309->1321), so the declaration of `res`,
 * loop increments, some wrapper arguments (init_val1/init_val2, edge,
 * train status), result-print arguments, closing braces, and the final
 * return are missing from view.
 */
/*
 * Training IP self-test: back up PHY values, then for every pattern in
 * [start_pattern, end_pattern] and every search direction run the
 * training wrapper over all interfaces/pups (multicast, ADLL control
 * element), unload and print the per-pup results (byte- or bit-granular
 * depending on result_type), and finally restore the PHY values.
 */
1249 int ddr3_tip_training_ip_test(u32 dev_num
, enum hws_training_result result_type
,
1250 enum hws_search_dir search_dir
,
1251 enum hws_dir direction
,
1252 enum hws_edge_compare edge
,
1253 u32 init_val1
, u32 init_val2
,
1254 u32 num_of_iterations
,
1255 u32 start_pattern
, u32 end_pattern
)
1257 u32 pattern
, if_id
, pup_id
;
1258 enum hws_training_ip_stat train_status
[MAX_INTERFACE_NUM
];
1260 u32 search_state
= 0;
1261 struct hws_topology_map
*tm
= ddr3_get_topology_map();
/* Back up PHY centralization registers before the destructive sweep. */
1263 ddr3_tip_load_phy_values(1);
1265 for (pattern
= start_pattern
; pattern
<= end_pattern
; pattern
++) {
1266 for (search_state
= 0; search_state
< HWS_SEARCH_DIR_LIMIT
;
1268 ddr3_tip_ip_training_wrapper(dev_num
,
1269 ACCESS_TYPE_MULTICAST
, 0,
1270 ACCESS_TYPE_MULTICAST
, 0,
1272 HWS_CONTROL_ELEMENT_ADLL
,
1273 search_dir
, direction
,
1276 num_of_iterations
, pattern
,
/* Read back and print results for every active interface/pup. */
1281 for (if_id
= 0; if_id
<= MAX_INTERFACE_NUM
- 1;
1283 VALIDATE_ACTIVE(tm
->if_act_mask
, if_id
);
1284 for (pup_id
= 0; pup_id
<
1285 tm
->num_of_bus_per_interface
;
1287 VALIDATE_ACTIVE(tm
->bus_act_mask
,
/* `res` receives the unloaded result; its declaration is missing. */
1290 (ddr3_tip_read_training_result
1292 ACCESS_TYPE_UNICAST
, pup_id
,
1295 direction
, result_type
,
1296 TRAINING_LOAD_OPERATION_UNLOAD
,
1297 CS_SINGLE
, &res
, 1, 0,
1299 if (result_type
== RESULT_PER_BYTE
) {
1300 DEBUG_TRAINING_IP_ENGINE
1302 ("search_state %d if_id %d pup_id %d 0x%x\n",
1303 search_state
, if_id
,
/* Bit-granular: one result word per DQ bit (8 values). */
1306 DEBUG_TRAINING_IP_ENGINE
1308 ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
1309 search_state
, if_id
,
/* Restore the PHY values saved at function entry. */
1321 ddr3_tip_load_phy_values(0);
1326 struct pattern_info
*ddr3_tip_get_pattern_table()
1328 struct hws_topology_map
*tm
= ddr3_get_topology_map();
1330 if (DDR3_IS_16BIT_DRAM_MODE(tm
->bus_act_mask
) == 0)
1331 return pattern_table_32
;
1333 return pattern_table_16
;
1336 u16
*ddr3_tip_get_mask_results_dq_reg()
1338 struct hws_topology_map
*tm
= ddr3_get_topology_map();
1340 if (DDR3_IS_ECC_PUP3_MODE(tm
->bus_act_mask
))
1341 return mask_results_dq_reg_map_pup3_ecc
;
1343 return mask_results_dq_reg_map
;
1346 u16
*ddr3_tip_get_mask_results_pup_reg_map()
1348 struct hws_topology_map
*tm
= ddr3_get_topology_map();
1350 if (DDR3_IS_ECC_PUP3_MODE(tm
->bus_act_mask
))
1351 return mask_results_pup_reg_map_pup3_ecc
;
1353 return mask_results_pup_reg_map
;