/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#define PATTERN_1	0x55555555
#define PATTERN_2	0xaaaaaaaa

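/*
 * Window sanity check used by the training wrapper below: a search
 * window is accepted only if it is wider than 33 taps and its low edge
 * sits below tap 67. The exact thresholds appear to be empirically
 * chosen margins for the ADLL tap range.
 */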
#define VALIDATE_TRAINING_LIMIT(e1, e2) \
	((((e2) - (e1) + 1) > 33) && ((e1) < 67))

u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];

u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
		 HWS_SEARCH_DIR_LIMIT];

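/*
 * Per-bit result registers, ordered pup 0..4 with bits 0..7 inside
 * each pup (5 pups x 8 bits = 40 registers).
 */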
u16 mask_results_dq_reg_map[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
};

u16 mask_results_pup_reg_map[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_3_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG
};

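/*
 * Per-bit map for ECC-on-PUP3 topologies: the PUP 3 slots (and the
 * trailing entries) point at the PUP 4 result registers, apparently
 * because the ECC byte is routed through PUP 4 in this mode.
 */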
u16 mask_results_dq_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
};

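/* Per-byte result map for the same ECC-on-PUP3 remapping */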
u16 mask_results_pup_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_4_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG
};

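/*
 * Pattern descriptors, indexed by enum hws_pattern; the 16-bit table
 * halves the phase counts, burst sizes and pattern lengths of the
 * 32-bit table below.
 */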
struct pattern_info pattern_table_16[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{1, 1, 2, 1, 0x0080, 2},	/* PATTERN_PBS1 */
	{1, 1, 2, 1, 0x00c0, 2},	/* PATTERN_PBS2 */
	{1, 1, 2, 1, 0x0100, 2},	/* PATTERN_RL */
	{0xf, 0x7, 2, 0x7, 0x0140, 16},	/* PATTERN_STATIC_PBS */
	{0xf, 0x7, 2, 0x7, 0x0190, 16},	/* PATTERN_KILLER_DQ0 */
	{0xf, 0x7, 2, 0x7, 0x01d0, 16},	/* PATTERN_KILLER_DQ1 */
	{0xf, 0x7, 2, 0x7, 0x0210, 16},	/* PATTERN_KILLER_DQ2 */
	{0xf, 0x7, 2, 0x7, 0x0250, 16},	/* PATTERN_KILLER_DQ3 */
	{0xf, 0x7, 2, 0x7, 0x0290, 16},	/* PATTERN_KILLER_DQ4 */
	{0xf, 0x7, 2, 0x7, 0x02d0, 16},	/* PATTERN_KILLER_DQ5 */
	{0xf, 0x7, 2, 0x7, 0x0310, 16},	/* PATTERN_KILLER_DQ6 */
	{0xf, 0x7, 2, 0x7, 0x0350, 16},	/* PATTERN_KILLER_DQ7 */
	{1, 1, 2, 1, 0x0380, 2},	/* PATTERN_PBS3 */
	{1, 1, 2, 1, 0x0000, 2},	/* PATTERN_RL2 */
	{1, 1, 2, 1, 0x0040, 2},	/* PATTERN_TEST */
	{0xf, 0x7, 2, 0x7, 0x03c0, 16},	/* PATTERN_FULL_SSO_1T */
	{0xf, 0x7, 2, 0x7, 0x0400, 16},	/* PATTERN_FULL_SSO_2T */
	{0xf, 0x7, 2, 0x7, 0x0440, 16},	/* PATTERN_FULL_SSO_3T */
	{0xf, 0x7, 2, 0x7, 0x0480, 16},	/* PATTERN_FULL_SSO_4T */
	{0xf, 0x7, 2, 0x7, 0x04c0, 16}	/* PATTERN_VREF */
	/* Note: the actual start_address is the defined address << 3 */
};

struct pattern_info pattern_table_32[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{3, 3, 2, 3, 0x0080, 4},	/* PATTERN_PBS1 */
	{3, 3, 2, 3, 0x00c0, 4},	/* PATTERN_PBS2 */
	{3, 3, 2, 3, 0x0100, 4},	/* PATTERN_RL */
	{0x1f, 0xf, 2, 0xf, 0x0140, 32},	/* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x0190, 32},	/* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01d0, 32},	/* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0210, 32},	/* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0250, 32},	/* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0290, 32},	/* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x02d0, 32},	/* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0310, 32},	/* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0350, 32},	/* PATTERN_KILLER_DQ7 */
	{3, 3, 2, 3, 0x0380, 4},	/* PATTERN_PBS3 */
	{3, 3, 2, 3, 0x0000, 4},	/* PATTERN_RL2 */
	{3, 3, 2, 3, 0x0040, 4},	/* PATTERN_TEST */
	{0x1f, 0xf, 2, 0xf, 0x03c0, 32},	/* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x0400, 32},	/* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x0440, 32},	/* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x0480, 32},	/* PATTERN_FULL_SSO_4T */
	{0x1f, 0xf, 2, 0xf, 0x04c0, 32}	/* PATTERN_VREF */
	/* Note: the actual start_address is the defined address << 3 */
};

u32 train_dev_num;
enum hws_ddr_cs traintrain_cs_type;
u32 train_pup_num;
enum hws_training_result train_result_type;
enum hws_control_element train_control_element;
enum hws_search_dir traine_search_dir;
enum hws_dir train_direction;
u32 train_if_select;
u32 train_init_value;
u32 train_number_iterations;
enum hws_pattern train_pattern;
enum hws_edge_compare train_edge_compare;
u32 train_cs_num;
u32 train_if_acess, train_if_id, train_pup_access;
u32 max_polling_for_done = 1000000;

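/*
 * training_res is laid out search-direction major, then interface:
 * each (search, interface) pair owns MAX_BUS_NUM * BUS_WIDTH_IN_BITS
 * consecutive words.
 */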
u32 *ddr3_tip_get_buf_ptr(u32 dev_num, enum hws_search_dir search,
			  enum hws_training_result result_type,
			  u32 interface_num)
{
	u32 *buf_ptr = NULL;

	buf_ptr = &training_res
		[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS * search +
		 interface_num * MAX_BUS_NUM * BUS_WIDTH_IN_BITS];

	return buf_ptr;
}

/*
 * IP Training search
 * Note: for a one-edge search go only from fail to pass, else jitter
 * can be entered into the solution.
 */
int ddr3_tip_ip_training(u32 dev_num, enum hws_access_type access_type,
			 u32 interface_num,
			 enum hws_access_type pup_access_type,
			 u32 pup_num, enum hws_training_result result_type,
			 enum hws_control_element control_element,
			 enum hws_search_dir search_dir, enum hws_dir direction,
			 u32 interface_mask, u32 init_value, u32 num_iter,
			 enum hws_pattern pattern,
			 enum hws_edge_compare edge_comp,
			 enum hws_ddr_cs cs_type, u32 cs_num,
			 enum hws_training_ip_stat *train_status)
{
	u32 mask_dq_num_of_regs, mask_pup_num_of_regs, index_cnt, poll_cnt,
		reg_data, pup_id;
	u32 tx_burst_size;
	u32 delay_between_burst;
	u32 rd_mode;
	u32 read_data[MAX_INTERFACE_NUM];
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (pup_num >= tm->num_of_bus_per_interface) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (interface_num >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n",
					  interface_num));
	}
	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("error param 4\n"));
		return MV_BAD_PARAM;
	}

	/* load pattern */
	if (cs_type == CS_SINGLE) {
		/* All CSs to CS0 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      CS_ENABLE_REG, 1 << 3, 1 << 3));
		/* All CSs to CS0 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CONTROL_REG,
			      (0x3 | (effective_cs << 26)), 0xc000003));
	} else {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      CS_ENABLE_REG, 0, 1 << 3));
		/* CS select */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CONTROL_REG, 0x3 | cs_num << 26,
			      0x3 | 3 << 26));
	}

	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, access_type, interface_num,
				      pattern,
				      pattern_table[pattern].start_addr);
	tx_burst_size = (direction == OPER_WRITE) ?
		pattern_table[pattern].tx_burst_size : 0;
	delay_between_burst = (direction == OPER_WRITE) ? 2 : 0;
	rd_mode = (direction == OPER_WRITE) ? 1 : 0;
	CHECK_STATUS(ddr3_tip_configure_odpg
		     (dev_num, access_type, interface_num, direction,
		      pattern_table[pattern].num_of_phases_tx, tx_burst_size,
		      pattern_table[pattern].num_of_phases_rx,
		      delay_between_burst, rd_mode, effective_cs, STRESS_NONE,
		      DURATION_SINGLE));
	reg_data = (direction == OPER_READ) ? 0 : (0x3 << 30);
	reg_data |= (direction == OPER_READ) ? 0x60 : 0xfa;
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      ODPG_WRITE_READ_MODE_ENABLE_REG, reg_data,
		      MASK_ALL_BITS));
	reg_data = (edge_comp == EDGE_PF || edge_comp == EDGE_FP) ? 0 : 1 << 6;
	reg_data |= (edge_comp == EDGE_PF || edge_comp == EDGE_PFP) ?
		(1 << 7) : 0;

	/* change from Pass to Fail will lock the result */
	if (pup_access_type == ACCESS_TYPE_MULTICAST)
		reg_data |= 0xe << 14;
	else
		reg_data |= pup_num << 14;

	if (edge_comp == EDGE_FP) {
		/* don't search for real edge change, only the state */
		reg_data |= (0 << 20);
	} else if (edge_comp == EDGE_FPF) {
		reg_data |= (0 << 20);
	} else {
		reg_data |= (3 << 20);
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      ODPG_TRAINING_CONTROL_REG,
		      reg_data | (0x7 << 8) | (0x7 << 11),
		      (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
		       (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));
	reg_data = (search_dir == HWS_LOW2HIGH) ? 0 : (1 << 8);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, ODPG_OBJ1_OPCODE_REG,
		      1 | reg_data | init_value << 9 | (1 << 25) | (1 << 26),
		      0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));

	/*
	 * Write2_dunit(0x10b4, Number_iteration, [15:0])
	 * Max number of iterations
	 */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
				       ODPG_OBJ1_ITER_CNT_REG, num_iter,
				       0xffff));
	if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
	    direction == OPER_READ) {
		/*
		 * Write2_dunit(0x10c0, 0x5f, [7:0])
		 * MC PBS Reg Address at DDR PHY
		 */
		reg_data = 0x5f +
			effective_cs * CALIBRATED_OBJECTS_REG_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
		   direction == OPER_WRITE) {
		reg_data = 0x1f +
			effective_cs * CALIBRATED_OBJECTS_REG_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_WRITE) {
		/*
		 * LOOP 0x00000001 + 4*n:
		 * where n (0-3) represents M_CS number
		 */
		/*
		 * Write2_dunit(0x10c0, 0x1, [7:0])
		 * ADLL WR Reg Address at DDR PHY
		 */
		reg_data = 1 + effective_cs * CS_REGISTER_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_READ) {
		/* ADLL RD Reg Address at DDR PHY */
		reg_data = 3 + effective_cs * CS_REGISTER_ADDR_OFFSET;
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_WRITE) {
		/* TBD not defined in 0.5.0 requirement */
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_READ) {
		/* TBD not defined in 0.5.0 requirement */
	}

	reg_data |= (0x6 << 28);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, CALIB_OBJ_PRFA_REG,
		      reg_data | (init_value << 8),
		      0xff | (0xffff << 8) | (0xf << 24) | (u32) (0xf << 28)));

	mask_dq_num_of_regs = tm->num_of_bus_per_interface * BUS_WIDTH_IN_BITS;
	mask_pup_num_of_regs = tm->num_of_bus_per_interface;

	if (result_type == RESULT_PER_BIT) {
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt], 0,
				      1 << 24));
		}

		/* Mask disabled buses */
		for (pup_id = 0; pup_id < tm->num_of_bus_per_interface;
		     pup_id++) {
			if (IS_ACTIVE(tm->bus_act_mask, pup_id) == 1)
				continue;

			for (index_cnt = pup_id * 8;
			     index_cnt < (pup_id + 1) * 8;
			     index_cnt++) {
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type,
					      interface_num,
					      mask_results_dq_reg_map
					      [index_cnt], (1 << 24), 1 << 24));
			}
		}

		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt],
				      (1 << 24), 1 << 24));
		}
	} else if (result_type == RESULT_PER_BYTE) {
		/* write to adll */
		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt], 0,
				      1 << 24));
		}
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt],
				      (1 << 24), (1 << 24)));
		}
	}

	/* Start Training Trigger */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
				       ODPG_TRAINING_TRIGGER_REG, 1, 1));
	/* wait for all RFU tests to finish (or timeout) */
	/* WA for 16 bit mode, more investigation needed */
	mdelay(1);

	/* Training "Done ?" */
	for (index_cnt = 0; index_cnt < MAX_INTERFACE_NUM; index_cnt++) {
		if (IS_ACTIVE(tm->if_act_mask, index_cnt) == 0)
			continue;

		if (interface_mask & (1 << index_cnt)) {
			/* need to check results for this Dunit */
			for (poll_cnt = 0; poll_cnt < max_polling_for_done;
			     poll_cnt++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      index_cnt,
					      ODPG_TRAINING_STATUS_REG,
					      &reg_data, MASK_ALL_BITS));
				if ((reg_data & 0x2) != 0) {
					/* done */
					train_status[index_cnt] =
						HWS_TRAINING_IP_STATUS_SUCCESS;
					break;
				}
			}

			if (poll_cnt == max_polling_for_done) {
				train_status[index_cnt] =
					HWS_TRAINING_IP_STATUS_TIMEOUT;
			}
		}
		/* Make sure that the ODPG is done */
		CHECK_STATUS(is_odpg_access_done(dev_num, index_cnt));
	}

	/* Write ODPG done in Dunit */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_STATUS_DONE_REG, 0, 0x1));

	/* wait for all Dunit tests to finish (or timeout) */
	/* Training "Done ?" */
	/* Training "Pass ?" */
	for (index_cnt = 0; index_cnt < MAX_INTERFACE_NUM; index_cnt++) {
		if (IS_ACTIVE(tm->if_act_mask, index_cnt) == 0)
			continue;

		if (interface_mask & (1 << index_cnt)) {
			/* need to check results for this Dunit */
			for (poll_cnt = 0; poll_cnt < max_polling_for_done;
			     poll_cnt++) {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      index_cnt,
					      ODPG_TRAINING_TRIGGER_REG,
					      read_data, MASK_ALL_BITS));
				reg_data = read_data[index_cnt];
				if ((reg_data & 0x2) != 0) {
					/* done */
					if ((reg_data & 0x4) == 0) {
						train_status[index_cnt] =
							HWS_TRAINING_IP_STATUS_SUCCESS;
					} else {
						train_status[index_cnt] =
							HWS_TRAINING_IP_STATUS_FAIL;
					}
					break;
				}
			}

			if (poll_cnt == max_polling_for_done) {
				train_status[index_cnt] =
					HWS_TRAINING_IP_STATUS_TIMEOUT;
			}
		}
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));

	return MV_OK;
}

/*
 * Load expected Pattern to ODPG
 */
int ddr3_tip_load_pattern_to_odpg(u32 dev_num, enum hws_access_type access_type,
				  u32 if_id, enum hws_pattern pattern,
				  u32 load_addr)
{
	u32 pattern_length_cnt = 0;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();

	for (pattern_length_cnt = 0;
	     pattern_length_cnt < pattern_table[pattern].pattern_len;
	     pattern_length_cnt++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      ODPG_PATTERN_DATA_LOW_REG,
			      pattern_table_get_word(dev_num, pattern,
						     (u8) (pattern_length_cnt *
							   2)), MASK_ALL_BITS));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      ODPG_PATTERN_DATA_HI_REG,
			      pattern_table_get_word(dev_num, pattern,
						     (u8) (pattern_length_cnt *
							   2 + 1)),
			      MASK_ALL_BITS));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      ODPG_PATTERN_ADDR_REG, pattern_length_cnt,
			      MASK_ALL_BITS));
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id,
		      ODPG_PATTERN_ADDR_OFFSET_REG, load_addr, MASK_ALL_BITS));

	return MV_OK;
}

/*
 * Configure ODPG
 */
int ddr3_tip_configure_odpg(u32 dev_num, enum hws_access_type access_type,
			    u32 if_id, enum hws_dir direction, u32 tx_phases,
			    u32 tx_burst_size, u32 rx_phases,
			    u32 delay_between_burst, u32 rd_mode, u32 cs_num,
			    u32 addr_stress_jump, u32 single_pattern)
{
	u32 data_value = 0;
	int ret;

	data_value = ((single_pattern << 2) | (tx_phases << 5) |
		      (tx_burst_size << 11) | (delay_between_burst << 15) |
		      (rx_phases << 21) | (rd_mode << 25) | (cs_num << 26) |
		      (addr_stress_jump << 29));
	ret = ddr3_tip_if_write(dev_num, access_type, if_id,
				ODPG_DATA_CONTROL_REG, data_value, 0xaffffffc);
	if (ret != MV_OK)
		return ret;

	return MV_OK;
}

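/*
 * Process training results: verify that every bit of the byte locked,
 * then reduce the per-bit tap values to a single minimum or maximum
 * edge result.
 */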
int ddr3_tip_process_result(u32 *ar_result, enum hws_edge e_edge,
			    enum hws_edge_search e_edge_search,
			    u32 *edge_result)
{
	u32 i, res;
	int tap_val, max_val = -10000, min_val = 10000;
	int lock_success = 1;

	for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
		res = GET_LOCK_RESULT(ar_result[i]);
		if (res == 0) {
			lock_success = 0;
			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
						 ("lock failed for bit %d\n",
						  i));
			break;
		}
	}

	if (lock_success == 1) {
		for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
			tap_val = GET_TAP_RESULT(ar_result[i], e_edge);
			if (tap_val > max_val)
				max_val = tap_val;
			if (tap_val < min_val)
				min_val = tap_val;
			if (e_edge_search == TRAINING_EDGE_MAX)
				*edge_result = (u32) max_val;
			else
				*edge_result = (u32) min_val;

			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
						 ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d Edge_result %d\n",
						  i, ar_result[i], tap_val,
						  max_val, min_val,
						  *edge_result));
		}
	} else {
		return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Read training search result
 */
int ddr3_tip_read_training_result(u32 dev_num, u32 if_id,
				  enum hws_access_type pup_access_type,
				  u32 pup_num, u32 bit_num,
				  enum hws_search_dir search,
				  enum hws_dir direction,
				  enum hws_training_result result_type,
				  enum hws_training_load_op operation,
				  u32 cs_num_type, u32 **load_res,
				  int is_read_from_db, u8 cons_tap,
				  int is_check_result_validity)
{
	u32 reg_offset, pup_cnt, start_pup, end_pup, start_reg, end_reg;
	u32 *interface_train_res = NULL;
	u16 *reg_addr = NULL;
	u32 read_data[MAX_INTERFACE_NUM];
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * Agreed assumption: all CS masks contain the same number of bits,
	 * i.e. in multi CS, the number of CS per memory is the same for
	 * all pups
	 */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, CS_ENABLE_REG,
		      (cs_num_type == 0) ? 1 << 3 : 0, (1 << 3)));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id,
		      ODPG_DATA_CONTROL_REG, (cs_num_type << 26), (3 << 26)));
	DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
				 ("Read_from_d_b %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
				  is_read_from_db, cs_num_type, operation,
				  result_type, direction, search, pup_num,
				  if_id, pup_access_type));

	if ((load_res == NULL) && (is_read_from_db == 1)) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("ddr3_tip_read_training_result load_res = NULL"));
		return MV_FAIL;
	}
	if (pup_num >= tm->num_of_bus_per_interface) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}
	if (result_type == RESULT_PER_BIT)
		reg_addr = mask_results_dq_reg_map;
	else
		reg_addr = mask_results_pup_reg_map;
	if (pup_access_type == ACCESS_TYPE_UNICAST) {
		start_pup = pup_num;
		end_pup = pup_num;
	} else { /* pup_access_type == ACCESS_TYPE_MULTICAST */
		start_pup = 0;
		end_pup = tm->num_of_bus_per_interface - 1;
	}

	for (pup_cnt = start_pup; pup_cnt <= end_pup; pup_cnt++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, pup_cnt);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
			 if_id, start_pup, end_pup, pup_cnt));
		if (result_type == RESULT_PER_BIT) {
			if (bit_num == ALL_BITS_PER_PUP) {
				start_reg = pup_cnt * BUS_WIDTH_IN_BITS;
				end_reg = (pup_cnt + 1) * BUS_WIDTH_IN_BITS - 1;
			} else {
				start_reg =
					pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
				end_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
			}
		} else {
			start_reg = pup_cnt;
			end_reg = pup_cnt;
		}

		interface_train_res =
			ddr3_tip_get_buf_ptr(dev_num, search, result_type,
					     if_id);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("start_reg %d end_reg %d interface %p\n",
			 start_reg, end_reg, interface_train_res));
		if (interface_train_res == NULL) {
			DEBUG_TRAINING_IP_ENGINE(
				DEBUG_LEVEL_ERROR,
				("interface_train_res is NULL\n"));
			return MV_FAIL;
		}

		for (reg_offset = start_reg; reg_offset <= end_reg;
		     reg_offset++) {
			if (operation == TRAINING_LOAD_OPERATION_UNLOAD) {
				if (is_read_from_db == 0) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      reg_addr[reg_offset],
						      read_data,
						      MASK_ALL_BITS));
					if (is_check_result_validity == 1) {
						if ((read_data[if_id] &
						     0x02000000) == 0) {
							interface_train_res
								[reg_offset] =
								0x02000000 +
								64 + cons_tap;
						} else {
							interface_train_res
								[reg_offset] =
								read_data
								[if_id] +
								cons_tap;
						}
					} else {
						interface_train_res[reg_offset]
							= read_data[if_id] +
							cons_tap;
					}
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("reg_offset %d value 0x%x addr %p\n",
						  reg_offset,
						  interface_train_res
						  [reg_offset],
						  &interface_train_res
						  [reg_offset]));
				} else {
					*load_res =
						&interface_train_res[start_reg];
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("*load_res %p\n", *load_res));
				}
			} else {
				DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
							 ("not supported\n"));
			}
		}
	}

	return MV_OK;
}

/*
 * Load all patterns to memory using ODPG
 */
int ddr3_tip_load_all_pattern_to_mem(u32 dev_num)
{
	u32 pattern = 0, if_id;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		training_result[training_stage][if_id] = TEST_SUCCESS;
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		/* enable single cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, (1 << 3), (1 << 3)));
	}

	for (pattern = 0; pattern < PATTERN_LIMIT; pattern++)
		ddr3_tip_load_pattern_to_mem(dev_num, pattern);

	return MV_OK;
}

/*
 * Wait until ODPG access is ready
 */
int is_odpg_access_done(u32 dev_num, u32 if_id)
{
	u32 poll_cnt = 0, data_value;
	u32 read_data[MAX_INTERFACE_NUM];

	for (poll_cnt = 0; poll_cnt < MAX_POLLING_ITERATIONS; poll_cnt++) {
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      ODPG_BIST_DONE, read_data, MASK_ALL_BITS));
		data_value = read_data[if_id];
		if (((data_value >> ODPG_BIST_DONE_BIT_OFFS) & 0x1) ==
		    ODPG_BIST_DONE_BIT_VALUE) {
			data_value = data_value & 0xfffffffe;
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, ODPG_BIST_DONE, data_value,
				      MASK_ALL_BITS));
			break;
		}
	}

	if (poll_cnt >= MAX_POLLING_ITERATIONS) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("Bist Activate: poll failure 2\n"));
		return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Load a specific pattern to memory using ODPG
 */
int ddr3_tip_load_pattern_to_mem(u32 dev_num, enum hws_pattern pattern)
{
	u32 reg_data, if_id;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* load pattern to memory */
	/*
	 * Write Tx mode, CS0, phases, Tx burst size, delay between burst,
	 * rx pattern phases
	 */
	reg_data =
		0x1 | (pattern_table[pattern].num_of_phases_tx << 5) |
		(pattern_table[pattern].tx_burst_size << 11) |
		(pattern_table[pattern].delay_between_bursts << 15) |
		(pattern_table[pattern].num_of_phases_rx << 21) | (0x1 << 25) |
		(effective_cs << 26);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, reg_data, MASK_ALL_BITS));
	/* ODPG Write enable from BIST */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, (0x1 | (effective_cs << 26)),
		      0xc000003));
	/* disable error injection */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_WRITE_DATA_ERROR_REG, 0, 0x1));
	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE, pattern,
				      pattern_table[pattern].start_addr);

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		if (IS_ACTIVE(tm->if_act_mask, if_id) == 0)
			continue;

		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1498,
			      0x3, 0xf));
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_ENABLE_REG, 0x1 << ODPG_ENABLE_OFFS,
		      (0x1 << ODPG_ENABLE_OFFS)));

	mdelay(1);

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(is_odpg_access_done(dev_num, if_id));
	}

	/* Disable ODPG and stop write to memory */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, (0x1 << 30), (u32) (0x3 << 30)));

	/* return to default */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));

	/* Disable odt0 for CS0 training - need to adjust for multi CS */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x1498,
		      0x0, 0xf));

	/* temporarily added */
	mdelay(1);

	return MV_OK;
}

/*
 * Load a specific pattern to memory using the CPU
 */
int ddr3_tip_load_pattern_to_mem_by_cpu(u32 dev_num, enum hws_pattern pattern,
					u32 offset)
{
	/* eranba - TBD */
	return MV_OK;
}

/*
 * Training search routine
 */
int ddr3_tip_ip_training_wrapper_int(u32 dev_num,
				     enum hws_access_type access_type,
				     u32 if_id,
				     enum hws_access_type pup_access_type,
				     u32 pup_num, u32 bit_num,
				     enum hws_training_result result_type,
				     enum hws_control_element control_element,
				     enum hws_search_dir search_dir,
				     enum hws_dir direction,
				     u32 interface_mask, u32 init_value_l2h,
				     u32 init_value_h2l, u32 num_iter,
				     enum hws_pattern pattern,
				     enum hws_edge_compare edge_comp,
				     enum hws_ddr_cs train_cs_type, u32 cs_num,
				     enum hws_training_ip_stat *train_status)
{
	u32 interface_num = 0, start_if, end_if, init_value_used;
	enum hws_search_dir search_dir_id, start_search, end_search;
	enum hws_edge_compare edge_comp_used;
	u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("train_status is NULL\n"));
		return MV_FAIL;
	}

	if ((train_cs_type > CS_NON_SINGLE) ||
	    (edge_comp >= EDGE_PFP) ||
	    (pattern >= PATTERN_LIMIT) ||
	    (direction > OPER_WRITE_AND_READ) ||
	    (search_dir > HWS_HIGH2LOW) ||
	    (control_element > HWS_CONTROL_ELEMENT_DQS_SKEW) ||
	    (result_type > RESULT_PER_BYTE) ||
	    (pup_num >= tm->num_of_bus_per_interface) ||
	    (pup_access_type > ACCESS_TYPE_MULTICAST) ||
	    (if_id > 11) || (access_type > ACCESS_TYPE_MULTICAST)) {
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_ERROR,
			("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
			 train_cs_type, edge_comp, pattern, direction,
			 search_dir, control_element, result_type, pup_num,
			 pup_access_type, if_id, access_type));
		return MV_FAIL;
	}

	if (edge_comp == EDGE_FPF) {
		start_search = HWS_LOW2HIGH;
		end_search = HWS_HIGH2LOW;
		edge_comp_used = EDGE_FP;
	} else {
		start_search = search_dir;
		end_search = search_dir;
		edge_comp_used = edge_comp;
	}

	for (search_dir_id = start_search; search_dir_id <= end_search;
	     search_dir_id++) {
		init_value_used = (search_dir_id == HWS_LOW2HIGH) ?
			init_value_l2h : init_value_h2l;
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("dev_num %d, access_type %d, if_id %d, pup_access_type %d, pup_num %d, result_type %d, control_element %d, search_dir_id %d, direction %d, interface_mask %d, init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
			 dev_num, access_type, if_id, pup_access_type, pup_num,
			 result_type, control_element, search_dir_id,
			 direction, interface_mask, init_value_used, num_iter,
			 pattern, edge_comp_used, train_cs_type, cs_num));

		ddr3_tip_ip_training(dev_num, access_type, if_id,
				     pup_access_type, pup_num, result_type,
				     control_element, search_dir_id, direction,
				     interface_mask, init_value_used, num_iter,
				     pattern, edge_comp_used, train_cs_type,
				     cs_num, train_status);
		if (access_type == ACCESS_TYPE_MULTICAST) {
			start_if = 0;
			end_if = MAX_INTERFACE_NUM - 1;
		} else {
			start_if = if_id;
			end_if = if_id;
		}

		for (interface_num = start_if; interface_num <= end_if;
		     interface_num++) {
			VALIDATE_ACTIVE(tm->if_act_mask, interface_num);
			cs_num = 0;
			CHECK_STATUS(ddr3_tip_read_training_result
				     (dev_num, interface_num, pup_access_type,
				      pup_num, bit_num, search_dir_id,
				      direction, result_type,
				      TRAINING_LOAD_OPERATION_UNLOAD,
				      train_cs_type, NULL, 0, cons_tap,
				      0));
		}
	}

	return MV_OK;
}

/*
 * Training search & read result routine
 */
int ddr3_tip_ip_training_wrapper(u32 dev_num, enum hws_access_type access_type,
				 u32 if_id,
				 enum hws_access_type pup_access_type,
				 u32 pup_num,
				 enum hws_training_result result_type,
				 enum hws_control_element control_element,
				 enum hws_search_dir search_dir,
				 enum hws_dir direction, u32 interface_mask,
				 u32 init_value_l2h, u32 init_value_h2l,
				 u32 num_iter, enum hws_pattern pattern,
				 enum hws_edge_compare edge_comp,
				 enum hws_ddr_cs train_cs_type, u32 cs_num,
				 enum hws_training_ip_stat *train_status)
{
	u8 e1, e2;
	u32 interface_cnt, bit_id, start_if, end_if, bit_end = 0;
	u32 *result[HWS_SEARCH_DIR_LIMIT] = { 0 };
	u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
	u8 bit_bit_mask[MAX_BUS_NUM] = { 0 }, bit_bit_mask_active = 0;
	u8 pup_id;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (pup_num >= tm->num_of_bus_per_interface) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}

	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}

	CHECK_STATUS(ddr3_tip_ip_training_wrapper_int
		     (dev_num, access_type, if_id, pup_access_type, pup_num,
		      ALL_BITS_PER_PUP, result_type, control_element,
		      search_dir, direction, interface_mask, init_value_l2h,
		      init_value_h2l, num_iter, pattern, edge_comp,
		      train_cs_type, cs_num, train_status));

	if (access_type == ACCESS_TYPE_MULTICAST) {
		start_if = 0;
		end_if = MAX_INTERFACE_NUM - 1;
	} else {
		start_if = if_id;
		end_if = if_id;
	}

	for (interface_cnt = start_if; interface_cnt <= end_if;
	     interface_cnt++) {
		VALIDATE_ACTIVE(tm->if_act_mask, interface_cnt);
		for (pup_id = 0;
		     pup_id <= (tm->num_of_bus_per_interface - 1); pup_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);
			if (result_type == RESULT_PER_BIT)
				bit_end = BUS_WIDTH_IN_BITS - 1;
			else
				bit_end = 0;

			bit_bit_mask[pup_id] = 0;
			for (bit_id = 0; bit_id <= bit_end; bit_id++) {
				enum hws_search_dir search_dir_id;
				for (search_dir_id = HWS_LOW2HIGH;
				     search_dir_id <= HWS_HIGH2LOW;
				     search_dir_id++) {
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, interface_cnt,
						  ACCESS_TYPE_UNICAST, pup_id,
						  bit_id, search_dir_id,
						  direction, result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE,
						  &result[search_dir_id],
						  1, 0, 0));
				}
				e1 = GET_TAP_RESULT(result[HWS_LOW2HIGH][0],
						    EDGE_1);
				e2 = GET_TAP_RESULT(result[HWS_HIGH2LOW][0],
						    EDGE_1);
				DEBUG_TRAINING_IP_ENGINE(
					DEBUG_LEVEL_INFO,
					("wrapper if_id %d pup_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
					 interface_cnt, pup_id, bit_id,
					 result[HWS_LOW2HIGH][0], e1,
					 result[HWS_HIGH2LOW][0], e2));
				/* TBD validate is valid only for tx */
				if (VALIDATE_TRAINING_LIMIT(e1, e2) == 1 &&
				    GET_LOCK_RESULT(result[HWS_LOW2HIGH][0]) &&
				    GET_LOCK_RESULT(result[HWS_HIGH2LOW][0])) {
					/* Mark problem bits */
					bit_bit_mask[pup_id] |= 1 << bit_id;
					bit_bit_mask_active = 1;
				}
			} /* For all bits */
		} /* For all PUPs */

		/* Fix problem bits */
		if (bit_bit_mask_active != 0) {
			u32 *l2h_if_train_res = NULL;
			u32 *h2l_if_train_res = NULL;
			l2h_if_train_res =
				ddr3_tip_get_buf_ptr(dev_num, HWS_LOW2HIGH,
						     result_type,
						     interface_cnt);
			h2l_if_train_res =
				ddr3_tip_get_buf_ptr(dev_num, HWS_HIGH2LOW,
						     result_type,
						     interface_cnt);

			ddr3_tip_ip_training(dev_num, ACCESS_TYPE_UNICAST,
					     interface_cnt,
					     ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE, result_type,
					     control_element, HWS_LOW2HIGH,
					     direction, interface_mask,
					     num_iter / 2, num_iter / 2,
					     pattern, EDGE_FP, train_cs_type,
					     cs_num, train_status);

			for (pup_id = 0;
			     pup_id <= (tm->num_of_bus_per_interface - 1);
			     pup_id++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);

				if (bit_bit_mask[pup_id] == 0)
					continue;

				for (bit_id = 0; bit_id <= bit_end; bit_id++) {
					if ((bit_bit_mask[pup_id] &
					     (1 << bit_id)) == 0)
						continue;
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, interface_cnt,
						  ACCESS_TYPE_UNICAST, pup_id,
						  bit_id, HWS_LOW2HIGH,
						  direction,
						  result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE, &l2h_if_train_res,
						  0, 0, 1));
				}
			}

			ddr3_tip_ip_training(dev_num, ACCESS_TYPE_UNICAST,
					     interface_cnt,
					     ACCESS_TYPE_MULTICAST,
					     PARAM_NOT_CARE, result_type,
					     control_element, HWS_HIGH2LOW,
					     direction, interface_mask,
					     num_iter / 2, num_iter / 2,
					     pattern, EDGE_FP, train_cs_type,
					     cs_num, train_status);

			for (pup_id = 0;
			     pup_id <= (tm->num_of_bus_per_interface - 1);
			     pup_id++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, pup_id);

				if (bit_bit_mask[pup_id] == 0)
					continue;

				for (bit_id = 0; bit_id <= bit_end; bit_id++) {
					if ((bit_bit_mask[pup_id] &
					     (1 << bit_id)) == 0)
						continue;
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, interface_cnt,
						  ACCESS_TYPE_UNICAST, pup_id,
						  bit_id, HWS_HIGH2LOW, direction,
						  result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE, &h2l_if_train_res,
						  0, cons_tap, 1));
				}
			}
		} /* if bit_bit_mask_active */
	} /* For all interfaces */

	return MV_OK;
}

/*
 * Load PHY values: b_load == 1 backs the registers up into phy_reg_bk,
 * any other value restores them from the backup
 */
int ddr3_tip_load_phy_values(int b_load)
{
	u32 bus_cnt = 0, if_id, dev_num = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
		     bus_cnt++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
			if (b_load == 1) {
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      WRITE_CENTRALIZATION_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      &phy_reg_bk[if_id][bus_cnt]
					      [0]));
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      RL_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      &phy_reg_bk[if_id][bus_cnt]
					      [1]));
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      READ_CENTRALIZATION_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      &phy_reg_bk[if_id][bus_cnt]
					      [2]));
			} else {
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      WRITE_CENTRALIZATION_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      phy_reg_bk[if_id][bus_cnt]
					      [0]));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      RL_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      phy_reg_bk[if_id][bus_cnt]
					      [1]));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      READ_CENTRALIZATION_PHY_REG +
					      (effective_cs *
					       CS_REGISTER_ADDR_OFFSET),
					      phy_reg_bk[if_id][bus_cnt]
					      [2]));
			}
		}
	}

	return MV_OK;
}

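/*
 * Exercise the training IP across a range of patterns and both search
 * directions and dump the per-pup results; the affected PHY registers
 * are backed up first and restored at the end.
 */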
int ddr3_tip_training_ip_test(u32 dev_num, enum hws_training_result result_type,
			      enum hws_search_dir search_dir,
			      enum hws_dir direction,
			      enum hws_edge_compare edge,
			      u32 init_val1, u32 init_val2,
			      u32 num_of_iterations,
			      u32 start_pattern, u32 end_pattern)
{
	u32 pattern, if_id, pup_id;
	enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
	u32 *res = NULL;
	u32 search_state = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	ddr3_tip_load_phy_values(1);

	for (pattern = start_pattern; pattern <= end_pattern; pattern++) {
		for (search_state = 0; search_state < HWS_SEARCH_DIR_LIMIT;
		     search_state++) {
			ddr3_tip_ip_training_wrapper(dev_num,
						     ACCESS_TYPE_MULTICAST, 0,
						     ACCESS_TYPE_MULTICAST, 0,
						     result_type,
						     HWS_CONTROL_ELEMENT_ADLL,
						     search_dir, direction,
						     0xfff, init_val1,
						     init_val2,
						     num_of_iterations, pattern,
						     edge, CS_SINGLE,
						     PARAM_NOT_CARE,
						     train_status);

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				for (pup_id = 0; pup_id <
				     tm->num_of_bus_per_interface;
				     pup_id++) {
					VALIDATE_ACTIVE(tm->bus_act_mask,
							pup_id);
					CHECK_STATUS
						(ddr3_tip_read_training_result
						 (dev_num, if_id,
						  ACCESS_TYPE_UNICAST, pup_id,
						  ALL_BITS_PER_PUP,
						  search_state,
						  direction, result_type,
						  TRAINING_LOAD_OPERATION_UNLOAD,
						  CS_SINGLE, &res, 1, 0,
						  0));
					if (result_type == RESULT_PER_BYTE) {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x\n",
							  search_state, if_id,
							  pup_id, res[0]));
					} else {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
							  search_state, if_id,
							  pup_id, res[0],
							  res[1], res[2],
							  res[3], res[4],
							  res[5], res[6],
							  res[7]));
					}
				}
			} /* interface */
		} /* search */
	} /* pattern */

	ddr3_tip_load_phy_values(0);

	return MV_OK;
}

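/* Select the pattern table matching the DRAM bus width */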
struct pattern_info *ddr3_tip_get_pattern_table(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 0)
		return pattern_table_32;
	else
		return pattern_table_16;
}

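/* Select the per-bit result register map, honoring ECC-on-PUP3 mode */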
u16 *ddr3_tip_get_mask_results_dq_reg(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
		return mask_results_dq_reg_map_pup3_ecc;
	else
		return mask_results_dq_reg_map;
}

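/* Select the per-byte result register map, honoring ECC-on-PUP3 mode */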
u16 *ddr3_tip_get_mask_results_pup_reg_map(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
		return mask_results_pup_reg_map_pup3_ecc;
	else
		return mask_results_pup_reg_map;
}