/* drivers/ddr/marvell/a38x/ddr3_init.c */

/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#include "../../../../arch/arm/mach-mvebu/serdes/a38x/sys_env_lib.h"

static struct dlb_config ddr3_dlb_config_table[] = {
	{REG_STATIC_DRAM_DLB_CONTROL, 0x2000005c},
	{DLB_BUS_OPTIMIZATION_WEIGHTS_REG, 0x00880000},
	{DLB_AGING_REGISTER, 0x0f7f007f},
	{DLB_EVICTION_CONTROL_REG, 0x0000129f},
	{DLB_EVICTION_TIMERS_REGISTER_REG, 0x00ff0000},
	{DLB_BUS_WEIGHTS_DIFF_CS, 0x04030802},
	{DLB_BUS_WEIGHTS_DIFF_BG, 0x00000a02},
	{DLB_BUS_WEIGHTS_SAME_BG, 0x09000a01},
	{DLB_BUS_WEIGHTS_RD_WR, 0x00020005},
	{DLB_BUS_WEIGHTS_ATTR_SYS_PRIO, 0x00060f10},
	{DLB_MAIN_QUEUE_MAP, 0x00000543},
	{DLB_LINE_SPLIT, 0x00000000},
	{DLB_USER_COMMAND_REG, 0x00000000},
	{0x0, 0x0}
};

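/*
 * DLB configuration selected by sys_env_dlb_config_ptr_get() for A38x A0
 * silicon and for A39x. Note: its values currently match
 * ddr3_dlb_config_table above.
 */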
static struct dlb_config ddr3_dlb_config_table_a0[] = {
	{REG_STATIC_DRAM_DLB_CONTROL, 0x2000005c},
	{DLB_BUS_OPTIMIZATION_WEIGHTS_REG, 0x00880000},
	{DLB_AGING_REGISTER, 0x0f7f007f},
	{DLB_EVICTION_CONTROL_REG, 0x0000129f},
	{DLB_EVICTION_TIMERS_REGISTER_REG, 0x00ff0000},
	{DLB_BUS_WEIGHTS_DIFF_CS, 0x04030802},
	{DLB_BUS_WEIGHTS_DIFF_BG, 0x00000a02},
	{DLB_BUS_WEIGHTS_SAME_BG, 0x09000a01},
	{DLB_BUS_WEIGHTS_RD_WR, 0x00020005},
	{DLB_BUS_WEIGHTS_ATTR_SYS_PRIO, 0x00060f10},
	{DLB_MAIN_QUEUE_MAP, 0x00000543},
	{DLB_LINE_SPLIT, 0x00000000},
	{DLB_USER_COMMAND_REG, 0x00000000},
	{0x0, 0x0}
};

#if defined(CONFIG_ARMADA_38X)
struct dram_modes {
	char *mode_name;
	u8 cpu_freq;
	u8 fab_freq;
	u8 chip_id;
	u8 chip_board_rev;
	struct reg_data *regs;
};

struct dram_modes ddr_modes[] = {
#ifdef SUPPORT_STATIC_DUNIT_CONFIG
	/* Conf name, CPU freq, Fab freq, Chip ID, Chip/Board, MC regs */
#ifdef CONFIG_CUSTOMER_BOARD_SUPPORT
	{"a38x_customer_0_800", DDR_FREQ_800, 0, 0x0, A38X_CUSTOMER_BOARD_ID0,
	 ddr3_customer_800},
	{"a38x_customer_1_800", DDR_FREQ_800, 0, 0x0, A38X_CUSTOMER_BOARD_ID1,
	 ddr3_customer_800},
#else
	{"a38x_533", DDR_FREQ_533, 0, 0x0, MARVELL_BOARD, ddr3_a38x_533},
	{"a38x_667", DDR_FREQ_667, 0, 0x0, MARVELL_BOARD, ddr3_a38x_667},
	{"a38x_800", DDR_FREQ_800, 0, 0x0, MARVELL_BOARD, ddr3_a38x_800},
	{"a38x_933", DDR_FREQ_933, 0, 0x0, MARVELL_BOARD, ddr3_a38x_933},
#endif
#endif
};
#endif /* defined(CONFIG_ARMADA_38X) */

/* Translates topology map definitions to real memory size in bits */
u32 mem_size[] = {
	ADDR_SIZE_512MB, ADDR_SIZE_1GB, ADDR_SIZE_2GB, ADDR_SIZE_4GB,
	ADDR_SIZE_8GB
};

static char *ddr_type = "DDR3";

/*
 * Set to 1 to use dynamic DUNIT configuration;
 * set to 0 (supported for A380 and AC3) to configure the DUNIT with the
 * values set by ddr3_tip_init_specific_reg_config.
 */
u8 generic_init_controller = 1;

#ifdef SUPPORT_STATIC_DUNIT_CONFIG
static u32 ddr3_get_static_ddr_mode(void);
#endif
static int ddr3_hws_tune_training_params(u8 dev_num);
static int ddr3_update_topology_map(struct hws_topology_map *topology_map);

/* device revision */
#define DEV_VERSION_ID_REG		0x1823c
#define REVISON_ID_OFFS			8
#define REVISON_ID_MASK			0xf00

/* A38x revisions */
#define MV_88F68XX_Z1_ID		0x0
#define MV_88F68XX_A0_ID		0x4
/* A39x revisions */
#define MV_88F69XX_Z1_ID		0x2

/*
 * sys_env_device_rev_get - Get Marvell controller device revision number
 *
 * DESCRIPTION:
 *	This function returns an 8-bit value describing the device revision
 *	as defined in the Revision ID Register.
 *
 * INPUT:
 *	None.
 *
 * OUTPUT:
 *	None.
 *
 * RETURN:
 *	8-bit value describing the Marvell controller revision number
 */
u8 sys_env_device_rev_get(void)
{
	u32 value;

	value = reg_read(DEV_VERSION_ID_REG);
	return (value & (REVISON_ID_MASK)) >> REVISON_ID_OFFS;
}

/*
 * sys_env_dlb_config_ptr_get
 *
 * DESCRIPTION: returns a pointer to the DLB configuration table
 *
 * INPUT: none
 *
 * OUTPUT: pointer to the DLB configuration table
 *
 * RETURN:
 *	returns a pointer to the DLB configuration table
 */
struct dlb_config *sys_env_dlb_config_ptr_get(void)
{
#ifdef CONFIG_ARMADA_39X
	return &ddr3_dlb_config_table_a0[0];
#else
	if (sys_env_device_rev_get() == MV_88F68XX_A0_ID)
		return &ddr3_dlb_config_table_a0[0];
	else
		return &ddr3_dlb_config_table[0];
#endif
}

/*
 * sys_env_get_cs_ena_from_reg
 *
 * DESCRIPTION: Get bit mask of enabled CS
 *
 * INPUT: None
 *
 * OUTPUT: None
 *
 * RETURN:
 *	Bit mask of enabled CS, 1 if only CS0 enabled,
 *	3 if both CS0 and CS1 enabled
 */
u32 sys_env_get_cs_ena_from_reg(void)
{
	return reg_read(REG_DDR3_RANK_CTRL_ADDR) &
		REG_DDR3_RANK_CTRL_CS_ENA_MASK;
}

static void ddr3_restore_and_set_final_windows(u32 *win)
{
	u32 win_ctrl_reg, num_of_win_regs;
	u32 cs_ena = sys_env_get_cs_ena_from_reg();
	u32 ui;

	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
	num_of_win_regs = 16;

	/* Restore XBAR windows 4-7 or 16-19 initial configuration */
	for (ui = 0; ui < num_of_win_regs; ui++)
		reg_write((win_ctrl_reg + 0x4 * ui), win[ui]);

	printf("%s Training Sequence - Switching XBAR Window to FastPath Window\n",
	       ddr_type);

#if defined DYNAMIC_CS_SIZE_CONFIG
	if (ddr3_fast_path_dynamic_cs_size_config(cs_ena) != MV_OK)
		printf("ddr3_fast_path_dynamic_cs_size_config FAILED\n");
#else
	u32 reg, cs;
	reg = 0x1fffffe1;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg |= (cs << 2);
			break;
		}
	}
	/* Open fast path window to 0.5G */
	reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, reg);
#endif
}

static int ddr3_save_and_set_training_windows(u32 *win)
{
	u32 cs_ena;
	u32 reg, tmp_count, cs, ui;
	u32 win_ctrl_reg, win_base_reg, win_remap_reg;
	u32 num_of_win_regs, win_jump_index;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
	win_base_reg = REG_XBAR_WIN_4_BASE_ADDR;
	win_remap_reg = REG_XBAR_WIN_4_REMAP_ADDR;
	win_jump_index = 0x10;
	num_of_win_regs = 16;

#ifdef DISABLE_L2_FILTERING_DURING_DDR_TRAINING
	/*
	 * Disable L2 filtering during DDR training
	 * (when Cross Bar window is open)
	 */
	reg_write(ADDRESS_FILTERING_END_REGISTER, 0);
#endif

	cs_ena = tm->interface_params[0].as_bus_params[0].cs_bitmask;

	/* Close XBAR Window 19 - Not needed */
	/* {0x000200e8} - Open Mbus Window - 2G */
	reg_write(REG_XBAR_WIN_19_CTRL_ADDR, 0);

	/* Save XBAR Windows 4-19 init configurations */
	for (ui = 0; ui < num_of_win_regs; ui++)
		win[ui] = reg_read(win_ctrl_reg + 0x4 * ui);

	/* Open XBAR Windows 4-7 or 16-19 for other CS */
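	/*
	 * Per-CS window control values programmed below:
	 * CS0 -> 0x0e00, CS1 -> 0x0d00, CS2 -> 0x0b00, CS3 -> 0x0700
	 * (the value selects the target DRAM chip select for the window).
	 */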
	reg = 0;
	tmp_count = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			switch (cs) {
			case 0:
				reg = 0x0e00;
				break;
			case 1:
				reg = 0x0d00;
				break;
			case 2:
				reg = 0x0b00;
				break;
			case 3:
				reg = 0x0700;
				break;
			}
			reg |= (1 << 0);
			reg |= (SDRAM_CS_SIZE & 0xffff0000);

			reg_write(win_ctrl_reg + win_jump_index * tmp_count,
				  reg);
			reg = (((SDRAM_CS_SIZE + 1) * (tmp_count)) &
			       0xffff0000);
			reg_write(win_base_reg + win_jump_index * tmp_count,
				  reg);

			if (win_remap_reg <= REG_XBAR_WIN_7_REMAP_ADDR)
				reg_write(win_remap_reg +
					  win_jump_index * tmp_count, 0);

			tmp_count++;
		}
	}

	return MV_OK;
}

/*
 * Name: ddr3_init - Main DDR3 Init function
 * Desc: This routine initializes the DDR3 MC and runs HW training.
 * Args: None.
 * Notes:
 * Returns: MV_OK on success, error code otherwise.
 */
int ddr3_init(void)
{
	u32 reg = 0;
	u32 soc_num;
	int status;
	u32 win[16];

	/* SoC/Board special initializations */
	/* Get version from internal library */
	ddr3_print_version();

	/* Add sub_version string */
	DEBUG_INIT_C("", SUB_VERSION, 1);

	/* Switching CPU to MRVL ID */
	soc_num = (reg_read(REG_SAMPLE_RESET_HIGH_ADDR) & SAR1_CPU_CORE_MASK) >>
		SAR1_CPU_CORE_OFFSET;
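	/*
	 * The cases below intentionally fall through: a higher core count
	 * sets the Marvell ID bit for every lower-numbered CPU as well.
	 */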
	switch (soc_num) {
	case 0x3:
		reg_bit_set(CPU_CONFIGURATION_REG(3), CPU_MRVL_ID_OFFSET);
		reg_bit_set(CPU_CONFIGURATION_REG(2), CPU_MRVL_ID_OFFSET);
		/* fall through */
	case 0x1:
		reg_bit_set(CPU_CONFIGURATION_REG(1), CPU_MRVL_ID_OFFSET);
		/* fall through */
	case 0x0:
		reg_bit_set(CPU_CONFIGURATION_REG(0), CPU_MRVL_ID_OFFSET);
		/* fall through */
	default:
		break;
	}

	/*
	 * Set DRAM Reset Mask in case detected GPIO indication of wakeup from
	 * suspend, i.e. the DRAM values will not be overwritten / reset when
	 * waking from suspend
	 */
	if (sys_env_suspend_wakeup_check() ==
	    SUSPEND_WAKEUP_ENABLED_GPIO_DETECTED) {
		reg_bit_set(REG_SDRAM_INIT_CTRL_ADDR,
			    1 << REG_SDRAM_INIT_RESET_MASK_OFFS);
	}

	/*
	 * Stage 0 - Set board configuration
	 */

	/* Check if DRAM is already initialized */
	if (reg_read(REG_BOOTROM_ROUTINE_ADDR) &
	    (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS)) {
		printf("%s Training Sequence - 2nd boot - Skip\n", ddr_type);
		return MV_OK;
	}

	/*
	 * Stage 1 - Dunit Setup
	 */

	/* Fix read ready phases for all SoCs in reg 0x15c8 */
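	/* Phases 0-1 are set to 0x4, phases 3-5 to 0x6; phase 2 is left unchanged */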
	reg = reg_read(REG_TRAINING_DEBUG_3_ADDR);
	reg &= ~(REG_TRAINING_DEBUG_3_MASK);
	reg |= 0x4;	/* Phase 0 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << REG_TRAINING_DEBUG_3_OFFS);
	reg |= (0x4 << (1 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 1 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (3 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (3 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 3 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (4 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (4 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 4 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (5 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (5 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 5 */
	reg_write(REG_TRAINING_DEBUG_3_ADDR, reg);

	/*
	 * Axi_bresp_mode[8] = Compliant,
	 * Axi_addr_decode_cntrl[11] = Internal,
	 * Axi_data_bus_width[0] = 128bit
	 */
	/* 0x14a8 - AXI Control Register */
	reg_write(REG_DRAM_AXI_CTRL_ADDR, 0);

	/*
	 * Stage 2 - Training Values Setup
	 */
	/* Set X-BAR windows for the training sequence */
	ddr3_save_and_set_training_windows(win);

#ifdef SUPPORT_STATIC_DUNIT_CONFIG
	/*
	 * Load static controller configuration (in case dynamic/generic init
	 * is not enabled)
	 */
	if (generic_init_controller == 0) {
		ddr3_tip_init_specific_reg_config(0,
			ddr_modes[ddr3_get_static_ddr_mode()].regs);
	}
#endif

	/* Load topology for New Training IP */
	status = ddr3_load_topology_map();
	if (MV_OK != status) {
		printf("%s Training Sequence topology load - FAILED\n",
		       ddr_type);
		return status;
	}

	/* Tune training algorithm parameters */
	status = ddr3_hws_tune_training_params(0);
	if (MV_OK != status)
		return status;

	/* Set log level for training lib */
	ddr3_hws_set_log_level(DEBUG_BLOCK_ALL, DEBUG_LEVEL_ERROR);

	/* Start New Training IP */
	status = ddr3_hws_hw_training();
	if (MV_OK != status) {
		printf("%s Training Sequence - FAILED\n", ddr_type);
		return status;
	}

	/*
	 * Stage 3 - Finish
	 */
	/* Restore and set windows */
	ddr3_restore_and_set_final_windows(win);

	/* Update DRAM init indication in bootROM register */
	reg = reg_read(REG_BOOTROM_ROUTINE_ADDR);
	reg_write(REG_BOOTROM_ROUTINE_ADDR,
		  reg | (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS));

	/* DLB config */
	ddr3_new_tip_dlb_config();

#if defined(ECC_SUPPORT)
	if (ddr3_if_ecc_enabled())
		ddr3_new_tip_ecc_scrub();
#endif

	printf("%s Training Sequence - Ended Successfully\n", ddr_type);

	return MV_OK;
}

/*
 * Name: ddr3_get_cpu_freq
 * Desc: read S@R and return CPU frequency
 * Args:
 * Notes:
 * Returns: required value
 */
u32 ddr3_get_cpu_freq(void)
{
	return ddr3_tip_get_init_freq();
}

/*
 * Name: ddr3_get_fab_opt
 * Desc: read S@R and return the fabric frequency option
 * Args:
 * Notes: this device has no fabric, so 0 is always returned
 * Returns: required value
 */
u32 ddr3_get_fab_opt(void)
{
	return 0;	/* No fabric */
}

/*
 * Name: ddr3_get_static_mc_value
 * Desc: Read a memory controller register and extract one or two masked
 *       bit fields from it.
 * Args: reg_addr - register address
 *       offset1, mask1 - first field to extract
 *       offset2, mask2 - optional second field (pass mask2 == 0 to skip)
 * Notes:
 * Returns: the extracted field(s), OR'ed together.
 */
u32 ddr3_get_static_mc_value(u32 reg_addr, u32 offset1, u32 mask1,
			     u32 offset2, u32 mask2)
{
	u32 reg, temp;

	reg = reg_read(reg_addr);

	temp = (reg >> offset1) & mask1;
	if (mask2)
		temp |= (reg >> offset2) & mask2;

	return temp;
}

/*
 * Name: ddr3_get_static_ddr_mode
 * Desc: Find the ddr_modes[] entry matching the current CPU frequency,
 *       fabric option and chip/board revision.
 * Args: None.
 * Notes: Used when the controller is initialized from a static register
 *        set instead of the HW training procedure.
 * Returns: index into ddr_modes[] (0 if no match was found).
 */
u32 ddr3_get_static_ddr_mode(void)
{
	u32 chip_board_rev, i;
	u32 size;

	/* Valid for A380 only; MSYS uses dynamic controller config */
#ifdef CONFIG_CUSTOMER_BOARD_SUPPORT
	/*
	 * Customer boards select DDR mode according to
	 * board ID & Sample@Reset
	 */
	chip_board_rev = mv_board_id_get();
#else
	/* Marvell boards select DDR mode according to Sample@Reset only */
	chip_board_rev = MARVELL_BOARD;
#endif

	size = ARRAY_SIZE(ddr_modes);
	for (i = 0; i < size; i++) {
		if ((ddr3_get_cpu_freq() == ddr_modes[i].cpu_freq) &&
		    (ddr3_get_fab_opt() == ddr_modes[i].fab_freq) &&
		    (chip_board_rev == ddr_modes[i].chip_board_rev))
			return i;
	}

	DEBUG_INIT_S("\n*** Error: ddr3_get_static_ddr_mode: No match for requested DDR mode. ***\n\n");

	return 0;
}

/******************************************************************************
 * Name: ddr3_get_cs_num_from_reg
 * Desc: Count the number of enabled chip selects.
 * Args: None.
 * Notes:
 * Returns: Number of enabled CS.
 */
u32 ddr3_get_cs_num_from_reg(void)
{
	u32 cs_ena = sys_env_get_cs_ena_from_reg();
	u32 cs_count = 0;
	u32 cs;

	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs))
			cs_count++;
	}

	return cs_count;
}

/*
 * Name: ddr3_load_topology_map
 * Desc: Load the DDR topology map and, if enabled, update it from the
 *       TWSI / S@R information.
 * Args: None.
 * Notes:
 * Returns: MV_OK
 */
int ddr3_load_topology_map(void)
{
	struct hws_topology_map *tm = ddr3_get_topology_map();

#if defined(MV_DDR_TOPOLOGY_UPDATE_FROM_TWSI)
	/* Update topology data */
	if (MV_OK != ddr3_update_topology_map(tm)) {
		DEBUG_INIT_FULL_S("Failed update of DDR3 Topology map\n");
	}
#endif

	return MV_OK;
}

void get_target_freq(u32 freq_mode, u32 *ddr_freq, u32 *hclk_ps)
{
	u32 tmp, hclk = 200;

	switch (freq_mode) {
	case 4:
		tmp = 1;	/* DDR_400; */
		hclk = 200;
		break;
	case 0x8:
		tmp = 1;	/* DDR_666; */
		hclk = 333;
		break;
	case 0xc:
		tmp = 1;	/* DDR_800; */
		hclk = 400;
		break;
	default:
		/*
		 * Unknown freq_mode: report zero and return here so the
		 * outputs are not overwritten below (tmp is not set in
		 * this path).
		 */
		*ddr_freq = 0;
		*hclk_ps = 0;
		return;
	}

	*ddr_freq = tmp;		/* DDR freq define */
	*hclk_ps = 1000000 / hclk;	/* values are 1/HCLK in ps */
}

void ddr3_new_tip_dlb_config(void)
{
	u32 reg, i = 0;
	struct dlb_config *config_table_ptr = sys_env_dlb_config_ptr_get();

	/* Write the configuration */
	while (config_table_ptr[i].reg_addr != 0) {
		reg_write(config_table_ptr[i].reg_addr,
			  config_table_ptr[i].reg_data);
		i++;
	}

	/* Enable DLB */
	reg = reg_read(REG_STATIC_DRAM_DLB_CONTROL);
	reg |= DLB_ENABLE | DLB_WRITE_COALESING | DLB_AXI_PREFETCH_EN |
	       DLB_MBUS_PREFETCH_EN | PREFETCH_N_LN_SZ_TR;
	reg_write(REG_STATIC_DRAM_DLB_CONTROL, reg);
}

int ddr3_fast_path_dynamic_cs_size_config(u32 cs_ena)
{
	u32 reg, cs;
	u32 mem_total_size = 0;
	u32 cs_mem_size = 0;
	u32 mem_total_size_c, cs_mem_size_c;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	u32 physical_mem_size;
	u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
	struct hws_topology_map *tm = ddr3_get_topology_map();
#endif

	/* Open fast path windows */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			/* get CS size */
			if (ddr3_calc_mem_cs_size(cs, &cs_mem_size) != MV_OK)
				return MV_FAIL;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
			/*
			 * If the number of address pins doesn't allow using
			 * the max memory size defined in the topology, the
			 * memory size is capped to
			 * DEVICE_MAX_DRAM_ADDRESS_SIZE
			 */
			physical_mem_size = mem_size
				[tm->interface_params[0].memory_size];

			if (ddr3_get_device_width(cs) == 16) {
				/*
				 * A 16-bit memory device can be twice as
				 * large - the least significant address pin
				 * is not needed
				 */
				max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
			}

			if (physical_mem_size > max_mem_size) {
				cs_mem_size = max_mem_size *
					(ddr3_get_bus_width() /
					 ddr3_get_device_width(cs));
				printf("Updated physical memory size from 0x%x to 0x%x\n",
				       physical_mem_size,
				       DEVICE_MAX_DRAM_ADDRESS_SIZE);
			}
#endif

			/* Set fast path window control for the cs */
			reg = 0xffffe1;
			reg |= (cs << 2);
			reg |= (cs_mem_size - 1) & 0xffff0000;
			/* Open fast path window */
			reg_write(REG_FASTPATH_WIN_CTRL_ADDR(cs), reg);

			/* Set fast path window base address for the cs */
			reg = ((cs_mem_size) * cs) & 0xffff0000;
			/* Set base address */
			reg_write(REG_FASTPATH_WIN_BASE_ADDR(cs), reg);

			/*
			 * Since memory size may be bigger than 4G the sum may
			 * exceed a 32-bit word, so to estimate the result
			 * divide mem_total_size and cs_mem_size by 0x10000
			 * (i.e. shift right by 16)
			 */
			mem_total_size_c = mem_total_size >> 16;
			cs_mem_size_c = cs_mem_size >> 16;
			/* if the sum still fits in 32 bits - calculate the value */
			if (mem_total_size_c + cs_mem_size_c < 0x10000)
				mem_total_size += cs_mem_size;
			else	/* put max possible size */
				mem_total_size = L2_FILTER_FOR_MAX_MEMORY_SIZE;
		}
	}

	/* Set L2 filtering to Max Memory size */
	reg_write(ADDRESS_FILTERING_END_REGISTER, mem_total_size);

	return MV_OK;
}

u32 ddr3_get_bus_width(void)
{
	u32 bus_width;

	bus_width = (reg_read(REG_SDRAM_CONFIG_ADDR) & 0x8000) >>
		REG_SDRAM_CONFIG_WIDTH_OFFS;

	return (bus_width == 0) ? 16 : 32;
}

u32 ddr3_get_device_width(u32 cs)
{
	u32 device_width;

	device_width = (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) &
			(0x3 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs))) >>
		(REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs);

	return (device_width == 0) ? 8 : 16;
}

float ddr3_get_device_size(u32 cs)
{
	u32 device_size_low, device_size_high, device_size;
	u32 data, cs_low_offset, cs_high_offset;

	cs_low_offset = REG_SDRAM_ADDRESS_SIZE_OFFS + cs * 4;
	cs_high_offset = REG_SDRAM_ADDRESS_SIZE_OFFS +
		REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs;

	data = reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR);
	device_size_low = (data >> cs_low_offset) & 0x3;
	device_size_high = (data >> cs_high_offset) & 0x1;

	device_size = device_size_low | (device_size_high << 2);

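	/*
	 * device_size selects the per-device density returned below:
	 * 0 -> 2, 2 -> 0.5, 3 -> 1, 4 -> 4, 5 -> 8
	 */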
	switch (device_size) {
	case 0:
		return 2;
	case 2:
		return 0.5;
	case 3:
		return 1;
	case 4:
		return 4;
	case 5:
		return 8;
	case 1:
	default:
		DEBUG_INIT_C("Error: Wrong device size of Cs: ", cs, 1);
		/*
		 * A small value will give a wrong mem size in
		 * ddr3_calc_mem_cs_size
		 */
		return 0.01;
	}
}

int ddr3_calc_mem_cs_size(u32 cs, u32 *cs_size)
{
	float cs_mem_size;

	/* Calculate in GiB */
	cs_mem_size = ((ddr3_get_bus_width() / ddr3_get_device_width(cs)) *
		       ddr3_get_device_size(cs)) / 8;

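	/*
	 * Example: a 32-bit bus built from 16-bit devices of 4 Gbit each
	 * gives (32 / 16 * 4) / 8 = 1 GiB per chip select (before the
	 * controller bus width multiplier below).
	 */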
	/*
	 * Multiply by the controller bus width: 2x for 64 bit.
	 * (The SoC controller may be 32 or 64 bit, so bit 15 in register
	 * 0x1400, which tells whether the whole bus or only half of it is
	 * used, has a different meaning.)
	 */
	cs_mem_size *= DDR_CONTROLLER_BUS_WIDTH_MULTIPLIER;

	if (cs_mem_size == 0.125) {
		*cs_size = 128 << 20;
	} else if (cs_mem_size == 0.25) {
		*cs_size = 256 << 20;
	} else if (cs_mem_size == 0.5) {
		*cs_size = 512 << 20;
	} else if (cs_mem_size == 1) {
		*cs_size = 1 << 30;
	} else if (cs_mem_size == 2) {
		*cs_size = 2 << 30;
	} else {
		DEBUG_INIT_C("Error: Wrong Memory size of Cs: ", cs, 1);
		return MV_BAD_VALUE;
	}

	return MV_OK;
}

#if defined(MV_DDR_TOPOLOGY_UPDATE_FROM_TWSI)
/*
 * Name: ddr3_update_topology_map
 * Desc:
 * Args:
 * Notes: Update the topology map with the Sample at Reset (S@R) values
 * Returns:
 */
static int ddr3_update_topology_map(struct hws_topology_map *tm)
{
	struct topology_update_info topology_update_info;

	topology_update_info.update_width = 0;
	topology_update_info.update_ecc = 0;
	topology_update_info.update_ecc_pup3_mode = 0;
	sys_env_get_topology_update_info(&topology_update_info);
	if (topology_update_info.update_width) {
		tm->bus_act_mask &= ~(TOPOLOGY_UPDATE_WIDTH_32BIT_MASK);
		if (topology_update_info.width == TOPOLOGY_UPDATE_WIDTH_16BIT)
			tm->bus_act_mask = TOPOLOGY_UPDATE_WIDTH_16BIT_MASK;
		else
			tm->bus_act_mask = TOPOLOGY_UPDATE_WIDTH_32BIT_MASK;
	}

	if (topology_update_info.update_ecc) {
		if (topology_update_info.ecc == TOPOLOGY_UPDATE_ECC_OFF) {
			tm->bus_act_mask &=
				~(1 << topology_update_info.ecc_pup_mode_offset);
		} else {
			tm->bus_act_mask |=
				topology_update_info.ecc <<
				topology_update_info.ecc_pup_mode_offset;
		}
	}

	return MV_OK;
}
#endif

/*
 * Name: ddr3_hws_tune_training_params
 * Desc:
 * Args:
 * Notes: Tune internal training params
 * Returns:
 */
static int ddr3_hws_tune_training_params(u8 dev_num)
{
	struct tune_train_params params;
	int status;

	/* NOTE: do not remove any field initialization */
	params.ck_delay = TUNE_TRAINING_PARAMS_CK_DELAY;
	params.ck_delay_16 = TUNE_TRAINING_PARAMS_CK_DELAY_16;
	params.p_finger = TUNE_TRAINING_PARAMS_PFINGER;
	params.n_finger = TUNE_TRAINING_PARAMS_NFINGER;
	params.phy_reg3_val = TUNE_TRAINING_PARAMS_PHYREG3VAL;

	status = ddr3_tip_tune_training_params(dev_num, &params);
	if (MV_OK != status) {
		printf("%s Training Sequence - FAILED\n", ddr_type);
		return status;
	}

	return MV_OK;
}