1 /*
2 * arch/powerpc/cpu/ppc4xx/44x_spd_ddr2.c
3 * This SPD SDRAM detection code supports AMCC PPC44x cpu's with a
4 * DDR2 controller (non Denali Core). Those currently are:
5 *
6 * 405: 405EX(r)
7 * 440/460: 440SP/440SPe/460EX/460GT
8 *
9 * Copyright (c) 2008 Nuovation System Designs, LLC
10 * Grant Erickson <gerickson@nuovations.com>
11 *
12 * (C) Copyright 2007-2009
13 * Stefan Roese, DENX Software Engineering, sr@denx.de.
14 *
15 * COPYRIGHT AMCC CORPORATION 2004
16 *
17 * SPDX-License-Identifier: GPL-2.0+
18 */
19
20 /* define DEBUG for debugging output (obviously ;-)) */
21 #if 0
22 #define DEBUG
23 #endif
24
25 #include <common.h>
26 #include <command.h>
27 #include <asm/ppc4xx.h>
28 #include <i2c.h>
29 #include <asm/io.h>
30 #include <asm/processor.h>
31 #include <asm/mmu.h>
32 #include <asm/cache.h>
33
34 #include "ecc.h"
35
36 DECLARE_GLOBAL_DATA_PTR;
37
38 #define PPC4xx_IBM_DDR2_DUMP_REGISTER(mnemonic) \
39 do { \
40 u32 data; \
41 mfsdram(SDRAM_##mnemonic, data); \
42 printf("%20s[%02x] = 0x%08X\n", \
43 "SDRAM_" #mnemonic, SDRAM_##mnemonic, data); \
44 } while (0)
45
46 #define PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(mnemonic) \
47 do { \
48 u32 data; \
49 data = mfdcr(SDRAM_##mnemonic); \
50 printf("%20s[%02x] = 0x%08X\n", \
51 "SDRAM_" #mnemonic, SDRAM_##mnemonic, data); \
52 } while (0)
53
54 static void update_rdcc(void)
55 {
56 u32 val;
57
58 /*
59 * Complete RDSS configuration as mentioned on page 7 of the AMCC
60 * PowerPC440SP/SPe DDR2 application note:
61 * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
62 *
63 * Or item #10 "10. Complete RDSS configuration" in chapter
64 * "22.2.9 SDRAM Initialization" of AMCC PPC460EX/EXr/GT users
65 * manual.
66 */
67 mfsdram(SDRAM_RTSR, val);
68 if ((val & SDRAM_RTSR_TRK1SM_MASK) == SDRAM_RTSR_TRK1SM_ATPLS1) {
69 mfsdram(SDRAM_RDCC, val);
70 if ((val & SDRAM_RDCC_RDSS_MASK) != SDRAM_RDCC_RDSS_T4) {
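/*
 * Adding 0x40000000 advances the two-bit RDSS (read data sample
 * stage) field, which per the SDRAM_RDCC_RDSS_* definitions is
 * assumed to occupy the two most significant bits of SDRAM_RDCC,
 * by one stage (e.g. T2 -> T3).
 */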
71 val += 0x40000000;
72 mtsdram(SDRAM_RDCC, val);
73 }
74 }
75 }
76
77 #if defined(CONFIG_440)
78 /*
79 * This DDR2 setup code can dynamically set up the TLB entries for the DDR2
80 * memory region. Right now the cache should still be disabled in U-Boot
81 * because of the EMAC driver, which needs its buffer descriptors to be located
82 * in non-cached memory.
83 *
84 * If at some point this restriction no longer applies, just define
85 * CONFIG_4xx_DCACHE in the board config file and this code will set up
86 * everything correctly.
87 */
88 #ifdef CONFIG_4xx_DCACHE
89 /* enable caching on SDRAM */
90 #define MY_TLB_WORD2_I_ENABLE 0
91 #else
92 /* disable caching on SDRAM */
93 #define MY_TLB_WORD2_I_ENABLE TLB_WORD2_I_ENABLE
94 #endif /* CONFIG_4xx_DCACHE */
95
96 void dcbz_area(u32 start_address, u32 num_bytes);
97 #endif /* CONFIG_440 */
98
99 #define MAXRANKS 4
100 #define MAXBXCF 4
101
102 #define MULDIV64(m1, m2, d) (u32)(((u64)(m1) * (u64)(m2)) / (u64)(d))
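/*
 * Illustrative example (not from the original source): the 64-bit
 * intermediate product avoids u32 overflow, e.g. ONE_BILLION * 100
 * does not fit in 32 bits. With sdram_freq = 400000000 (a 400 MHz DDR
 * clock), MULDIV64(ONE_BILLION, 100, sdram_freq) = 250, i.e. a
 * 2.50 ns cycle time expressed in the 10 ps units used by the SPD
 * decoding further down.
 */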
103
104 /*-----------------------------------------------------------------------------+
105 * sdram_memsize
106 *-----------------------------------------------------------------------------*/
107 phys_size_t sdram_memsize(void)
108 {
109 phys_size_t mem_size;
110 unsigned long mcopt2;
111 unsigned long mcstat;
112 unsigned long mb0cf;
113 unsigned long sdsz;
114 unsigned long i;
115
116 mem_size = 0;
117
118 mfsdram(SDRAM_MCOPT2, mcopt2);
119 mfsdram(SDRAM_MCSTAT, mcstat);
120
121 /* DDR controller must be enabled and not in self-refresh. */
122 /* Otherwise memsize is zero. */
123 if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
124 && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
125 && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
126 == (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
127 for (i = 0; i < MAXBXCF; i++) {
128 mfsdram(SDRAM_MB0CF + (i << 2), mb0cf);
129 /* Banks enabled */
130 if ((mb0cf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
131 #if defined(CONFIG_440)
132 sdsz = mfdcr_any(SDRAM_R0BAS + i) & SDRAM_RXBAS_SDSZ_MASK;
133 #else
134 sdsz = mb0cf & SDRAM_RXBAS_SDSZ_MASK;
135 #endif
136 switch(sdsz) {
137 case SDRAM_RXBAS_SDSZ_8:
138 mem_size+=8;
139 break;
140 case SDRAM_RXBAS_SDSZ_16:
141 mem_size+=16;
142 break;
143 case SDRAM_RXBAS_SDSZ_32:
144 mem_size+=32;
145 break;
146 case SDRAM_RXBAS_SDSZ_64:
147 mem_size+=64;
148 break;
149 case SDRAM_RXBAS_SDSZ_128:
150 mem_size+=128;
151 break;
152 case SDRAM_RXBAS_SDSZ_256:
153 mem_size+=256;
154 break;
155 case SDRAM_RXBAS_SDSZ_512:
156 mem_size+=512;
157 break;
158 case SDRAM_RXBAS_SDSZ_1024:
159 mem_size+=1024;
160 break;
161 case SDRAM_RXBAS_SDSZ_2048:
162 mem_size+=2048;
163 break;
164 case SDRAM_RXBAS_SDSZ_4096:
165 mem_size+=4096;
166 break;
167 default:
168 printf("WARNING: Unsupported bank size (SDSZ=0x%lx)!\n"
169 , sdsz);
170 mem_size=0;
171 break;
172 }
173 }
174 }
175 }
176
177 return mem_size << 20;
178 }
179
180 /*-----------------------------------------------------------------------------+
181 * is_ecc_enabled
182 *-----------------------------------------------------------------------------*/
183 static unsigned long is_ecc_enabled(void)
184 {
185 unsigned long val;
186
187 mfsdram(SDRAM_MCOPT1, val);
188
189 return SDRAM_MCOPT1_MCHK_CHK_DECODE(val);
190 }
191
192 /*-----------------------------------------------------------------------------+
193 * board_add_ram_info
194 *-----------------------------------------------------------------------------*/
195 void board_add_ram_info(int use_default)
196 {
197 PPC4xx_SYS_INFO board_cfg;
198 u32 val;
199
200 if (is_ecc_enabled())
201 puts(" (ECC");
202 else
203 puts(" (ECC not");
204
205 get_sys_info(&board_cfg);
206
207 #if defined(CONFIG_405EX)
208 val = board_cfg.freqPLB;
209 #else
210 mfsdr(SDR0_DDR0, val);
211 val = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(val), 1);
212 #endif
213 printf(" enabled, %d MHz", (val * 2) / 1000000);
214
215 mfsdram(SDRAM_MMODE, val);
216 val = (val & SDRAM_MMODE_DCL_MASK) >> 4;
217 printf(", CL%d)", val);
218 }
219
220 #if defined(CONFIG_SPD_EEPROM)
221
222 /*-----------------------------------------------------------------------------+
223 * Defines
224 *-----------------------------------------------------------------------------*/
225 #define SDRAM_DDR1 1
226 #define SDRAM_DDR2 2
227 #define SDRAM_NONE 0
228
229 #define MAXDIMMS 2
230 #define MAX_SPD_BYTES 256 /* Max number of bytes on the DIMM's SPD EEPROM */
231
232 #define ONE_BILLION 1000000000
233
234 #define CMD_NOP (7 << 19)
235 #define CMD_PRECHARGE (2 << 19)
236 #define CMD_REFRESH (1 << 19)
237 #define CMD_EMR (0 << 19)
238 #define CMD_READ (5 << 19)
239 #define CMD_WRITE (4 << 19)
240
241 #define SELECT_MR (0 << 16)
242 #define SELECT_EMR (1 << 16)
243 #define SELECT_EMR2 (2 << 16)
244 #define SELECT_EMR3 (3 << 16)
245
246 /* MR */
247 #define DLL_RESET 0x00000100
248
249 #define WRITE_RECOV_2 (1 << 9)
250 #define WRITE_RECOV_3 (2 << 9)
251 #define WRITE_RECOV_4 (3 << 9)
252 #define WRITE_RECOV_5 (4 << 9)
253 #define WRITE_RECOV_6 (5 << 9)
254
255 #define BURST_LEN_4 0x00000002
256
257 /* EMR */
258 #define ODT_0_OHM 0x00000000
259 #define ODT_50_OHM 0x00000044
260 #define ODT_75_OHM 0x00000004
261 #define ODT_150_OHM 0x00000040
262
263 #define ODS_FULL 0x00000000
264 #define ODS_REDUCED 0x00000002
265 #define OCD_CALIB_DEF 0x00000380
266
267 /* defines for ODT (On Die Termination) of the 440SP(e) DDR2 controller */
268 #define ODT_EB0R (0x80000000 >> 8)
269 #define ODT_EB0W (0x80000000 >> 7)
270 #define CALC_ODT_R(n) (ODT_EB0R << (n << 1))
271 #define CALC_ODT_W(n) (ODT_EB0W << (n << 1))
272 #define CALC_ODT_RW(n) (CALC_ODT_R(n) | CALC_ODT_W(n))
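/*
 * Illustrative expansion: each rank n uses a two-bit field, so e.g.
 * CALC_ODT_R(1) = ODT_EB0R << 2 = 0x02000000,
 * CALC_ODT_W(1) = ODT_EB0W << 2 = 0x04000000, and
 * CALC_ODT_RW(1) sets both the read and write termination bits for rank 1.
 */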
273
274 /* Defines for the Read Cycle Delay test */
275 #define NUMMEMTESTS 8
276 #define NUMMEMWORDS 8
277 #define NUMLOOPS 64 /* memory test loops */
278
279 /*
280 * Newer PPCs like the 440SPe and 460EX/GT can be equipped with more than 2GB of SDRAM.
281 * To support such configurations, we "only" map the first 2GB via the TLBs. We
282 * need some free virtual address space for the remaining peripherals, like SoC
283 * devices, FLASH etc.
284 *
285 * Note that ECC is currently not supported on configurations with more than 2GB
286 * SDRAM. This is because we only map the first 2GB on such systems, and therefore
287 * the ECC parity byte of the remaining area can't be written.
288 */
289
290 /*
291 * Board-specific platform code can reimplement spd_ddr_init_hang() if needed.
292 */
293 void __spd_ddr_init_hang (void)
294 {
295 hang ();
296 }
297 void spd_ddr_init_hang (void) __attribute__((weak, alias("__spd_ddr_init_hang")));
298
299 /*
300 * To provide an interface for board specific config values in this common
301 * DDR setup code, we implement the "weak" default functions here. They return
302 * the default value back to the caller.
303 *
304 * Please see include/configs/yucca.h for an example of a board-specific
305 * implementation.
306 */
307 u32 __ddr_wrdtr(u32 default_val)
308 {
309 return default_val;
310 }
311 u32 ddr_wrdtr(u32) __attribute__((weak, alias("__ddr_wrdtr")));
312
313 u32 __ddr_clktr(u32 default_val)
314 {
315 return default_val;
316 }
317 u32 ddr_clktr(u32) __attribute__((weak, alias("__ddr_clktr")));
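/*
 * A board port overrides these defaults simply by providing a strong
 * definition of ddr_wrdtr()/ddr_clktr(); the weak aliases above are then
 * dropped by the linker. A minimal sketch (hypothetical board code, not
 * part of this file):
 *
 * u32 ddr_wrdtr(u32 default_val)
 * {
 * return SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_90_DEG_ADV;
 * }
 */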
318
319
320 /* Private Structure Definitions */
321
322 /* CAS latency values scaled by 10 so that half-cycle latencies (e.g. CAS 2.5) fit in an integer enum */
323 typedef enum ddr_cas_id {
324 DDR_CAS_2 = 20,
325 DDR_CAS_2_5 = 25,
326 DDR_CAS_3 = 30,
327 DDR_CAS_4 = 40,
328 DDR_CAS_5 = 50
329 } ddr_cas_id_t;
330
331 /*-----------------------------------------------------------------------------+
332 * Prototypes
333 *-----------------------------------------------------------------------------*/
334 static void get_spd_info(unsigned long *dimm_populated,
335 unsigned char *iic0_dimm_addr,
336 unsigned long num_dimm_banks);
337 static void check_mem_type(unsigned long *dimm_populated,
338 unsigned char *iic0_dimm_addr,
339 unsigned long num_dimm_banks);
340 static void check_frequency(unsigned long *dimm_populated,
341 unsigned char *iic0_dimm_addr,
342 unsigned long num_dimm_banks);
343 static void check_rank_number(unsigned long *dimm_populated,
344 unsigned char *iic0_dimm_addr,
345 unsigned long num_dimm_banks);
346 static void check_voltage_type(unsigned long *dimm_populated,
347 unsigned char *iic0_dimm_addr,
348 unsigned long num_dimm_banks);
349 static void program_memory_queue(unsigned long *dimm_populated,
350 unsigned char *iic0_dimm_addr,
351 unsigned long num_dimm_banks);
352 static void program_codt(unsigned long *dimm_populated,
353 unsigned char *iic0_dimm_addr,
354 unsigned long num_dimm_banks);
355 static void program_mode(unsigned long *dimm_populated,
356 unsigned char *iic0_dimm_addr,
357 unsigned long num_dimm_banks,
358 ddr_cas_id_t *selected_cas,
359 int *write_recovery);
360 static void program_tr(unsigned long *dimm_populated,
361 unsigned char *iic0_dimm_addr,
362 unsigned long num_dimm_banks);
363 static void program_rtr(unsigned long *dimm_populated,
364 unsigned char *iic0_dimm_addr,
365 unsigned long num_dimm_banks);
366 static void program_bxcf(unsigned long *dimm_populated,
367 unsigned char *iic0_dimm_addr,
368 unsigned long num_dimm_banks);
369 static void program_copt1(unsigned long *dimm_populated,
370 unsigned char *iic0_dimm_addr,
371 unsigned long num_dimm_banks);
372 static void program_initplr(unsigned long *dimm_populated,
373 unsigned char *iic0_dimm_addr,
374 unsigned long num_dimm_banks,
375 ddr_cas_id_t selected_cas,
376 int write_recovery);
377 #ifdef CONFIG_DDR_ECC
378 static void program_ecc(unsigned long *dimm_populated,
379 unsigned char *iic0_dimm_addr,
380 unsigned long num_dimm_banks,
381 unsigned long tlb_word2_i_value);
382 #endif
383 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
384 static void program_DQS_calibration(unsigned long *dimm_populated,
385 unsigned char *iic0_dimm_addr,
386 unsigned long num_dimm_banks);
387 #ifdef HARD_CODED_DQS /* calibration test with hardvalues */
388 static void test(void);
389 #else
390 static void DQS_calibration_process(void);
391 #endif
392 #endif
393
394 static unsigned char spd_read(uchar chip, uint addr)
395 {
396 unsigned char data[2];
397
398 if (i2c_probe(chip) == 0)
399 if (i2c_read(chip, addr, 1, data, 1) == 0)
400 return data[0];
401
402 return 0;
403 }
404
405 /*-----------------------------------------------------------------------------+
406 * dram_init. Initializes the 440SP Memory Queue and DDR SDRAM controller.
407 * Note: This routine runs from flash with a stack set up in the chip's
408 * sram space. It is important that the routine does not require .sbss, .bss or
409 * .data sections. It also cannot call routines that require these sections.
410 *-----------------------------------------------------------------------------*/
411 /*-----------------------------------------------------------------------------
412 * Function: dram_init
413 * Description: Configures SDRAM memory banks for DDR operation.
414 * Auto Memory Configuration option reads the DDR SDRAM EEPROMs
415 * via the IIC bus and then configures the DDR SDRAM memory
416 * banks appropriately. If Auto Memory Configuration is
417 * not used, it is assumed that no DIMM is plugged.
418 *-----------------------------------------------------------------------------*/
419 int dram_init(void)
420 {
421 unsigned char iic0_dimm_addr[] = SPD_EEPROM_ADDRESS;
422 unsigned long dimm_populated[MAXDIMMS] = {SDRAM_NONE, SDRAM_NONE};
423 unsigned long num_dimm_banks; /* on board dimm banks */
424 unsigned long val;
425 ddr_cas_id_t selected_cas = DDR_CAS_5; /* preset to silence compiler */
426 int write_recovery;
427 phys_size_t dram_size = 0;
428
429 if (IS_ENABLED(CONFIG_SYS_RAMBOOT)) {
430 /*
431 * Reduce RAM size to avoid overwriting memory used by
432 * current stack? Not sure what is happening.
433 */
434 gd->ram_size = sdram_memsize() / 2;
435
436 return 0;
437 }
438
439 num_dimm_banks = sizeof(iic0_dimm_addr);
440
441 /*------------------------------------------------------------------
442 * Reset the DDR-SDRAM controller.
443 *-----------------------------------------------------------------*/
444 mtsdr(SDR0_SRST, SDR0_SRST0_DMC);
445 mtsdr(SDR0_SRST, 0x00000000);
446
447 /*
448 * Make sure I2C controller is initialized
449 * before continuing.
450 */
451
452 /* switch to correct I2C bus */
453 i2c_set_bus_num(CONFIG_SYS_SPD_BUS_NUM);
454
455 /*------------------------------------------------------------------
456 * Clear out the serial presence detect buffers.
457 * Perform IIC reads from the dimm. Fill in the spds.
458 * Check to see if the dimm slots are populated
459 *-----------------------------------------------------------------*/
460 get_spd_info(dimm_populated, iic0_dimm_addr, num_dimm_banks);
461
462 /*------------------------------------------------------------------
463 * Check the memory type for the dimms plugged.
464 *-----------------------------------------------------------------*/
465 check_mem_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);
466
467 /*------------------------------------------------------------------
468 * Check the frequency supported for the dimms plugged.
469 *-----------------------------------------------------------------*/
470 check_frequency(dimm_populated, iic0_dimm_addr, num_dimm_banks);
471
472 /*------------------------------------------------------------------
473 * Check the total rank number.
474 *-----------------------------------------------------------------*/
475 check_rank_number(dimm_populated, iic0_dimm_addr, num_dimm_banks);
476
477 /*------------------------------------------------------------------
478 * Check the voltage type for the dimms plugged.
479 *-----------------------------------------------------------------*/
480 check_voltage_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);
481
482 /*------------------------------------------------------------------
483 * Program SDRAM controller options 2 register
484 * except enabling the memory controller.
485 *-----------------------------------------------------------------*/
486 mfsdram(SDRAM_MCOPT2, val);
487 mtsdram(SDRAM_MCOPT2,
488 (val &
489 ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_PMEN_MASK |
490 SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_XSRP_MASK |
491 SDRAM_MCOPT2_ISIE_MASK))
492 | (SDRAM_MCOPT2_SREN_ENTER | SDRAM_MCOPT2_PMEN_DISABLE |
493 SDRAM_MCOPT2_IPTR_IDLE | SDRAM_MCOPT2_XSRP_ALLOW |
494 SDRAM_MCOPT2_ISIE_ENABLE));
495
496 /*------------------------------------------------------------------
497 * Program SDRAM controller options 1 register
498 * Note: Does not enable the memory controller.
499 *-----------------------------------------------------------------*/
500 program_copt1(dimm_populated, iic0_dimm_addr, num_dimm_banks);
501
502 /*------------------------------------------------------------------
503 * Set the SDRAM Controller On Die Termination Register
504 *-----------------------------------------------------------------*/
505 program_codt(dimm_populated, iic0_dimm_addr, num_dimm_banks);
506
507 /*------------------------------------------------------------------
508 * Program SDRAM refresh register.
509 *-----------------------------------------------------------------*/
510 program_rtr(dimm_populated, iic0_dimm_addr, num_dimm_banks);
511
512 /*------------------------------------------------------------------
513 * Program SDRAM mode register.
514 *-----------------------------------------------------------------*/
515 program_mode(dimm_populated, iic0_dimm_addr, num_dimm_banks,
516 &selected_cas, &write_recovery);
517
518 /*------------------------------------------------------------------
519 * Set the SDRAM Write Data/DM/DQS Clock Timing Reg
520 *-----------------------------------------------------------------*/
521 mfsdram(SDRAM_WRDTR, val);
522 mtsdram(SDRAM_WRDTR, (val & ~(SDRAM_WRDTR_LLWP_MASK | SDRAM_WRDTR_WTR_MASK)) |
523 ddr_wrdtr(SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_90_DEG_ADV));
524
525 /*------------------------------------------------------------------
526 * Set the SDRAM Clock Timing Register
527 *-----------------------------------------------------------------*/
528 mfsdram(SDRAM_CLKTR, val);
529 mtsdram(SDRAM_CLKTR, (val & ~SDRAM_CLKTR_CLKP_MASK) |
530 ddr_clktr(SDRAM_CLKTR_CLKP_0_DEG));
531
532 /*------------------------------------------------------------------
533 * Program the BxCF registers.
534 *-----------------------------------------------------------------*/
535 program_bxcf(dimm_populated, iic0_dimm_addr, num_dimm_banks);
536
537 /*------------------------------------------------------------------
538 * Program SDRAM timing registers.
539 *-----------------------------------------------------------------*/
540 program_tr(dimm_populated, iic0_dimm_addr, num_dimm_banks);
541
542 /*------------------------------------------------------------------
543 * Set the Extended Mode register
544 *-----------------------------------------------------------------*/
545 mfsdram(SDRAM_MEMODE, val);
546 mtsdram(SDRAM_MEMODE,
547 (val & ~(SDRAM_MEMODE_DIC_MASK | SDRAM_MEMODE_DLL_MASK |
548 SDRAM_MEMODE_RTT_MASK | SDRAM_MEMODE_DQS_MASK)) |
549 (SDRAM_MEMODE_DIC_NORMAL | SDRAM_MEMODE_DLL_ENABLE
550 | SDRAM_MEMODE_RTT_150OHM | SDRAM_MEMODE_DQS_ENABLE));
551
552 /*------------------------------------------------------------------
553 * Program Initialization preload registers.
554 *-----------------------------------------------------------------*/
555 program_initplr(dimm_populated, iic0_dimm_addr, num_dimm_banks,
556 selected_cas, write_recovery);
557
558 /*------------------------------------------------------------------
559 * Delay to ensure 200usec have elapsed since reset.
560 *-----------------------------------------------------------------*/
561 udelay(400);
562
563 /*------------------------------------------------------------------
564 * Set the memory queue core base addr.
565 *-----------------------------------------------------------------*/
566 program_memory_queue(dimm_populated, iic0_dimm_addr, num_dimm_banks);
567
568 /*------------------------------------------------------------------
569 * Program SDRAM controller options 2 register
570 * Enable the memory controller.
571 *-----------------------------------------------------------------*/
572 mfsdram(SDRAM_MCOPT2, val);
573 mtsdram(SDRAM_MCOPT2,
574 (val & ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_DCEN_MASK |
575 SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_ISIE_MASK)) |
576 SDRAM_MCOPT2_IPTR_EXECUTE);
577
578 /*------------------------------------------------------------------
579 * Wait for IPTR_EXECUTE init sequence to complete.
580 *-----------------------------------------------------------------*/
581 do {
582 mfsdram(SDRAM_MCSTAT, val);
583 } while ((val & SDRAM_MCSTAT_MIC_MASK) == SDRAM_MCSTAT_MIC_NOTCOMP);
584
585 /* enable the controller only after init sequence completes */
586 mfsdram(SDRAM_MCOPT2, val);
587 mtsdram(SDRAM_MCOPT2, (val | SDRAM_MCOPT2_DCEN_ENABLE));
588
589 /* Make sure delay-line calibration is done before proceeding */
590 do {
591 mfsdram(SDRAM_DLCR, val);
592 } while (!(val & SDRAM_DLCR_DLCS_COMPLETE));
593
594 /* get installed memory size */
595 dram_size = sdram_memsize();
596
597 /*
598 * Limit size to 2GB
599 */
600 if (dram_size > CONFIG_MAX_MEM_MAPPED)
601 dram_size = CONFIG_MAX_MEM_MAPPED;
602
603 /* and program tlb entries for this size (dynamic) */
604
605 /*
606 * Program TLB entries with caches enabled, for best performance
607 * while auto-calibrating and generating ECC.
608 */
609 program_tlb(0, 0, dram_size, 0);
610
611 /*------------------------------------------------------------------
612 * DQS calibration.
613 *-----------------------------------------------------------------*/
614 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
615 DQS_autocalibration();
616 #else
617 program_DQS_calibration(dimm_populated, iic0_dimm_addr, num_dimm_banks);
618 #endif
619 /*
620 * Now complete RDSS configuration as mentioned on page 7 of the AMCC
621 * PowerPC440SP/SPe DDR2 application note:
622 * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
623 */
624 update_rdcc();
625
626 #ifdef CONFIG_DDR_ECC
627 /*------------------------------------------------------------------
628 * If ecc is enabled, initialize the parity bits.
629 *-----------------------------------------------------------------*/
630 program_ecc(dimm_populated, iic0_dimm_addr, num_dimm_banks, 0);
631 #endif
632
633 /*
634 * Flush the dcache before removing the TLB with caches
635 * enabled. Otherwise this might lead to problems later on,
636 * e.g. while booting Linux (as seen on ICON-440SPe).
637 */
638 flush_dcache();
639
640 /*
641 * Now after initialization (auto-calibration and ECC generation)
642 * remove the TLB entries with caches enabled and program again with
643 * desired cache functionality
644 */
645 remove_tlb(0, dram_size);
646 program_tlb(0, 0, dram_size, MY_TLB_WORD2_I_ENABLE);
647
648 ppc4xx_ibm_ddr2_register_dump();
649
650 /*
651 * Clear potential errors resulting from auto-calibration.
652 * If not done, then we could get an interrupt later on when
653 * exceptions are enabled.
654 */
655 set_mcsr(get_mcsr());
656
657 gd->ram_size = sdram_memsize();
658
659 return 0;
660 }
661
662 static void get_spd_info(unsigned long *dimm_populated,
663 unsigned char *iic0_dimm_addr,
664 unsigned long num_dimm_banks)
665 {
666 unsigned long dimm_num;
667 unsigned long dimm_found;
668 unsigned char num_of_bytes;
669 unsigned char total_size;
670
671 dimm_found = false;
672 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
673 num_of_bytes = 0;
674 total_size = 0;
675
676 num_of_bytes = spd_read(iic0_dimm_addr[dimm_num], 0);
677 debug("\nspd_read(0x%x) returned %d\n",
678 iic0_dimm_addr[dimm_num], num_of_bytes);
679 total_size = spd_read(iic0_dimm_addr[dimm_num], 1);
680 debug("spd_read(0x%x) returned %d\n",
681 iic0_dimm_addr[dimm_num], total_size);
682
683 if ((num_of_bytes != 0) && (total_size != 0)) {
684 dimm_populated[dimm_num] = true;
685 dimm_found = true;
686 debug("DIMM slot %lu: populated\n", dimm_num);
687 } else {
688 dimm_populated[dimm_num] = false;
689 debug("DIMM slot %lu: Not populated\n", dimm_num);
690 }
691 }
692
693 if (dimm_found == false) {
694 printf("ERROR - No memory installed. Install a DDR-SDRAM DIMM.\n\n");
695 spd_ddr_init_hang ();
696 }
697 }
698
699
700 /*------------------------------------------------------------------
701 * For the memory DIMMs installed, this routine verifies that they
702 * really are DDR specific DIMMs.
703 *-----------------------------------------------------------------*/
704 static void check_mem_type(unsigned long *dimm_populated,
705 unsigned char *iic0_dimm_addr,
706 unsigned long num_dimm_banks)
707 {
708 unsigned long dimm_num;
709 unsigned long dimm_type;
710
711 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
712 if (dimm_populated[dimm_num] == true) {
713 dimm_type = spd_read(iic0_dimm_addr[dimm_num], 2);
714 switch (dimm_type) {
715 case 1:
716 printf("ERROR: Standard Fast Page Mode DRAM DIMM detected in "
717 "slot %d.\n", (unsigned int)dimm_num);
718 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
719 printf("Replace the DIMM module with a supported DIMM.\n\n");
720 spd_ddr_init_hang ();
721 break;
722 case 2:
723 printf("ERROR: EDO DIMM detected in slot %d.\n",
724 (unsigned int)dimm_num);
725 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
726 printf("Replace the DIMM module with a supported DIMM.\n\n");
727 spd_ddr_init_hang ();
728 break;
729 case 3:
730 printf("ERROR: Pipelined Nibble DIMM detected in slot %d.\n",
731 (unsigned int)dimm_num);
732 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
733 printf("Replace the DIMM module with a supported DIMM.\n\n");
734 spd_ddr_init_hang ();
735 break;
736 case 4:
737 printf("ERROR: SDRAM DIMM detected in slot %d.\n",
738 (unsigned int)dimm_num);
739 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
740 printf("Replace the DIMM module with a supported DIMM.\n\n");
741 spd_ddr_init_hang ();
742 break;
743 case 5:
744 printf("ERROR: Multiplexed ROM DIMM detected in slot %d.\n",
745 (unsigned int)dimm_num);
746 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
747 printf("Replace the DIMM module with a supported DIMM.\n\n");
748 spd_ddr_init_hang ();
749 break;
750 case 6:
751 printf("ERROR: SGRAM DIMM detected in slot %d.\n",
752 (unsigned int)dimm_num);
753 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
754 printf("Replace the DIMM module with a supported DIMM.\n\n");
755 spd_ddr_init_hang ();
756 break;
757 case 7:
758 debug("DIMM slot %lu: DDR1 SDRAM detected\n", dimm_num);
759 dimm_populated[dimm_num] = SDRAM_DDR1;
760 break;
761 case 8:
762 debug("DIMM slot %lu: DDR2 SDRAM detected\n", dimm_num);
763 dimm_populated[dimm_num] = SDRAM_DDR2;
764 break;
765 default:
766 printf("ERROR: Unknown DIMM detected in slot %d.\n",
767 (unsigned int)dimm_num);
768 printf("Only DDR1 and DDR2 SDRAM DIMMs are supported.\n");
769 printf("Replace the DIMM module with a supported DIMM.\n\n");
770 spd_ddr_init_hang ();
771 break;
772 }
773 }
774 }
775 for (dimm_num = 1; dimm_num < num_dimm_banks; dimm_num++) {
776 if ((dimm_populated[dimm_num-1] != SDRAM_NONE)
777 && (dimm_populated[dimm_num] != SDRAM_NONE)
778 && (dimm_populated[dimm_num-1] != dimm_populated[dimm_num])) {
779 printf("ERROR: DIMM's DDR1 and DDR2 type can not be mixed.\n");
780 spd_ddr_init_hang ();
781 }
782 }
783 }
784
785 /*------------------------------------------------------------------
786 * For the memory DIMMs installed, this routine verifies that
787 * the previously calculated frequency is supported.
788 *-----------------------------------------------------------------*/
789 static void check_frequency(unsigned long *dimm_populated,
790 unsigned char *iic0_dimm_addr,
791 unsigned long num_dimm_banks)
792 {
793 unsigned long dimm_num;
794 unsigned long tcyc_reg;
795 unsigned long cycle_time;
796 unsigned long calc_cycle_time;
797 unsigned long sdram_freq;
798 unsigned long sdr_ddrpll;
799 PPC4xx_SYS_INFO board_cfg;
800
801 /*------------------------------------------------------------------
802 * Get the board configuration info.
803 *-----------------------------------------------------------------*/
804 get_sys_info(&board_cfg);
805
806 mfsdr(SDR0_DDR0, sdr_ddrpll);
807 sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
808
809 /*
810 * calc_cycle_time is calculated from DDR frequency set by board/chip
811 * and is expressed in multiples of 10 picoseconds
812 * to match the way DIMM cycle time is calculated below.
813 */
814 calc_cycle_time = MULDIV64(ONE_BILLION, 100, sdram_freq);
815
816 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
817 if (dimm_populated[dimm_num] != SDRAM_NONE) {
818 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
819 /*
820 * Byte 9, Cycle time for CAS Latency=X, is split into two nibbles:
821 * the higher order nibble (bits 4-7) designates the cycle time
822 * to a granularity of 1ns;
823 * the value presented by the lower order nibble (bits 0-3)
824 * has a granularity of .1ns and is added to the value designated
825 * by the higher nibble. In addition, four codes of the lower order
826 * nibble are assigned to represent +.25, +.33, +.66 and +.75.
827 */
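/*
 * Worked example (for illustration): SPD byte 9 = 0x3D decodes as
 * (0x3 * 100) + 75 = 375, i.e. 3.75 ns in the 10 ps units used here,
 * which corresponds to a 266 MHz (DDR2-533) part.
 */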
828 /* Convert from hex to decimal */
829 if ((tcyc_reg & 0x0F) == 0x0D)
830 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
831 else if ((tcyc_reg & 0x0F) == 0x0C)
832 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 66;
833 else if ((tcyc_reg & 0x0F) == 0x0B)
834 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 33;
835 else if ((tcyc_reg & 0x0F) == 0x0A)
836 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 25;
837 else
838 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) +
839 ((tcyc_reg & 0x0F)*10);
840 debug("cycle_time=%lu [10 picoseconds]\n", cycle_time);
841
842 if (cycle_time > (calc_cycle_time + 10)) {
843 /*
844 * The configured SDRAM cycle time is smaller than the
845 * minimum cycle time supported by the DIMM.
846 * The additional 100ps allows for a small uncertainty.
847 */
848 printf("ERROR: DRAM DIMM detected with cycle_time %d ps in "
849 "slot %d \n while calculated cycle time is %d ps.\n",
850 (unsigned int)(cycle_time*10),
851 (unsigned int)dimm_num,
852 (unsigned int)(calc_cycle_time*10));
853 printf("Replace the DIMM, or change DDR frequency via "
854 "strapping bits.\n\n");
855 spd_ddr_init_hang ();
856 }
857 }
858 }
859 }
860
861 /*------------------------------------------------------------------
862 * For the memory DIMMs installed, this routine verifies that the
863 * supported maximum number of ranks is not exceeded.
864 *-----------------------------------------------------------------*/
865 static void check_rank_number(unsigned long *dimm_populated,
866 unsigned char *iic0_dimm_addr,
867 unsigned long num_dimm_banks)
868 {
869 unsigned long dimm_num;
870 unsigned long dimm_rank;
871 unsigned long total_rank = 0;
872
873 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
874 if (dimm_populated[dimm_num] != SDRAM_NONE) {
875 dimm_rank = spd_read(iic0_dimm_addr[dimm_num], 5);
876 if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
877 dimm_rank = (dimm_rank & 0x0F) +1;
878 else
879 dimm_rank = dimm_rank & 0x0F;
880
881
882 if (dimm_rank > MAXRANKS) {
883 printf("ERROR: DRAM DIMM detected with %lu ranks in "
884 "slot %lu is not supported.\n", dimm_rank, dimm_num);
885 printf("Only %d ranks are supported for all DIMM.\n", MAXRANKS);
886 printf("Replace the DIMM module with a supported DIMM.\n\n");
887 spd_ddr_init_hang ();
888 } else
889 total_rank += dimm_rank;
890 }
891 if (total_rank > MAXRANKS) {
892 printf("ERROR: DRAM DIMM detected with a total of %d ranks "
893 "for all slots.\n", (unsigned int)total_rank);
894 printf("Only %d ranks are supported for all DIMM.\n", MAXRANKS);
895 printf("Remove one of the DIMM modules.\n\n");
896 spd_ddr_init_hang ();
897 }
898 }
899 }
900
901 /*------------------------------------------------------------------
902 * Only 2.5V (DDR1) and 1.8V (DDR2) modules are supported.
903 * This routine verifies this.
904 *-----------------------------------------------------------------*/
905 static void check_voltage_type(unsigned long *dimm_populated,
906 unsigned char *iic0_dimm_addr,
907 unsigned long num_dimm_banks)
908 {
909 unsigned long dimm_num;
910 unsigned long voltage_type;
911
912 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
913 if (dimm_populated[dimm_num] != SDRAM_NONE) {
914 voltage_type = spd_read(iic0_dimm_addr[dimm_num], 8);
915 switch (voltage_type) {
916 case 0x00:
917 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
918 printf("This DIMM is 5.0 Volt/TTL.\n");
919 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
920 (unsigned int)dimm_num);
921 spd_ddr_init_hang ();
922 break;
923 case 0x01:
924 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
925 printf("This DIMM is LVTTL.\n");
926 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
927 (unsigned int)dimm_num);
928 spd_ddr_init_hang ();
929 break;
930 case 0x02:
931 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
932 printf("This DIMM is 1.5 Volt.\n");
933 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
934 (unsigned int)dimm_num);
935 spd_ddr_init_hang ();
936 break;
937 case 0x03:
938 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
939 printf("This DIMM is 3.3 Volt/TTL.\n");
940 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
941 (unsigned int)dimm_num);
942 spd_ddr_init_hang ();
943 break;
944 case 0x04:
945 /* 2.5 Voltage only for DDR1 */
946 break;
947 case 0x05:
948 /* 1.8 Voltage only for DDR2 */
949 break;
950 default:
951 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
952 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
953 (unsigned int)dimm_num);
954 spd_ddr_init_hang ();
955 break;
956 }
957 }
958 }
959 }
960
961 /*-----------------------------------------------------------------------------+
962 * program_copt1.
963 *-----------------------------------------------------------------------------*/
964 static void program_copt1(unsigned long *dimm_populated,
965 unsigned char *iic0_dimm_addr,
966 unsigned long num_dimm_banks)
967 {
968 unsigned long dimm_num;
969 unsigned long mcopt1;
970 unsigned long ecc_enabled;
971 unsigned long ecc = 0;
972 unsigned long data_width = 0;
973 unsigned long dimm_32bit;
974 unsigned long dimm_64bit;
975 unsigned long registered = 0;
976 unsigned long attribute = 0;
977 unsigned long buf0, buf1; /* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
978 unsigned long bankcount;
979 unsigned long val;
980
981 #ifdef CONFIG_DDR_ECC
982 ecc_enabled = true;
983 #else
984 ecc_enabled = false;
985 #endif
986 dimm_32bit = false;
987 dimm_64bit = false;
988 buf0 = false;
989 buf1 = false;
990
991 /*------------------------------------------------------------------
992 * Set memory controller options reg 1, SDRAM_MCOPT1.
993 *-----------------------------------------------------------------*/
994 mfsdram(SDRAM_MCOPT1, val);
995 mcopt1 = val & ~(SDRAM_MCOPT1_MCHK_MASK | SDRAM_MCOPT1_RDEN_MASK |
996 SDRAM_MCOPT1_PMU_MASK | SDRAM_MCOPT1_DMWD_MASK |
997 SDRAM_MCOPT1_UIOS_MASK | SDRAM_MCOPT1_BCNT_MASK |
998 SDRAM_MCOPT1_DDR_TYPE_MASK | SDRAM_MCOPT1_RWOO_MASK |
999 SDRAM_MCOPT1_WOOO_MASK | SDRAM_MCOPT1_DCOO_MASK |
1000 SDRAM_MCOPT1_DREF_MASK);
1001
1002 mcopt1 |= SDRAM_MCOPT1_QDEP;
1003 mcopt1 |= SDRAM_MCOPT1_PMU_OPEN;
1004 mcopt1 |= SDRAM_MCOPT1_RWOO_DISABLED;
1005 mcopt1 |= SDRAM_MCOPT1_WOOO_DISABLED;
1006 mcopt1 |= SDRAM_MCOPT1_DCOO_DISABLED;
1007 mcopt1 |= SDRAM_MCOPT1_DREF_NORMAL;
1008
1009 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1010 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1011 /* test ecc support */
1012 ecc = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 11);
1013 if (ecc != 0x02) /* ecc not supported */
1014 ecc_enabled = false;
1015
1016 /* test bank count */
1017 bankcount = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 17);
1018 if (bankcount == 0x04) /* bank count = 4 */
1019 mcopt1 |= SDRAM_MCOPT1_4_BANKS;
1020 else /* bank count = 8 */
1021 mcopt1 |= SDRAM_MCOPT1_8_BANKS;
1022
1023 /* test for buffered/unbuffered, registered, differential clocks */
1024 registered = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 20);
1025 attribute = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 21);
1026
1027 /* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
1028 if (dimm_num == 0) {
1029 if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
1030 mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
1031 if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
1032 mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
1033 if (registered == 1) { /* DDR2 always buffered */
1034 /* TODO: what about above comments ? */
1035 mcopt1 |= SDRAM_MCOPT1_RDEN;
1036 buf0 = true;
1037 } else {
1038 /* TODO: the mask 0x02 doesn't match Samsung def for byte 21. */
1039 if ((attribute & 0x02) == 0x00) {
1040 /* buffered not supported */
1041 buf0 = false;
1042 } else {
1043 mcopt1 |= SDRAM_MCOPT1_RDEN;
1044 buf0 = true;
1045 }
1046 }
1047 }
1048 else if (dimm_num == 1) {
1049 if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
1050 mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
1051 if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
1052 mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
1053 if (registered == 1) {
1054 /* DDR2 always buffered */
1055 mcopt1 |= SDRAM_MCOPT1_RDEN;
1056 buf1 = true;
1057 } else {
1058 if ((attribute & 0x02) == 0x00) {
1059 /* buffered not supported */
1060 buf1 = false;
1061 } else {
1062 mcopt1 |= SDRAM_MCOPT1_RDEN;
1063 buf1 = true;
1064 }
1065 }
1066 }
1067
1068 /* Note that for DDR2 the byte 7 is reserved, but OK to keep code as is. */
1069 data_width = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 6) +
1070 (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 7)) << 8);
1071
1072 switch (data_width) {
1073 case 72:
1074 case 64:
1075 dimm_64bit = true;
1076 break;
1077 case 40:
1078 case 32:
1079 dimm_32bit = true;
1080 break;
1081 default:
1082 printf("WARNING: Detected a DIMM with a data width of %lu bits.\n",
1083 data_width);
1084 printf("Only DIMMs with 32 or 64 bit DDR-SDRAM widths are supported.\n");
1085 break;
1086 }
1087 }
1088 }
1089
1090 /* verify matching properties */
1091 if ((dimm_populated[0] != SDRAM_NONE) && (dimm_populated[1] != SDRAM_NONE)) {
1092 if (buf0 != buf1) {
1093 printf("ERROR: DIMM's buffered/unbuffered, registered, clocking don't match.\n");
1094 spd_ddr_init_hang ();
1095 }
1096 }
1097
1098 if ((dimm_64bit == true) && (dimm_32bit == true)) {
1099 printf("ERROR: Cannot mix 32 bit and 64 bit DDR-SDRAM DIMMs together.\n");
1100 spd_ddr_init_hang ();
1101 } else if ((dimm_64bit == true) && (dimm_32bit == false)) {
1102 mcopt1 |= SDRAM_MCOPT1_DMWD_64;
1103 } else if ((dimm_64bit == false) && (dimm_32bit == true)) {
1104 mcopt1 |= SDRAM_MCOPT1_DMWD_32;
1105 } else {
1106 printf("ERROR: Please install only 32 or 64 bit DDR-SDRAM DIMMs.\n\n");
1107 spd_ddr_init_hang ();
1108 }
1109
1110 if (ecc_enabled == true)
1111 mcopt1 |= SDRAM_MCOPT1_MCHK_GEN;
1112 else
1113 mcopt1 |= SDRAM_MCOPT1_MCHK_NON;
1114
1115 mtsdram(SDRAM_MCOPT1, mcopt1);
1116 }
1117
1118 /*-----------------------------------------------------------------------------+
1119 * program_codt.
1120 *-----------------------------------------------------------------------------*/
1121 static void program_codt(unsigned long *dimm_populated,
1122 unsigned char *iic0_dimm_addr,
1123 unsigned long num_dimm_banks)
1124 {
1125 unsigned long codt;
1126 unsigned long modt0 = 0;
1127 unsigned long modt1 = 0;
1128 unsigned long modt2 = 0;
1129 unsigned long modt3 = 0;
1130 unsigned char dimm_num;
1131 unsigned char dimm_rank;
1132 unsigned char total_rank = 0;
1133 unsigned char total_dimm = 0;
1134 unsigned char dimm_type = 0;
1135 unsigned char firstSlot = 0;
1136
1137 /*------------------------------------------------------------------
1138 * Set the SDRAM Controller On Die Termination Register
1139 *-----------------------------------------------------------------*/
1140 mfsdram(SDRAM_CODT, codt);
1141 codt &= ~(SDRAM_CODT_DQS_SINGLE_END | SDRAM_CODT_CKSE_SINGLE_END);
1142 codt |= SDRAM_CODT_IO_NMODE;
1143
1144 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1145 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1146 dimm_rank = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 5);
1147 if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08) {
1148 dimm_rank = (dimm_rank & 0x0F) + 1;
1149 dimm_type = SDRAM_DDR2;
1150 } else {
1151 dimm_rank = dimm_rank & 0x0F;
1152 dimm_type = SDRAM_DDR1;
1153 }
1154
1155 total_rank += dimm_rank;
1156 total_dimm++;
1157 if ((dimm_num == 0) && (total_dimm == 1))
1158 firstSlot = true;
1159 else
1160 firstSlot = false;
1161 }
1162 }
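/*
 * In the rank-placement comments below (e.g. "PUUU"), each letter is
 * assumed to describe one of the four possible ranks in slot order:
 * P = populated, U = unpopulated.
 */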
1163 if (dimm_type == SDRAM_DDR2) {
1164 codt |= SDRAM_CODT_DQS_1_8_V_DDR2;
1165 if ((total_dimm == 1) && (firstSlot == true)) {
1166 if (total_rank == 1) { /* PUUU */
1167 codt |= CALC_ODT_R(0);
1168 modt0 = CALC_ODT_W(0);
1169 modt1 = 0x00000000;
1170 modt2 = 0x00000000;
1171 modt3 = 0x00000000;
1172 }
1173 if (total_rank == 2) { /* PPUU */
1174 codt |= CALC_ODT_R(0) | CALC_ODT_R(1);
1175 modt0 = CALC_ODT_W(0) | CALC_ODT_W(1);
1176 modt1 = 0x00000000;
1177 modt2 = 0x00000000;
1178 modt3 = 0x00000000;
1179 }
1180 } else if ((total_dimm == 1) && (firstSlot != true)) {
1181 if (total_rank == 1) { /* UUPU */
1182 codt |= CALC_ODT_R(2);
1183 modt0 = 0x00000000;
1184 modt1 = 0x00000000;
1185 modt2 = CALC_ODT_W(2);
1186 modt3 = 0x00000000;
1187 }
1188 if (total_rank == 2) { /* UUPP */
1189 codt |= CALC_ODT_R(2) | CALC_ODT_R(3);
1190 modt0 = 0x00000000;
1191 modt1 = 0x00000000;
1192 modt2 = CALC_ODT_W(2) | CALC_ODT_W(3);
1193 modt3 = 0x00000000;
1194 }
1195 }
1196 if (total_dimm == 2) {
1197 if (total_rank == 2) { /* PUPU */
1198 codt |= CALC_ODT_R(0) | CALC_ODT_R(2);
1199 modt0 = CALC_ODT_RW(2);
1200 modt1 = 0x00000000;
1201 modt2 = CALC_ODT_RW(0);
1202 modt3 = 0x00000000;
1203 }
1204 if (total_rank == 4) { /* PPPP */
1205 codt |= CALC_ODT_R(0) | CALC_ODT_R(1) |
1206 CALC_ODT_R(2) | CALC_ODT_R(3);
1207 modt0 = CALC_ODT_RW(2) | CALC_ODT_RW(3);
1208 modt1 = 0x00000000;
1209 modt2 = CALC_ODT_RW(0) | CALC_ODT_RW(1);
1210 modt3 = 0x00000000;
1211 }
1212 }
1213 } else {
1214 codt |= SDRAM_CODT_DQS_2_5_V_DDR1;
1215 modt0 = 0x00000000;
1216 modt1 = 0x00000000;
1217 modt2 = 0x00000000;
1218 modt3 = 0x00000000;
1219
1220 if (total_dimm == 1) {
1221 if (total_rank == 1)
1222 codt |= 0x00800000;
1223 if (total_rank == 2)
1224 codt |= 0x02800000;
1225 }
1226 if (total_dimm == 2) {
1227 if (total_rank == 2)
1228 codt |= 0x08800000;
1229 if (total_rank == 4)
1230 codt |= 0x2a800000;
1231 }
1232 }
1233
1234 debug("nb of dimm %d\n", total_dimm);
1235 debug("nb of rank %d\n", total_rank);
1236 if (total_dimm == 1)
1237 debug("dimm in slot %d\n", firstSlot);
1238
1239 mtsdram(SDRAM_CODT, codt);
1240 mtsdram(SDRAM_MODT0, modt0);
1241 mtsdram(SDRAM_MODT1, modt1);
1242 mtsdram(SDRAM_MODT2, modt2);
1243 mtsdram(SDRAM_MODT3, modt3);
1244 }
1245
1246 /*-----------------------------------------------------------------------------+
1247 * program_initplr.
1248 *-----------------------------------------------------------------------------*/
1249 static void program_initplr(unsigned long *dimm_populated,
1250 unsigned char *iic0_dimm_addr,
1251 unsigned long num_dimm_banks,
1252 ddr_cas_id_t selected_cas,
1253 int write_recovery)
1254 {
1255 u32 cas = 0;
1256 u32 odt = 0;
1257 u32 ods = 0;
1258 u32 mr;
1259 u32 wr;
1260 u32 emr;
1261 u32 emr2;
1262 u32 emr3;
1263 int dimm_num;
1264 int total_dimm = 0;
1265
1266 /******************************************************
1267 ** Assumption: if more than one DIMM, all DIMMs are the same
1268 ** as already checked in check_mem_type
1269 ******************************************************/
1270
1271 if ((dimm_populated[0] == SDRAM_DDR1) || (dimm_populated[1] == SDRAM_DDR1)) {
1272 mtsdram(SDRAM_INITPLR0, 0x81B80000);
1273 mtsdram(SDRAM_INITPLR1, 0x81900400);
1274 mtsdram(SDRAM_INITPLR2, 0x81810000);
1275 mtsdram(SDRAM_INITPLR3, 0xff800162);
1276 mtsdram(SDRAM_INITPLR4, 0x81900400);
1277 mtsdram(SDRAM_INITPLR5, 0x86080000);
1278 mtsdram(SDRAM_INITPLR6, 0x86080000);
1279 mtsdram(SDRAM_INITPLR7, 0x81000062);
1280 } else if ((dimm_populated[0] == SDRAM_DDR2) || (dimm_populated[1] == SDRAM_DDR2)) {
1281 switch (selected_cas) {
1282 case DDR_CAS_3:
1283 cas = 3 << 4;
1284 break;
1285 case DDR_CAS_4:
1286 cas = 4 << 4;
1287 break;
1288 case DDR_CAS_5:
1289 cas = 5 << 4;
1290 break;
1291 default:
1292 printf("ERROR: ucode error on selected_cas value %d", selected_cas);
1293 spd_ddr_init_hang ();
1294 break;
1295 }
1296
1297 #if 0
1298 /*
1299 * ToDo - Still a problem with the write recovery:
1300 * On the Corsair CM2X512-5400C4 module, setting write recovery
1301 * in the INITPLR reg to the value calculated in program_mode()
1302 * results in not correctly working DDR2 memory (crash after
1303 * relocation).
1304 *
1305 * So for now, set the write recovery to 3. This seems to work
1306 * on the Corair module too.
1307 *
1308 * 2007-03-01, sr
1309 */
1310 switch (write_recovery) {
1311 case 3:
1312 wr = WRITE_RECOV_3;
1313 break;
1314 case 4:
1315 wr = WRITE_RECOV_4;
1316 break;
1317 case 5:
1318 wr = WRITE_RECOV_5;
1319 break;
1320 case 6:
1321 wr = WRITE_RECOV_6;
1322 break;
1323 default:
1324 printf("ERROR: write recovery not support (%d)", write_recovery);
1325 spd_ddr_init_hang ();
1326 break;
1327 }
1328 #else
1329 wr = WRITE_RECOV_3; /* test-only, see description above */
1330 #endif
1331
1332 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++)
1333 if (dimm_populated[dimm_num] != SDRAM_NONE)
1334 total_dimm++;
1335 if (total_dimm == 1) {
1336 odt = ODT_150_OHM;
1337 ods = ODS_FULL;
1338 } else if (total_dimm == 2) {
1339 odt = ODT_75_OHM;
1340 ods = ODS_REDUCED;
1341 } else {
1342 printf("ERROR: Unsupported number of DIMM's (%d)", total_dimm);
1343 spd_ddr_init_hang ();
1344 }
1345
1346 mr = CMD_EMR | SELECT_MR | BURST_LEN_4 | wr | cas;
1347 emr = CMD_EMR | SELECT_EMR | odt | ods;
1348 emr2 = CMD_EMR | SELECT_EMR2;
1349 emr3 = CMD_EMR | SELECT_EMR3;
1350 /* NOP - Wait 106 MemClk cycles */
1351 mtsdram(SDRAM_INITPLR0, SDRAM_INITPLR_ENABLE | CMD_NOP |
1352 SDRAM_INITPLR_IMWT_ENCODE(106));
1353 udelay(1000);
1354 /* precharge 4 MemClk cycles */
1355 mtsdram(SDRAM_INITPLR1, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
1356 SDRAM_INITPLR_IMWT_ENCODE(4));
1357 /* EMR2 - Wait tMRD (2 MemClk cycles) */
1358 mtsdram(SDRAM_INITPLR2, SDRAM_INITPLR_ENABLE | emr2 |
1359 SDRAM_INITPLR_IMWT_ENCODE(2));
1360 /* EMR3 - Wait tMRD (2 MemClk cycles) */
1361 mtsdram(SDRAM_INITPLR3, SDRAM_INITPLR_ENABLE | emr3 |
1362 SDRAM_INITPLR_IMWT_ENCODE(2));
1363 /* EMR DLL ENABLE - Wait tMRD (2 MemClk cycles) */
1364 mtsdram(SDRAM_INITPLR4, SDRAM_INITPLR_ENABLE | emr |
1365 SDRAM_INITPLR_IMWT_ENCODE(2));
1366 /* MR w/ DLL reset - 200 cycle wait for DLL reset */
1367 mtsdram(SDRAM_INITPLR5, SDRAM_INITPLR_ENABLE | mr | DLL_RESET |
1368 SDRAM_INITPLR_IMWT_ENCODE(200));
1369 udelay(1000);
1370 /* precharge 4 MemClk cycles */
1371 mtsdram(SDRAM_INITPLR6, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
1372 SDRAM_INITPLR_IMWT_ENCODE(4));
1373 /* Refresh 25 MemClk cycles */
1374 mtsdram(SDRAM_INITPLR7, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1375 SDRAM_INITPLR_IMWT_ENCODE(25));
1376 /* Refresh 25 MemClk cycles */
1377 mtsdram(SDRAM_INITPLR8, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1378 SDRAM_INITPLR_IMWT_ENCODE(25));
1379 /* Refresh 25 MemClk cycles */
1380 mtsdram(SDRAM_INITPLR9, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1381 SDRAM_INITPLR_IMWT_ENCODE(25));
1382 /* Refresh 25 MemClk cycles */
1383 mtsdram(SDRAM_INITPLR10, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1384 SDRAM_INITPLR_IMWT_ENCODE(25));
1385 /* MR w/o DLL reset - Wait tMRD (2 MemClk cycles) */
1386 mtsdram(SDRAM_INITPLR11, SDRAM_INITPLR_ENABLE | mr |
1387 SDRAM_INITPLR_IMWT_ENCODE(2));
1388 /* EMR OCD Default - Wait tMRD (2 MemClk cycles) */
1389 mtsdram(SDRAM_INITPLR12, SDRAM_INITPLR_ENABLE | OCD_CALIB_DEF |
1390 SDRAM_INITPLR_IMWT_ENCODE(2) | emr);
1391 /* EMR OCD Exit */
1392 mtsdram(SDRAM_INITPLR13, SDRAM_INITPLR_ENABLE | emr |
1393 SDRAM_INITPLR_IMWT_ENCODE(2));
1394 } else {
1395 printf("ERROR: ucode error as unknown DDR type in program_initplr");
1396 spd_ddr_init_hang ();
1397 }
1398 }
1399
1400 /*------------------------------------------------------------------
1401 * This routine programs the SDRAM_MMODE register.
1402 * selected_cas is an output parameter that the caller
1403 * later passes to program_initplr() above.
1404 *-----------------------------------------------------------------*/
1405 static void program_mode(unsigned long *dimm_populated,
1406 unsigned char *iic0_dimm_addr,
1407 unsigned long num_dimm_banks,
1408 ddr_cas_id_t *selected_cas,
1409 int *write_recovery)
1410 {
1411 unsigned long dimm_num;
1412 unsigned long sdram_ddr1;
1413 unsigned long t_wr_ns;
1414 unsigned long t_wr_clk;
1415 unsigned long cas_bit;
1416 unsigned long cas_index;
1417 unsigned long sdram_freq;
1418 unsigned long ddr_check;
1419 unsigned long mmode;
1420 unsigned long tcyc_reg;
1421 unsigned long cycle_2_0_clk;
1422 unsigned long cycle_2_5_clk;
1423 unsigned long cycle_3_0_clk;
1424 unsigned long cycle_4_0_clk;
1425 unsigned long cycle_5_0_clk;
1426 unsigned long max_2_0_tcyc_ns_x_100;
1427 unsigned long max_2_5_tcyc_ns_x_100;
1428 unsigned long max_3_0_tcyc_ns_x_100;
1429 unsigned long max_4_0_tcyc_ns_x_100;
1430 unsigned long max_5_0_tcyc_ns_x_100;
1431 unsigned long cycle_time_ns_x_100[3];
1432 PPC4xx_SYS_INFO board_cfg;
1433 unsigned char cas_2_0_available;
1434 unsigned char cas_2_5_available;
1435 unsigned char cas_3_0_available;
1436 unsigned char cas_4_0_available;
1437 unsigned char cas_5_0_available;
1438 unsigned long sdr_ddrpll;
1439
1440 /*------------------------------------------------------------------
1441 * Get the board configuration info.
1442 *-----------------------------------------------------------------*/
1443 get_sys_info(&board_cfg);
1444
1445 mfsdr(SDR0_DDR0, sdr_ddrpll);
1446 sdram_freq = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(sdr_ddrpll), 1);
1447 debug("sdram_freq=%lu\n", sdram_freq);
1448
1449 /*------------------------------------------------------------------
1450 * Handle the timing. We need to find the worst case timing of all
1451 * the dimm modules installed.
1452 *-----------------------------------------------------------------*/
1453 t_wr_ns = 0;
1454 cas_2_0_available = true;
1455 cas_2_5_available = true;
1456 cas_3_0_available = true;
1457 cas_4_0_available = true;
1458 cas_5_0_available = true;
1459 max_2_0_tcyc_ns_x_100 = 10;
1460 max_2_5_tcyc_ns_x_100 = 10;
1461 max_3_0_tcyc_ns_x_100 = 10;
1462 max_4_0_tcyc_ns_x_100 = 10;
1463 max_5_0_tcyc_ns_x_100 = 10;
1464 sdram_ddr1 = true;
1465
1466 /* loop through all the DIMM slots on the board */
1467 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1468 /* If a dimm is installed in a particular slot ... */
1469 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1470 if (dimm_populated[dimm_num] == SDRAM_DDR1)
1471 sdram_ddr1 = true;
1472 else
1473 sdram_ddr1 = false;
1474
1475 cas_bit = spd_read(iic0_dimm_addr[dimm_num], 18);
1476 debug("cas_bit[SPD byte 18]=%02lx\n", cas_bit);
1477
1478 /* For a particular DIMM, grab the three CAS values it supports */
1479 for (cas_index = 0; cas_index < 3; cas_index++) {
1480 switch (cas_index) {
1481 case 0:
1482 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
1483 break;
1484 case 1:
1485 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 23);
1486 break;
1487 default:
1488 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 25);
1489 break;
1490 }
1491
1492 if ((tcyc_reg & 0x0F) >= 10) {
1493 if ((tcyc_reg & 0x0F) == 0x0D) {
1494 /* Convert from hex to decimal */
1495 cycle_time_ns_x_100[cas_index] =
1496 (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
1497 } else {
1498 printf("ERROR: SPD reported Tcyc is incorrect for DIMM "
1499 "in slot %d\n", (unsigned int)dimm_num);
1500 spd_ddr_init_hang ();
1501 }
1502 } else {
1503 /* Convert from hex to decimal */
1504 cycle_time_ns_x_100[cas_index] =
1505 (((tcyc_reg & 0xF0) >> 4) * 100) +
1506 ((tcyc_reg & 0x0F)*10);
1507 }
1508 debug("cas_index=%lu: cycle_time_ns_x_100=%lu\n", cas_index,
1509 cycle_time_ns_x_100[cas_index]);
1510 }
1511
1512 /* The rest of this routine determines if CAS 2.0, 2.5, 3.0, 4.0 and 5.0 are */
1513 /* supported for a particular DIMM. */
1514 cas_index = 0;
1515
1516 if (sdram_ddr1) {
1517 /*
1518 * DDR devices use the following bitmask for CAS latency:
1519 * Bit 7 6 5 4 3 2 1 0
1520 * TBD 4.0 3.5 3.0 2.5 2.0 1.5 1.0
1521 */
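/*
 * Example (illustrative): cas_bit = 0x1C sets bits 2, 3 and 4, i.e. the
 * module supports CAS 2.0, 2.5 and 3.0; SPD bytes 9, 23 and 25 read above
 * then hold the minimum cycle time at the highest, second highest and
 * third highest supported CAS latency respectively.
 */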
1522 if (((cas_bit & 0x40) == 0x40) && (cas_index < 3) &&
1523 (cycle_time_ns_x_100[cas_index] != 0)) {
1524 max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
1525 cycle_time_ns_x_100[cas_index]);
1526 cas_index++;
1527 } else {
1528 if (cas_index != 0)
1529 cas_index++;
1530 cas_4_0_available = false;
1531 }
1532
1533 if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
1534 (cycle_time_ns_x_100[cas_index] != 0)) {
1535 max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
1536 cycle_time_ns_x_100[cas_index]);
1537 cas_index++;
1538 } else {
1539 if (cas_index != 0)
1540 cas_index++;
1541 cas_3_0_available = false;
1542 }
1543
1544 if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
1545 (cycle_time_ns_x_100[cas_index] != 0)) {
1546 max_2_5_tcyc_ns_x_100 = max(max_2_5_tcyc_ns_x_100,
1547 cycle_time_ns_x_100[cas_index]);
1548 cas_index++;
1549 } else {
1550 if (cas_index != 0)
1551 cas_index++;
1552 cas_2_5_available = false;
1553 }
1554
1555 if (((cas_bit & 0x04) == 0x04) && (cas_index < 3) &&
1556 (cycle_time_ns_x_100[cas_index] != 0)) {
1557 max_2_0_tcyc_ns_x_100 = max(max_2_0_tcyc_ns_x_100,
1558 cycle_time_ns_x_100[cas_index]);
1559 cas_index++;
1560 } else {
1561 if (cas_index != 0)
1562 cas_index++;
1563 cas_2_0_available = false;
1564 }
1565 } else {
1566 /*
1567 * DDR2 devices use the following bitmask for CAS latency:
1568 * Bit 7 6 5 4 3 2 1 0
1569 * TBD 6.0 5.0 4.0 3.0 2.0 TBD TBD
1570 */
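/*
 * Example (illustrative): cas_bit = 0x38 sets bits 3, 4 and 5, i.e. the
 * module supports CAS 3.0, 4.0 and 5.0, matching the three cycle times
 * read from SPD bytes 9, 23 and 25 above.
 */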
1571 if (((cas_bit & 0x20) == 0x20) && (cas_index < 3) &&
1572 (cycle_time_ns_x_100[cas_index] != 0)) {
1573 max_5_0_tcyc_ns_x_100 = max(max_5_0_tcyc_ns_x_100,
1574 cycle_time_ns_x_100[cas_index]);
1575 cas_index++;
1576 } else {
1577 if (cas_index != 0)
1578 cas_index++;
1579 cas_5_0_available = false;
1580 }
1581
1582 if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
1583 (cycle_time_ns_x_100[cas_index] != 0)) {
1584 max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
1585 cycle_time_ns_x_100[cas_index]);
1586 cas_index++;
1587 } else {
1588 if (cas_index != 0)
1589 cas_index++;
1590 cas_4_0_available = false;
1591 }
1592
1593 if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
1594 (cycle_time_ns_x_100[cas_index] != 0)) {
1595 max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
1596 cycle_time_ns_x_100[cas_index]);
1597 cas_index++;
1598 } else {
1599 if (cas_index != 0)
1600 cas_index++;
1601 cas_3_0_available = false;
1602 }
1603 }
1604 }
1605 }
1606
1607 /*------------------------------------------------------------------
1608 * Set the SDRAM mode, SDRAM_MMODE
1609 *-----------------------------------------------------------------*/
1610 mfsdram(SDRAM_MMODE, mmode);
1611 mmode = mmode & ~(SDRAM_MMODE_WR_MASK | SDRAM_MMODE_DCL_MASK);
1612
1613 /* add 10 here because of rounding problems */
1614 cycle_2_0_clk = MULDIV64(ONE_BILLION, 100, max_2_0_tcyc_ns_x_100) + 10;
1615 cycle_2_5_clk = MULDIV64(ONE_BILLION, 100, max_2_5_tcyc_ns_x_100) + 10;
1616 cycle_3_0_clk = MULDIV64(ONE_BILLION, 100, max_3_0_tcyc_ns_x_100) + 10;
1617 cycle_4_0_clk = MULDIV64(ONE_BILLION, 100, max_4_0_tcyc_ns_x_100) + 10;
1618 cycle_5_0_clk = MULDIV64(ONE_BILLION, 100, max_5_0_tcyc_ns_x_100) + 10;
1619 debug("cycle_3_0_clk=%lu\n", cycle_3_0_clk);
1620 debug("cycle_4_0_clk=%lu\n", cycle_4_0_clk);
1621 debug("cycle_5_0_clk=%lu\n", cycle_5_0_clk);
1622
1623 if (sdram_ddr1 == true) { /* DDR1 */
1624 if ((cas_2_0_available == true) &&
1625 (sdram_freq <= cycle_2_0_clk)) {
1626 mmode |= SDRAM_MMODE_DCL_DDR1_2_0_CLK;
1627 *selected_cas = DDR_CAS_2;
1628 } else if ((cas_2_5_available == true) &&
1629 (sdram_freq <= cycle_2_5_clk)) {
1630 mmode |= SDRAM_MMODE_DCL_DDR1_2_5_CLK;
1631 *selected_cas = DDR_CAS_2_5;
1632 } else if ((cas_3_0_available == true) &&
1633 (sdram_freq <= cycle_3_0_clk)) {
1634 mmode |= SDRAM_MMODE_DCL_DDR1_3_0_CLK;
1635 *selected_cas = DDR_CAS_3;
1636 } else {
1637 printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1638 printf("Only DDR1 DIMMs with CAS latencies of 2.0, 2.5, and 3.0 are supported.\n");
1639 printf("Make sure the PLB speed is within the supported range of the DIMMs.\n\n");
1640 spd_ddr_init_hang ();
1641 }
1642 } else { /* DDR2 */
1643 debug("cas_3_0_available=%d\n", cas_3_0_available);
1644 debug("cas_4_0_available=%d\n", cas_4_0_available);
1645 debug("cas_5_0_available=%d\n", cas_5_0_available);
1646 if ((cas_3_0_available == true) &&
1647 (sdram_freq <= cycle_3_0_clk)) {
1648 mmode |= SDRAM_MMODE_DCL_DDR2_3_0_CLK;
1649 *selected_cas = DDR_CAS_3;
1650 } else if ((cas_4_0_available == true) &&
1651 (sdram_freq <= cycle_4_0_clk)) {
1652 mmode |= SDRAM_MMODE_DCL_DDR2_4_0_CLK;
1653 *selected_cas = DDR_CAS_4;
1654 } else if ((cas_5_0_available == true) &&
1655 (sdram_freq <= cycle_5_0_clk)) {
1656 mmode |= SDRAM_MMODE_DCL_DDR2_5_0_CLK;
1657 *selected_cas = DDR_CAS_5;
1658 } else {
1659 printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1660 printf("Only DDR2 DIMMs with CAS latencies of 3.0, 4.0, and 5.0 are supported.\n");
1661 printf("Make sure the PLB speed is within the supported range of the DIMMs.\n");
1662 printf("cas3=%d cas4=%d cas5=%d\n",
1663 cas_3_0_available, cas_4_0_available, cas_5_0_available);
1664 printf("sdram_freq=%lu cycle3=%lu cycle4=%lu cycle5=%lu\n\n",
1665 sdram_freq, cycle_3_0_clk, cycle_4_0_clk, cycle_5_0_clk);
1666 spd_ddr_init_hang ();
1667 }
1668 }
1669
1670 if (sdram_ddr1 == true)
1671 mmode |= SDRAM_MMODE_WR_DDR1;
1672 else {
1673
1674 /* loop through all the DIMM slots on the board */
1675 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1676 /* If a dimm is installed in a particular slot ... */
1677 if (dimm_populated[dimm_num] != SDRAM_NONE)
1678 t_wr_ns = max(t_wr_ns, (unsigned long)
1679 spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
1680 }
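		/*
		 * SPD byte 36 (write recovery time, tWR) is presumably encoded
		 * in quarter-nanosecond units, so the ">> 2" above keeps only
		 * whole nanoseconds; e.g. a raw value of 60 reads as 15 ns.
		 */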
1681
1682 /*
1683 * convert from nanoseconds to ddr clocks
1684 * round up if necessary
1685 */
1686 t_wr_clk = MULDIV64(sdram_freq, t_wr_ns, ONE_BILLION);
1687 ddr_check = MULDIV64(ONE_BILLION, t_wr_clk, t_wr_ns);
1688 if (sdram_freq != ddr_check)
1689 t_wr_clk++;
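		/*
		 * The MULDIV64() pair above is the round-up idiom used
		 * throughout this file: the first division truncates, and
		 * recomputing the frequency from the truncated result only
		 * matches sdram_freq when the division was exact. E.g. at
		 * 200 MHz, t_wr_ns = 15 gives t_wr_clk = 3 exactly (no bump),
		 * while t_wr_ns = 13 gives 2 from 2.6, ddr_check then differs
		 * and t_wr_clk is bumped to 3.
		 */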
1690
1691 switch (t_wr_clk) {
1692 case 0:
1693 case 1:
1694 case 2:
1695 case 3:
1696 mmode |= SDRAM_MMODE_WR_DDR2_3_CYC;
1697 break;
1698 case 4:
1699 mmode |= SDRAM_MMODE_WR_DDR2_4_CYC;
1700 break;
1701 case 5:
1702 mmode |= SDRAM_MMODE_WR_DDR2_5_CYC;
1703 break;
1704 default:
1705 mmode |= SDRAM_MMODE_WR_DDR2_6_CYC;
1706 break;
1707 }
1708 *write_recovery = t_wr_clk;
1709 }
1710
1711 debug("CAS latency = %d\n", *selected_cas);
1712 debug("Write recovery = %d\n", *write_recovery);
1713
1714 mtsdram(SDRAM_MMODE, mmode);
1715 }
1716
1717 /*-----------------------------------------------------------------------------+
1718 * program_rtr.
1719 *-----------------------------------------------------------------------------*/
1720 static void program_rtr(unsigned long *dimm_populated,
1721 unsigned char *iic0_dimm_addr,
1722 unsigned long num_dimm_banks)
1723 {
1724 PPC4xx_SYS_INFO board_cfg;
1725 unsigned long max_refresh_rate;
1726 unsigned long dimm_num;
1727 unsigned long refresh_rate_type;
1728 unsigned long refresh_rate;
1729 unsigned long rint;
1730 unsigned long sdram_freq;
1731 unsigned long sdr_ddrpll;
1732 unsigned long val;
1733
1734 /*------------------------------------------------------------------
1735 * Get the board configuration info.
1736 *-----------------------------------------------------------------*/
1737 get_sys_info(&board_cfg);
1738
1739 /*------------------------------------------------------------------
1740 * Set the SDRAM Refresh Timing Register, SDRAM_RTR
1741 *-----------------------------------------------------------------*/
1742 mfsdr(SDR0_DDR0, sdr_ddrpll);
1743 sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
1744
1745 max_refresh_rate = 0;
1746 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1747 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1748
1749 refresh_rate_type = spd_read(iic0_dimm_addr[dimm_num], 12);
1750 refresh_rate_type &= 0x7F;
1751 switch (refresh_rate_type) {
1752 case 0:
1753 refresh_rate = 15625;
1754 break;
1755 case 1:
1756 refresh_rate = 3906;
1757 break;
1758 case 2:
1759 refresh_rate = 7812;
1760 break;
1761 case 3:
1762 refresh_rate = 31250;
1763 break;
1764 case 4:
1765 refresh_rate = 62500;
1766 break;
1767 case 5:
1768 refresh_rate = 125000;
1769 break;
1770 default:
1771 refresh_rate = 0;
1772 printf("ERROR: DIMM %d unsupported refresh rate/type.\n",
1773 (unsigned int)dimm_num);
1774 printf("Replace the DIMM module with a supported DIMM.\n\n");
1775 spd_ddr_init_hang ();
1776 break;
1777 }
1778
1779 max_refresh_rate = max(max_refresh_rate, refresh_rate);
1780 }
1781 }
1782
1783 rint = MULDIV64(sdram_freq, max_refresh_rate, ONE_BILLION);
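	/*
	 * max_refresh_rate is the largest refresh interval (in ns) reported
	 * by any installed DIMM via SPD byte 12. Rough example: the common
	 * DDR2 value of 7812 ns (7.8125 us) at a 200 MHz DDR clock gives
	 * rint = 200,000,000 * 7812 / 1,000,000,000 = 1562 clocks between
	 * refresh commands.
	 */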
1784 mfsdram(SDRAM_RTR, val);
1785 mtsdram(SDRAM_RTR, (val & ~SDRAM_RTR_RINT_MASK) |
1786 (SDRAM_RTR_RINT_ENCODE(rint)));
1787 }
1788
1789 /*------------------------------------------------------------------
1790 * This routine programs the SDRAM_TRx registers.
1791 *-----------------------------------------------------------------*/
1792 static void program_tr(unsigned long *dimm_populated,
1793 unsigned char *iic0_dimm_addr,
1794 unsigned long num_dimm_banks)
1795 {
1796 unsigned long dimm_num;
1797 unsigned long sdram_ddr1;
1798 unsigned long t_rp_ns;
1799 unsigned long t_rcd_ns;
1800 unsigned long t_rrd_ns;
1801 unsigned long t_ras_ns;
1802 unsigned long t_rc_ns;
1803 unsigned long t_rfc_ns;
1804 unsigned long t_wpc_ns;
1805 unsigned long t_wtr_ns;
1806 unsigned long t_rpc_ns;
1807 unsigned long t_rp_clk;
1808 unsigned long t_rcd_clk;
1809 unsigned long t_rrd_clk;
1810 unsigned long t_ras_clk;
1811 unsigned long t_rc_clk;
1812 unsigned long t_rfc_clk;
1813 unsigned long t_wpc_clk;
1814 unsigned long t_wtr_clk;
1815 unsigned long t_rpc_clk;
1816 unsigned long sdtr1, sdtr2, sdtr3;
1817 unsigned long ddr_check;
1818 unsigned long sdram_freq;
1819 unsigned long sdr_ddrpll;
1820
1821 PPC4xx_SYS_INFO board_cfg;
1822
1823 /*------------------------------------------------------------------
1824 * Get the board configuration info.
1825 *-----------------------------------------------------------------*/
1826 get_sys_info(&board_cfg);
1827
1828 mfsdr(SDR0_DDR0, sdr_ddrpll);
1829 sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
1830
1831 /*------------------------------------------------------------------
1832 * Handle the timing. We need to find the worst case timing of all
1833 * the dimm modules installed.
1834 *-----------------------------------------------------------------*/
1835 t_rp_ns = 0;
1836 t_rrd_ns = 0;
1837 t_rcd_ns = 0;
1838 t_ras_ns = 0;
1839 t_rc_ns = 0;
1840 t_rfc_ns = 0;
1841 t_wpc_ns = 0;
1842 t_wtr_ns = 0;
1843 t_rpc_ns = 0;
1844 sdram_ddr1 = true;
1845
1846 /* loop through all the DIMM slots on the board */
1847 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1848 /* If a dimm is installed in a particular slot ... */
1849 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1850 if (dimm_populated[dimm_num] == SDRAM_DDR2)
1851 sdram_ddr1 = true;
1852 else
1853 sdram_ddr1 = false;
1854
1855 t_rcd_ns = max(t_rcd_ns,
1856 (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 29) >> 2);
1857 t_rrd_ns = max(t_rrd_ns,
1858 (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 28) >> 2);
1859 t_rp_ns = max(t_rp_ns,
1860 (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 27) >> 2);
1861 t_ras_ns = max(t_ras_ns,
1862 (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 30));
1863 t_rc_ns = max(t_rc_ns,
1864 (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 41));
1865 t_rfc_ns = max(t_rfc_ns,
1866 (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 42));
1867 }
1868 }
1869
1870 /*------------------------------------------------------------------
1871 * Set the SDRAM Timing Reg 1, SDRAM_TR1
1872 *-----------------------------------------------------------------*/
1873 mfsdram(SDRAM_SDTR1, sdtr1);
1874 sdtr1 &= ~(SDRAM_SDTR1_LDOF_MASK | SDRAM_SDTR1_RTW_MASK |
1875 SDRAM_SDTR1_WTWO_MASK | SDRAM_SDTR1_RTRO_MASK);
1876
1877 /* default values */
1878 sdtr1 |= SDRAM_SDTR1_LDOF_2_CLK;
1879 sdtr1 |= SDRAM_SDTR1_RTW_2_CLK;
1880
1881 /* normal operations */
1882 sdtr1 |= SDRAM_SDTR1_WTWO_0_CLK;
1883 sdtr1 |= SDRAM_SDTR1_RTRO_1_CLK;
1884
1885 mtsdram(SDRAM_SDTR1, sdtr1);
1886
1887 /*------------------------------------------------------------------
1888 * Set the SDRAM Timing Reg 2, SDRAM_TR2
1889 *-----------------------------------------------------------------*/
1890 mfsdram(SDRAM_SDTR2, sdtr2);
1891 sdtr2 &= ~(SDRAM_SDTR2_RCD_MASK | SDRAM_SDTR2_WTR_MASK |
1892 SDRAM_SDTR2_XSNR_MASK | SDRAM_SDTR2_WPC_MASK |
1893 SDRAM_SDTR2_RPC_MASK | SDRAM_SDTR2_RP_MASK |
1894 SDRAM_SDTR2_RRD_MASK);
1895
1896 /*
1897 * convert t_rcd from nanoseconds to ddr clocks
1898 * round up if necessary
1899 */
1900 t_rcd_clk = MULDIV64(sdram_freq, t_rcd_ns, ONE_BILLION);
1901 ddr_check = MULDIV64(ONE_BILLION, t_rcd_clk, t_rcd_ns);
1902 if (sdram_freq != ddr_check)
1903 t_rcd_clk++;
1904
1905 switch (t_rcd_clk) {
1906 case 0:
1907 case 1:
1908 sdtr2 |= SDRAM_SDTR2_RCD_1_CLK;
1909 break;
1910 case 2:
1911 sdtr2 |= SDRAM_SDTR2_RCD_2_CLK;
1912 break;
1913 case 3:
1914 sdtr2 |= SDRAM_SDTR2_RCD_3_CLK;
1915 break;
1916 case 4:
1917 sdtr2 |= SDRAM_SDTR2_RCD_4_CLK;
1918 break;
1919 default:
1920 sdtr2 |= SDRAM_SDTR2_RCD_5_CLK;
1921 break;
1922 }
1923
1924 if (sdram_ddr1 == true) { /* DDR1 */
1925 if (sdram_freq < 200000000) {
1926 sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
1927 sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
1928 sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
1929 } else {
1930 sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
1931 sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
1932 sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
1933 }
1934 } else { /* DDR2 */
1935 /* loop through all the DIMM slots on the board */
1936 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1937 /* If a dimm is installed in a particular slot ... */
1938 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1939 t_wpc_ns = max(t_wpc_ns,
1940 (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
1941 t_wtr_ns = max(t_wtr_ns,
1942 (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 37) >> 2);
1943 t_rpc_ns = max(t_rpc_ns,
1944 (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 38) >> 2);
1945 }
1946 }
1947
1948 /*
1949 * convert from nanoseconds to ddr clocks
1950 * round up if necessary
1951 */
1952 t_wpc_clk = MULDIV64(sdram_freq, t_wpc_ns, ONE_BILLION);
1953 ddr_check = MULDIV64(ONE_BILLION, t_wpc_clk, t_wpc_ns);
1954 if (sdram_freq != ddr_check)
1955 t_wpc_clk++;
1956
1957 switch (t_wpc_clk) {
1958 case 0:
1959 case 1:
1960 case 2:
1961 sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
1962 break;
1963 case 3:
1964 sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
1965 break;
1966 case 4:
1967 sdtr2 |= SDRAM_SDTR2_WPC_4_CLK;
1968 break;
1969 case 5:
1970 sdtr2 |= SDRAM_SDTR2_WPC_5_CLK;
1971 break;
1972 default:
1973 sdtr2 |= SDRAM_SDTR2_WPC_6_CLK;
1974 break;
1975 }
1976
1977 /*
1978 * convert from nanoseconds to ddr clocks
1979 * round up if necessary
1980 */
1981 t_wtr_clk = MULDIV64(sdram_freq, t_wtr_ns, ONE_BILLION);
1982 ddr_check = MULDIV64(ONE_BILLION, t_wtr_clk, t_wtr_ns);
1983 if (sdram_freq != ddr_check)
1984 t_wtr_clk++;
1985
1986 switch (t_wtr_clk) {
1987 case 0:
1988 case 1:
1989 sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
1990 break;
1991 case 2:
1992 sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
1993 break;
1994 case 3:
1995 sdtr2 |= SDRAM_SDTR2_WTR_3_CLK;
1996 break;
1997 default:
1998 sdtr2 |= SDRAM_SDTR2_WTR_4_CLK;
1999 break;
2000 }
2001
2002 /*
2003 * convert from nanoseconds to ddr clocks
2004 * round up if necessary
2005 */
2006 t_rpc_clk = MULDIV64(sdram_freq, t_rpc_ns, ONE_BILLION);
2007 ddr_check = MULDIV64(ONE_BILLION, t_rpc_clk, t_rpc_ns);
2008 if (sdram_freq != ddr_check)
2009 t_rpc_clk++;
2010
2011 switch (t_rpc_clk) {
2012 case 0:
2013 case 1:
2014 case 2:
2015 sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
2016 break;
2017 case 3:
2018 sdtr2 |= SDRAM_SDTR2_RPC_3_CLK;
2019 break;
2020 default:
2021 sdtr2 |= SDRAM_SDTR2_RPC_4_CLK;
2022 break;
2023 }
2024 }
2025
2026 /* default value */
2027 sdtr2 |= SDRAM_SDTR2_XSNR_16_CLK;
2028
2029 /*
2030 * convert t_rrd from nanoseconds to ddr clocks
2031 * round up if necessary
2032 */
2033 t_rrd_clk = MULDIV64(sdram_freq, t_rrd_ns, ONE_BILLION);
2034 ddr_check = MULDIV64(ONE_BILLION, t_rrd_clk, t_rrd_ns);
2035 if (sdram_freq != ddr_check)
2036 t_rrd_clk++;
2037
2038 if (t_rrd_clk == 3)
2039 sdtr2 |= SDRAM_SDTR2_RRD_3_CLK;
2040 else
2041 sdtr2 |= SDRAM_SDTR2_RRD_2_CLK;
2042
2043 /*
2044 * convert t_rp from nanoseconds to ddr clocks
2045 * round up if necessary
2046 */
2047 t_rp_clk = MULDIV64(sdram_freq, t_rp_ns, ONE_BILLION);
2048 ddr_check = MULDIV64(ONE_BILLION, t_rp_clk, t_rp_ns);
2049 if (sdram_freq != ddr_check)
2050 t_rp_clk++;
2051
2052 switch (t_rp_clk) {
2053 case 0:
2054 case 1:
2055 case 2:
2056 case 3:
2057 sdtr2 |= SDRAM_SDTR2_RP_3_CLK;
2058 break;
2059 case 4:
2060 sdtr2 |= SDRAM_SDTR2_RP_4_CLK;
2061 break;
2062 case 5:
2063 sdtr2 |= SDRAM_SDTR2_RP_5_CLK;
2064 break;
2065 case 6:
2066 sdtr2 |= SDRAM_SDTR2_RP_6_CLK;
2067 break;
2068 default:
2069 sdtr2 |= SDRAM_SDTR2_RP_7_CLK;
2070 break;
2071 }
2072
2073 mtsdram(SDRAM_SDTR2, sdtr2);
2074
2075 /*------------------------------------------------------------------
2076 * Set the SDRAM Timing Reg 3, SDRAM_TR3
2077 *-----------------------------------------------------------------*/
2078 mfsdram(SDRAM_SDTR3, sdtr3);
2079 sdtr3 &= ~(SDRAM_SDTR3_RAS_MASK | SDRAM_SDTR3_RC_MASK |
2080 SDRAM_SDTR3_XCS_MASK | SDRAM_SDTR3_RFC_MASK);
2081
2082 /*
2083 * convert t_ras from nanoseconds to ddr clocks
2084 * round up if necessary
2085 */
2086 t_ras_clk = MULDIV64(sdram_freq, t_ras_ns, ONE_BILLION);
2087 ddr_check = MULDIV64(ONE_BILLION, t_ras_clk, t_ras_ns);
2088 if (sdram_freq != ddr_check)
2089 t_ras_clk++;
2090
2091 sdtr3 |= SDRAM_SDTR3_RAS_ENCODE(t_ras_clk);
2092
2093 /*
2094 * convert t_rc from nanoseconds to ddr clocks
2095 * round up if necessary
2096 */
2097 t_rc_clk = MULDIV64(sdram_freq, t_rc_ns, ONE_BILLION);
2098 ddr_check = MULDIV64(ONE_BILLION, t_rc_clk, t_rc_ns);
2099 if (sdram_freq != ddr_check)
2100 t_rc_clk++;
2101
2102 sdtr3 |= SDRAM_SDTR3_RC_ENCODE(t_rc_clk);
2103
2104 /* default xcs value */
2105 sdtr3 |= SDRAM_SDTR3_XCS;
2106
2107 /*
2108 * convert t_rfc from nanoseconds to ddr clocks
2109 * round up if necessary
2110 */
2111 t_rfc_clk = MULDIV64(sdram_freq, t_rfc_ns, ONE_BILLION);
2112 ddr_check = MULDIV64(ONE_BILLION, t_rfc_clk, t_rfc_ns);
2113 if (sdram_freq != ddr_check)
2114 t_rfc_clk++;
2115
2116 sdtr3 |= SDRAM_SDTR3_RFC_ENCODE(t_rfc_clk);
2117
2118 mtsdram(SDRAM_SDTR3, sdtr3);
2119 }
2120
2121 /*-----------------------------------------------------------------------------+
2122 * program_bxcf.
2123 *-----------------------------------------------------------------------------*/
2124 static void program_bxcf(unsigned long *dimm_populated,
2125 unsigned char *iic0_dimm_addr,
2126 unsigned long num_dimm_banks)
2127 {
2128 unsigned long dimm_num;
2129 unsigned long num_col_addr;
2130 unsigned long num_ranks;
2131 unsigned long num_banks;
2132 unsigned long mode;
2133 unsigned long ind_rank;
2134 unsigned long ind;
2135 unsigned long ind_bank;
2136 unsigned long bank_0_populated;
2137
2138 /*------------------------------------------------------------------
2139 * Set the BxCF regs. First, wipe out the bank config registers.
2140 *-----------------------------------------------------------------*/
2141 mtsdram(SDRAM_MB0CF, 0x00000000);
2142 mtsdram(SDRAM_MB1CF, 0x00000000);
2143 mtsdram(SDRAM_MB2CF, 0x00000000);
2144 mtsdram(SDRAM_MB3CF, 0x00000000);
2145
2146 mode = SDRAM_BXCF_M_BE_ENABLE;
2147
2148 bank_0_populated = 0;
2149
2150 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
2151 if (dimm_populated[dimm_num] != SDRAM_NONE) {
2152 num_col_addr = spd_read(iic0_dimm_addr[dimm_num], 4);
2153 num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
2154 if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
2155 num_ranks = (num_ranks & 0x0F) + 1;
2156 else
2157 num_ranks = num_ranks & 0x0F;
2158
2159 num_banks = spd_read(iic0_dimm_addr[dimm_num], 17);
2160
2161 for (ind_bank = 0; ind_bank < 2; ind_bank++) {
2162 if (num_banks == 4)
2163 ind = 0;
2164 else
2165 ind = 5 << 8;
2166 switch (num_col_addr) {
2167 case 0x08:
2168 mode |= (SDRAM_BXCF_M_AM_0 + ind);
2169 break;
2170 case 0x09:
2171 mode |= (SDRAM_BXCF_M_AM_1 + ind);
2172 break;
2173 case 0x0A:
2174 mode |= (SDRAM_BXCF_M_AM_2 + ind);
2175 break;
2176 case 0x0B:
2177 mode |= (SDRAM_BXCF_M_AM_3 + ind);
2178 break;
2179 case 0x0C:
2180 mode |= (SDRAM_BXCF_M_AM_4 + ind);
2181 break;
2182 default:
2183 printf("DDR-SDRAM: DIMM %d BxCF configuration.\n",
2184 (unsigned int)dimm_num);
2185 printf("ERROR: Unsupported value for number of "
2186 "column addresses: %d.\n", (unsigned int)num_col_addr);
2187 printf("Replace the DIMM module with a supported DIMM.\n\n");
2188 spd_ddr_init_hang ();
2189 }
2190 }
2191
2192 if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
2193 bank_0_populated = 1;
2194
2195 for (ind_rank = 0; ind_rank < num_ranks; ind_rank++) {
2196 mtsdram(SDRAM_MB0CF +
2197 ((dimm_num + bank_0_populated + ind_rank) << 2),
2198 mode);
2199 }
2200 }
2201 }
2202 }
2203
2204 /*------------------------------------------------------------------
2205 * program memory queue.
2206 *-----------------------------------------------------------------*/
2207 static void program_memory_queue(unsigned long *dimm_populated,
2208 unsigned char *iic0_dimm_addr,
2209 unsigned long num_dimm_banks)
2210 {
2211 unsigned long dimm_num;
2212 phys_size_t rank_base_addr;
2213 unsigned long rank_reg;
2214 phys_size_t rank_size_bytes;
2215 unsigned long rank_size_id;
2216 unsigned long num_ranks;
2217 unsigned long baseadd_size;
2218 unsigned long i;
2219 unsigned long bank_0_populated = 0;
2220 phys_size_t total_size = 0;
2221
2222 /*------------------------------------------------------------------
2223 * Reset the rank_base_address.
2224 *-----------------------------------------------------------------*/
2225 rank_reg = SDRAM_R0BAS;
2226
2227 rank_base_addr = 0x00000000;
2228
2229 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
2230 if (dimm_populated[dimm_num] != SDRAM_NONE) {
2231 num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
2232 if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
2233 num_ranks = (num_ranks & 0x0F) + 1;
2234 else
2235 num_ranks = num_ranks & 0x0F;
2236
2237 rank_size_id = spd_read(iic0_dimm_addr[dimm_num], 31);
2238
2239 /*------------------------------------------------------------------
2240 * Set the sizes
2241 *-----------------------------------------------------------------*/
2242 baseadd_size = 0;
2243 switch (rank_size_id) {
2244 case 0x01:
2245 baseadd_size |= SDRAM_RXBAS_SDSZ_1024;
2246 total_size = 1024;
2247 break;
2248 case 0x02:
2249 baseadd_size |= SDRAM_RXBAS_SDSZ_2048;
2250 total_size = 2048;
2251 break;
2252 case 0x04:
2253 baseadd_size |= SDRAM_RXBAS_SDSZ_4096;
2254 total_size = 4096;
2255 break;
2256 case 0x08:
2257 baseadd_size |= SDRAM_RXBAS_SDSZ_32;
2258 total_size = 32;
2259 break;
2260 case 0x10:
2261 baseadd_size |= SDRAM_RXBAS_SDSZ_64;
2262 total_size = 64;
2263 break;
2264 case 0x20:
2265 baseadd_size |= SDRAM_RXBAS_SDSZ_128;
2266 total_size = 128;
2267 break;
2268 case 0x40:
2269 baseadd_size |= SDRAM_RXBAS_SDSZ_256;
2270 total_size = 256;
2271 break;
2272 case 0x80:
2273 baseadd_size |= SDRAM_RXBAS_SDSZ_512;
2274 total_size = 512;
2275 break;
2276 default:
2277 printf("DDR-SDRAM: DIMM %d memory queue configuration.\n",
2278 (unsigned int)dimm_num);
2279 printf("ERROR: Unsupported value for the banksize: %d.\n",
2280 (unsigned int)rank_size_id);
2281 printf("Replace the DIMM module with a supported DIMM.\n\n");
2282 spd_ddr_init_hang ();
2283 }
2284 rank_size_bytes = total_size << 20;
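			/*
			 * total_size is in MiB, so shifting left by 20 converts
			 * it to bytes; e.g. a 512 MiB rank becomes 0x20000000.
			 */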
2285
2286 if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
2287 bank_0_populated = 1;
2288
2289 for (i = 0; i < num_ranks; i++) {
2290 mtdcr_any(rank_reg+i+dimm_num+bank_0_populated,
2291 (SDRAM_RXBAS_SDBA_ENCODE(rank_base_addr) |
2292 baseadd_size));
2293 rank_base_addr += rank_size_bytes;
2294 }
2295 }
2296 }
2297
2298 #if defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
2299 defined(CONFIG_460EX) || defined(CONFIG_460GT) || \
2300 defined(CONFIG_460SX)
2301 /*
2302 * Enable high bandwidth access
2303 * This is currently not used, but with this setup it can
2304 * be used later on, e.g. in the Linux EMAC driver, for a
2305 * performance gain.
2306 */
2307 mtdcr(SDRAM_PLBADDULL, 0x00000000); /* MQ0_BAUL */
2308 mtdcr(SDRAM_PLBADDUHB, 0x00000008); /* MQ0_BAUH */
2309
2310 /*
2311 * Set optimal value for Memory Queue HB/LL Configuration registers
2312 */
2313 mtdcr(SDRAM_CONF1HB, (mfdcr(SDRAM_CONF1HB) & ~SDRAM_CONF1HB_MASK) |
2314 SDRAM_CONF1HB_AAFR | SDRAM_CONF1HB_RPEN | SDRAM_CONF1HB_RFTE |
2315 SDRAM_CONF1HB_RPLM | SDRAM_CONF1HB_WRCL);
2316 mtdcr(SDRAM_CONF1LL, (mfdcr(SDRAM_CONF1LL) & ~SDRAM_CONF1LL_MASK) |
2317 SDRAM_CONF1LL_AAFR | SDRAM_CONF1LL_RPEN | SDRAM_CONF1LL_RFTE |
2318 SDRAM_CONF1LL_RPLM);
2319 mtdcr(SDRAM_CONFPATHB, mfdcr(SDRAM_CONFPATHB) | SDRAM_CONFPATHB_TPEN);
2320 #endif
2321 }
2322
2323 #ifdef CONFIG_DDR_ECC
2324 /*-----------------------------------------------------------------------------+
2325 * program_ecc.
2326 *-----------------------------------------------------------------------------*/
2327 static void program_ecc(unsigned long *dimm_populated,
2328 unsigned char *iic0_dimm_addr,
2329 unsigned long num_dimm_banks,
2330 unsigned long tlb_word2_i_value)
2331 {
2332 unsigned long dimm_num;
2333 unsigned long ecc;
2334
2335 ecc = 0;
2336 /* loop through all the DIMM slots on the board */
2337 for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
2338 /* If a dimm is installed in a particular slot ... */
2339 if (dimm_populated[dimm_num] != SDRAM_NONE)
2340 ecc = max(ecc,
2341 (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 11));
2342 }
2343 if (ecc == 0)
2344 return;
2345
2346 do_program_ecc(tlb_word2_i_value);
2347 }
2348 #endif
2349
2350 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
2351 /*-----------------------------------------------------------------------------+
2352 * program_DQS_calibration.
2353 *-----------------------------------------------------------------------------*/
2354 static void program_DQS_calibration(unsigned long *dimm_populated,
2355 unsigned char *iic0_dimm_addr,
2356 unsigned long num_dimm_banks)
2357 {
2358 unsigned long val;
2359
2360 #ifdef HARD_CODED_DQS /* calibration test with hard-coded values */
2361 mtsdram(SDRAM_RQDC, 0x80000037);
2362 mtsdram(SDRAM_RDCC, 0x40000000);
2363 mtsdram(SDRAM_RFDC, 0x000001DF);
2364
2365 test();
2366 #else
2367 /*------------------------------------------------------------------
2368 * Program RDCC register
2369 * Read sample cycle auto-update enable
2370 *-----------------------------------------------------------------*/
2371
2372 mfsdram(SDRAM_RDCC, val);
2373 mtsdram(SDRAM_RDCC,
2374 (val & ~(SDRAM_RDCC_RDSS_MASK | SDRAM_RDCC_RSAE_MASK))
2375 | SDRAM_RDCC_RSAE_ENABLE);
2376
2377 /*------------------------------------------------------------------
2378 * Program RQDC register
2379 * Internal DQS delay mechanism enable
2380 *-----------------------------------------------------------------*/
2381 mtsdram(SDRAM_RQDC, (SDRAM_RQDC_RQDE_ENABLE|SDRAM_RQDC_RQFD_ENCODE(0x38)));
2382
2383 /*------------------------------------------------------------------
2384 * Program RFDC register
2385 * Set Feedback Fractional Oversample
2386 * Auto-detect read sample cycle enable
2387 * Set RFOS to 1/4 of memclk cycle (0x3f)
2388 *-----------------------------------------------------------------*/
2389 mfsdram(SDRAM_RFDC, val);
2390 mtsdram(SDRAM_RFDC,
2391 (val & ~(SDRAM_RFDC_ARSE_MASK | SDRAM_RFDC_RFOS_MASK |
2392 SDRAM_RFDC_RFFD_MASK))
2393 | (SDRAM_RFDC_ARSE_ENABLE | SDRAM_RFDC_RFOS_ENCODE(0x3f) |
2394 SDRAM_RFDC_RFFD_ENCODE(0)));
2395
2396 DQS_calibration_process();
2397 #endif
2398 }
2399
2400 static int short_mem_test(void)
2401 {
2402 u32 *membase;
2403 u32 bxcr_num;
2404 u32 bxcf;
2405 int i;
2406 int j;
2407 phys_size_t base_addr;
2408 u32 test[NUMMEMTESTS][NUMMEMWORDS] = {
2409 {0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
2410 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
2411 {0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
2412 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
2413 {0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
2414 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
2415 {0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
2416 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
2417 {0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
2418 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
2419 {0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
2420 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
2421 {0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
2422 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
2423 {0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
2424 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
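	/*
	 * The patterns are presumably chosen to drive every data bit in both
	 * directions: all-zeros/all-ones pairs, 0xAAAAAAAA/0x55555555
	 * checkerboards and their nibble/byte-swapped variants. Each pattern
	 * is written as a short NUMMEMWORDS-word burst, flushed from the data
	 * cache and read back to judge the delay setting currently under test.
	 */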
2425 int l;
2426
2427 for (bxcr_num = 0; bxcr_num < MAXBXCF; bxcr_num++) {
2428 mfsdram(SDRAM_MB0CF + (bxcr_num << 2), bxcf);
2429
2430 /* Banks enabled */
2431 if ((bxcf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
2432 /* Bank is enabled */
2433
2434 /*
2435 * Only run test on accessible memory (below 2GB)
2436 */
2437 base_addr = SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+bxcr_num));
2438 if (base_addr >= CONFIG_MAX_MEM_MAPPED)
2439 continue;
2440
2441 /*------------------------------------------------------------------
2442 * Run the short memory test.
2443 *-----------------------------------------------------------------*/
2444 membase = (u32 *)(u32)base_addr;
2445
2446 for (i = 0; i < NUMMEMTESTS; i++) {
2447 for (j = 0; j < NUMMEMWORDS; j++) {
2448 membase[j] = test[i][j];
2449 ppcDcbf((u32)&(membase[j]));
2450 }
2451 sync();
2452 for (l=0; l<NUMLOOPS; l++) {
2453 for (j = 0; j < NUMMEMWORDS; j++) {
2454 if (membase[j] != test[i][j]) {
2455 ppcDcbf((u32)&(membase[j]));
2456 return 0;
2457 }
2458 ppcDcbf((u32)&(membase[j]));
2459 }
2460 sync();
2461 }
2462 }
2463 } /* if bank enabled */
2464 } /* for bxcf_num */
2465
2466 return 1;
2467 }
2468
2469 #ifndef HARD_CODED_DQS
2470 /*-----------------------------------------------------------------------------+
2471 * DQS_calibration_process.
2472 *-----------------------------------------------------------------------------*/
2473 static void DQS_calibration_process(void)
2474 {
2475 unsigned long rfdc_reg;
2476 unsigned long rffd;
2477 unsigned long val;
2478 long rffd_average;
2479 long max_start;
2480 unsigned long dlycal;
2481 unsigned long dly_val;
2482 unsigned long max_pass_length;
2483 unsigned long current_pass_length;
2484 unsigned long current_fail_length;
2485 unsigned long current_start;
2486 long max_end;
2487 unsigned char fail_found;
2488 unsigned char pass_found;
2489 #if !defined(CONFIG_DDR_RQDC_FIXED)
2490 int window_found;
2491 u32 rqdc_reg;
2492 u32 rqfd;
2493 u32 rqfd_start;
2494 u32 rqfd_average;
2495 int loopi = 0;
2496 char str[] = "Auto calibration -";
2497 char slash[] = "\\|/-\\|/-";
2498
2499 /*------------------------------------------------------------------
2500 * Test to determine the best read clock delay tuning bits.
2501 *
2502 * Before the DDR controller can be used, the read clock delay needs to be
2503 * set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
2504 * This value cannot be hardcoded into the program because it changes
2505 * depending on the board's setup and environment.
2506 * To do this, all delay values are tested to see if they
2507 * work or not. By doing this, you get groups of fails with groups of
2508 * passing values. The idea is to find the start and end of a passing
2509 * window and take the center of it to use as the read clock delay.
2510 *
2511 * A failure has to be seen first so that when we hit a pass, we know
2512 * that it is truly the start of the window. If we get passing values
2513 * to start off with, we don't know if we are at the start of the window.
2514 *
2515 * The code assumes that a failure will always be found.
2516 * If a failure is not found, there is no easy way to get the middle
2517 * of the passing window. I guess we can pretty much pick any value
2518 * but some values will be better than others. Since the lowest speed
2519 * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
2520 * from experimentation it is safe to say you will always have a failure.
2521 *-----------------------------------------------------------------*/
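	/*
	 * Rough illustration of the window search below, with made-up
	 * results for rffd = 0..9:   F F F P P P P P F F
	 * Assuming the leading run of failures is long enough to set
	 * fail_found, the first pass marks current_start = 3, the passing
	 * run grows max_start/max_end to 3/7, and the value finally
	 * programmed is the centre of the window: (3 + 7) >> 1 = 5.
	 */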
2522
2523 /* first fix RQDC[RQFD] to an average of 80 degree phase shift to find RFDC[RFFD] */
2524 rqfd_start = 64; /* test-only: don't know if this is the _best_ start value */
2525
2526 puts(str);
2527
2528 calibration_loop:
2529 mfsdram(SDRAM_RQDC, rqdc_reg);
2530 mtsdram(SDRAM_RQDC, (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
2531 SDRAM_RQDC_RQFD_ENCODE(rqfd_start));
2532 #else /* CONFIG_DDR_RQDC_FIXED */
2533 /*
2534 * On Katmai the complete auto-calibration somehow doesn't seem to
2535 * produce the best results, meaning optimal values for RQFD/RFFD.
2536 * This was discovered by GDA using a high bandwidth scope,
2537 * analyzing the DDR2 signals. GDA provided a fixed value for RQFD,
2538 * so now on Katmai "only" RFFD is auto-calibrated.
2539 */
2540 mtsdram(SDRAM_RQDC, CONFIG_DDR_RQDC_FIXED);
2541 #endif /* CONFIG_DDR_RQDC_FIXED */
2542
2543 max_start = 0;
2544
2545 max_pass_length = 0;
2546 max_start = 0;
2547 max_end = 0;
2548 current_pass_length = 0;
2549 current_fail_length = 0;
2550 current_start = 0;
2551 fail_found = false;
2552 pass_found = false;
2553
2554 /*
2555 * get the delay line calibration register value
2556 */
2557 mfsdram(SDRAM_DLCR, dlycal);
2558 dly_val = SDRAM_DLYCAL_DLCV_DECODE(dlycal) << 2;
2559
2560 for (rffd = 0; rffd <= SDRAM_RFDC_RFFD_MAX; rffd++) {
2561 mfsdram(SDRAM_RFDC, rfdc_reg);
2562 rfdc_reg &= ~(SDRAM_RFDC_RFFD_MASK);
2563
2564 /*------------------------------------------------------------------
2565 * Set the timing reg for the test.
2566 *-----------------------------------------------------------------*/
2567 mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd));
2568
2569 /*------------------------------------------------------------------
2570 * See if the rffd value passed.
2571 *-----------------------------------------------------------------*/
2572 if (short_mem_test()) {
2573 if (fail_found == true) {
2574 pass_found = true;
2575 if (current_pass_length == 0)
2576 current_start = rffd;
2577
2578 current_fail_length = 0;
2579 current_pass_length++;
2580
2581 if (current_pass_length > max_pass_length) {
2582 max_pass_length = current_pass_length;
2583 max_start = current_start;
2584 max_end = rffd;
2585 }
2586 }
2587 } else {
2588 current_pass_length = 0;
2589 current_fail_length++;
2590
2591 if (current_fail_length >= (dly_val >> 2)) {
2592 if (fail_found == false)
2593 fail_found = true;
2594 else if (pass_found == true)
2595 break;
2596 }
2597 }
2598 } /* for rffd */
2599
2600 /*------------------------------------------------------------------
2601 * Set the average RFFD value
2602 *-----------------------------------------------------------------*/
2603 rffd_average = ((max_start + max_end) >> 1);
2604
2605 if (rffd_average < 0)
2606 rffd_average = 0;
2607
2608 if (rffd_average > SDRAM_RFDC_RFFD_MAX)
2609 rffd_average = SDRAM_RFDC_RFFD_MAX;
2610 /* now fix RFDC[RFFD] found and find RQDC[RQFD] */
2611 mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd_average));
2612
2613 #if !defined(CONFIG_DDR_RQDC_FIXED)
2614 max_pass_length = 0;
2615 max_start = 0;
2616 max_end = 0;
2617 current_pass_length = 0;
2618 current_fail_length = 0;
2619 current_start = 0;
2620 window_found = false;
2621 fail_found = false;
2622 pass_found = false;
2623
2624 for (rqfd = 0; rqfd <= SDRAM_RQDC_RQFD_MAX; rqfd++) {
2625 mfsdram(SDRAM_RQDC, rqdc_reg);
2626 rqdc_reg &= ~(SDRAM_RQDC_RQFD_MASK);
2627
2628 /*------------------------------------------------------------------
2629 * Set the timing reg for the test.
2630 *-----------------------------------------------------------------*/
2631 mtsdram(SDRAM_RQDC, rqdc_reg | SDRAM_RQDC_RQFD_ENCODE(rqfd));
2632
2633 /*------------------------------------------------------------------
2634 * See if the rffd value passed.
2635 *-----------------------------------------------------------------*/
2636 if (short_mem_test()) {
2637 if (fail_found == true) {
2638 pass_found = true;
2639 if (current_pass_length == 0)
2640 current_start = rqfd;
2641
2642 current_fail_length = 0;
2643 current_pass_length++;
2644
2645 if (current_pass_length > max_pass_length) {
2646 max_pass_length = current_pass_length;
2647 max_start = current_start;
2648 max_end = rqfd;
2649 }
2650 }
2651 } else {
2652 current_pass_length = 0;
2653 current_fail_length++;
2654
2655 if (fail_found == false) {
2656 fail_found = true;
2657 } else if (pass_found == true) {
2658 window_found = true;
2659 break;
2660 }
2661 }
2662 }
2663
2664 rqfd_average = ((max_start + max_end) >> 1);
2665
2666 /*------------------------------------------------------------------
2667 * Make sure we found the valid read passing window. Halt if not
2668 *-----------------------------------------------------------------*/
2669 if (window_found == false) {
2670 if (rqfd_start < SDRAM_RQDC_RQFD_MAX) {
2671 putc('\b');
2672 putc(slash[loopi++ % 8]);
2673
2674 /* try again with a different RQFD start value */
2675 rqfd_start++;
2676 goto calibration_loop;
2677 }
2678
2679 printf("\nERROR: Cannot determine a common read delay for the "
2680 "DIMM(s) installed.\n");
2681 debug("%s[%d] ERROR : \n", __FUNCTION__,__LINE__);
2682 ppc4xx_ibm_ddr2_register_dump();
2683 spd_ddr_init_hang ();
2684 }
2685
2686 if (rqfd_average < 0)
2687 rqfd_average = 0;
2688
2689 if (rqfd_average > SDRAM_RQDC_RQFD_MAX)
2690 rqfd_average = SDRAM_RQDC_RQFD_MAX;
2691
2692 mtsdram(SDRAM_RQDC,
2693 (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
2694 SDRAM_RQDC_RQFD_ENCODE(rqfd_average));
2695
2696 blank_string(strlen(str));
2697 #endif /* CONFIG_DDR_RQDC_FIXED */
2698
2699 mfsdram(SDRAM_DLCR, val);
2700 debug("%s[%d] DLCR: 0x%08lX\n", __FUNCTION__, __LINE__, val);
2701 mfsdram(SDRAM_RQDC, val);
2702 debug("%s[%d] RQDC: 0x%08lX\n", __FUNCTION__, __LINE__, val);
2703 mfsdram(SDRAM_RFDC, val);
2704 debug("%s[%d] RFDC: 0x%08lX\n", __FUNCTION__, __LINE__, val);
2705 mfsdram(SDRAM_RDCC, val);
2706 debug("%s[%d] RDCC: 0x%08lX\n", __FUNCTION__, __LINE__, val);
2707 }
2708 #else /* calibration test with hard-coded values */
2709 /*-----------------------------------------------------------------------------+
2710 * test - short memory check using hard-coded DQS values.
2711 *-----------------------------------------------------------------------------*/
2712 static void test(void)
2713 {
2714 unsigned long dimm_num;
2715 unsigned long ecc_temp;
2716 unsigned long i, j;
2717 unsigned long *membase;
2718 unsigned long bxcf[MAXRANKS];
2719 unsigned long val;
2720 char window_found;
2721 char begin_found[MAXDIMMS];
2722 char end_found[MAXDIMMS];
2723 char search_end[MAXDIMMS];
2724 unsigned long test[NUMMEMTESTS][NUMMEMWORDS] = {
2725 {0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
2726 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
2727 {0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
2728 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
2729 {0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
2730 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
2731 {0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
2732 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
2733 {0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
2734 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
2735 {0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
2736 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
2737 {0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
2738 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
2739 {0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
2740 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
2741
2742 /*------------------------------------------------------------------
2743 * Test to determine the best read clock delay tuning bits.
2744 *
2745 * Before the DDR controller can be used, the read clock delay needs to be
2746 * set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
2747 * This value cannot be hardcoded into the program because it changes
2748 * depending on the board's setup and environment.
2749 * To do this, all delay values are tested to see if they
2750 * work or not. By doing this, you get groups of fails with groups of
2751 * passing values. The idea is to find the start and end of a passing
2752 * window and take the center of it to use as the read clock delay.
2753 *
2754 * A failure has to be seen first so that when we hit a pass, we know
2755 * that it is truly the start of the window. If we get passing values
2756 * to start off with, we don't know if we are at the start of the window.
2757 *
2758 * The code assumes that a failure will always be found.
2759 * If a failure is not found, there is no easy way to get the middle
2760 * of the passing window. I guess we can pretty much pick any value
2761 * but some values will be better than others. Since the lowest speed
2762 * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
2763 * from experimentation it is safe to say you will always have a failure.
2764 *-----------------------------------------------------------------*/
2765 mfsdram(SDRAM_MCOPT1, ecc_temp);
2766 ecc_temp &= SDRAM_MCOPT1_MCHK_MASK;
2767 mfsdram(SDRAM_MCOPT1, val);
2768 mtsdram(SDRAM_MCOPT1, (val & ~SDRAM_MCOPT1_MCHK_MASK) |
2769 SDRAM_MCOPT1_MCHK_NON);
2770
2771 window_found = false;
2772 begin_found[0] = false;
2773 end_found[0] = false;
2774 search_end[0] = false;
2775 begin_found[1] = false;
2776 end_found[1] = false;
2777 search_end[1] = false;
2778
2779 for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
2780 mfsdram(SDRAM_MB0CF + (dimm_num << 2), bxcf[dimm_num]);
2781
2782 /* Banks enabled */
2783 if ((bxcf[dimm_num] & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
2784
2785 /* Bank is enabled */
2786 membase =
2787 (unsigned long*)(SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+dimm_num)));
2788
2789 /*------------------------------------------------------------------
2790 * Run the short memory test.
2791 *-----------------------------------------------------------------*/
2792 for (i = 0; i < NUMMEMTESTS; i++) {
2793 for (j = 0; j < NUMMEMWORDS; j++) {
2794 membase[j] = test[i][j];
2795 ppcDcbf((u32)&(membase[j]));
2796 }
2797 sync();
2798 for (j = 0; j < NUMMEMWORDS; j++) {
2799 if (membase[j] != test[i][j]) {
2800 ppcDcbf((u32)&(membase[j]));
2801 break;
2802 }
2803 ppcDcbf((u32)&(membase[j]));
2804 }
2805 sync();
2806 if (j < NUMMEMWORDS)
2807 break;
2808 }
2809
2810 /*------------------------------------------------------------------
2811 * See if the rffd value passed.
2812 *-----------------------------------------------------------------*/
2813 if (i < NUMMEMTESTS) {
2814 if ((end_found[dimm_num] == false) &&
2815 (search_end[dimm_num] == true)) {
2816 end_found[dimm_num] = true;
2817 }
2818 if ((end_found[0] == true) &&
2819 (end_found[1] == true))
2820 break;
2821 } else {
2822 if (begin_found[dimm_num] == false) {
2823 begin_found[dimm_num] = true;
2824 search_end[dimm_num] = true;
2825 }
2826 }
2827 } else {
2828 begin_found[dimm_num] = true;
2829 end_found[dimm_num] = true;
2830 }
2831 }
2832
2833 if ((begin_found[0] == true) && (begin_found[1] == true))
2834 window_found = true;
2835
2836 /*------------------------------------------------------------------
2837 * Make sure we found the valid read passing window. Halt if not
2838 *-----------------------------------------------------------------*/
2839 if (window_found == false) {
2840 printf("ERROR: Cannot determine a common read delay for the "
2841 "DIMM(s) installed.\n");
2842 spd_ddr_init_hang ();
2843 }
2844
2845 /*------------------------------------------------------------------
2846 * Restore the ECC variable to what it originally was
2847 *-----------------------------------------------------------------*/
2848 mtsdram(SDRAM_MCOPT1,
2849 (ppcMfdcr_sdram(SDRAM_MCOPT1) & ~SDRAM_MCOPT1_MCHK_MASK)
2850 | ecc_temp);
2851 }
2852 #endif /* !HARD_CODED_DQS */
2853 #endif /* !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION) */
2854
2855 #else /* CONFIG_SPD_EEPROM */
2856
2857 /*-----------------------------------------------------------------------------
2858 * Function: dram_init
2859 * Description: Configures the PPC4xx IBM DDR1/DDR2 SDRAM memory controller.
2860 * The configuration is performed using static, compile-
2861 * time parameters.
2862 * Supports the PPC405EX(r) and PPC460EX/GT.
2863 *---------------------------------------------------------------------------*/
2864 int dram_init(void)
2865 {
2866 unsigned long val;
2867
2868 #if defined(CONFIG_440)
2869 mtdcr(SDRAM_R0BAS, CONFIG_SYS_SDRAM_R0BAS);
2870 mtdcr(SDRAM_R1BAS, CONFIG_SYS_SDRAM_R1BAS);
2871 mtdcr(SDRAM_R2BAS, CONFIG_SYS_SDRAM_R2BAS);
2872 mtdcr(SDRAM_R3BAS, CONFIG_SYS_SDRAM_R3BAS);
2873 mtdcr(SDRAM_PLBADDULL, CONFIG_SYS_SDRAM_PLBADDULL); /* MQ0_BAUL */
2874 mtdcr(SDRAM_PLBADDUHB, CONFIG_SYS_SDRAM_PLBADDUHB); /* MQ0_BAUH */
2875 mtdcr(SDRAM_CONF1LL, CONFIG_SYS_SDRAM_CONF1LL);
2876 mtdcr(SDRAM_CONF1HB, CONFIG_SYS_SDRAM_CONF1HB);
2877 mtdcr(SDRAM_CONFPATHB, CONFIG_SYS_SDRAM_CONFPATHB);
2878 #endif
2879
2880 /* Set Memory Bank Configuration Registers */
2881
2882 mtsdram(SDRAM_MB0CF, CONFIG_SYS_SDRAM0_MB0CF);
2883 mtsdram(SDRAM_MB1CF, CONFIG_SYS_SDRAM0_MB1CF);
2884 mtsdram(SDRAM_MB2CF, CONFIG_SYS_SDRAM0_MB2CF);
2885 mtsdram(SDRAM_MB3CF, CONFIG_SYS_SDRAM0_MB3CF);
2886
2887 /* Set Memory Clock Timing Register */
2888
2889 mtsdram(SDRAM_CLKTR, CONFIG_SYS_SDRAM0_CLKTR);
2890
2891 /* Set Refresh Time Register */
2892
2893 mtsdram(SDRAM_RTR, CONFIG_SYS_SDRAM0_RTR);
2894
2895 /* Set SDRAM Timing Registers */
2896
2897 mtsdram(SDRAM_SDTR1, CONFIG_SYS_SDRAM0_SDTR1);
2898 mtsdram(SDRAM_SDTR2, CONFIG_SYS_SDRAM0_SDTR2);
2899 mtsdram(SDRAM_SDTR3, CONFIG_SYS_SDRAM0_SDTR3);
2900
2901 /* Set Mode and Extended Mode Registers */
2902
2903 mtsdram(SDRAM_MMODE, CONFIG_SYS_SDRAM0_MMODE);
2904 mtsdram(SDRAM_MEMODE, CONFIG_SYS_SDRAM0_MEMODE);
2905
2906 /* Set Memory Controller Options 1 Register */
2907
2908 mtsdram(SDRAM_MCOPT1, CONFIG_SYS_SDRAM0_MCOPT1);
2909
2910 /* Set Manual Initialization Control Registers */
2911
2912 mtsdram(SDRAM_INITPLR0, CONFIG_SYS_SDRAM0_INITPLR0);
2913 mtsdram(SDRAM_INITPLR1, CONFIG_SYS_SDRAM0_INITPLR1);
2914 mtsdram(SDRAM_INITPLR2, CONFIG_SYS_SDRAM0_INITPLR2);
2915 mtsdram(SDRAM_INITPLR3, CONFIG_SYS_SDRAM0_INITPLR3);
2916 mtsdram(SDRAM_INITPLR4, CONFIG_SYS_SDRAM0_INITPLR4);
2917 mtsdram(SDRAM_INITPLR5, CONFIG_SYS_SDRAM0_INITPLR5);
2918 mtsdram(SDRAM_INITPLR6, CONFIG_SYS_SDRAM0_INITPLR6);
2919 mtsdram(SDRAM_INITPLR7, CONFIG_SYS_SDRAM0_INITPLR7);
2920 mtsdram(SDRAM_INITPLR8, CONFIG_SYS_SDRAM0_INITPLR8);
2921 mtsdram(SDRAM_INITPLR9, CONFIG_SYS_SDRAM0_INITPLR9);
2922 mtsdram(SDRAM_INITPLR10, CONFIG_SYS_SDRAM0_INITPLR10);
2923 mtsdram(SDRAM_INITPLR11, CONFIG_SYS_SDRAM0_INITPLR11);
2924 mtsdram(SDRAM_INITPLR12, CONFIG_SYS_SDRAM0_INITPLR12);
2925 mtsdram(SDRAM_INITPLR13, CONFIG_SYS_SDRAM0_INITPLR13);
2926 mtsdram(SDRAM_INITPLR14, CONFIG_SYS_SDRAM0_INITPLR14);
2927 mtsdram(SDRAM_INITPLR15, CONFIG_SYS_SDRAM0_INITPLR15);
2928
2929 /* Set On-Die Termination Registers */
2930
2931 mtsdram(SDRAM_CODT, CONFIG_SYS_SDRAM0_CODT);
2932 mtsdram(SDRAM_MODT0, CONFIG_SYS_SDRAM0_MODT0);
2933 mtsdram(SDRAM_MODT1, CONFIG_SYS_SDRAM0_MODT1);
2934
2935 /* Set Write Timing Register */
2936
2937 mtsdram(SDRAM_WRDTR, CONFIG_SYS_SDRAM0_WRDTR);
2938
2939 /*
2940 * Start Initialization by SDRAM0_MCOPT2[SREN] = 0 and
2941 * SDRAM0_MCOPT2[IPTR] = 1
2942 */
2943
2944 mtsdram(SDRAM_MCOPT2, (SDRAM_MCOPT2_SREN_EXIT |
2945 SDRAM_MCOPT2_IPTR_EXECUTE));
2946
2947 /*
2948 * Poll SDRAM0_MCSTAT[MIC] for assertion to indicate the
2949 * completion of initialization.
2950 */
2951
2952 do {
2953 mfsdram(SDRAM_MCSTAT, val);
2954 } while ((val & SDRAM_MCSTAT_MIC_MASK) != SDRAM_MCSTAT_MIC_COMP);
2955
2956 /* Set Delay Control Registers */
2957
2958 mtsdram(SDRAM_DLCR, CONFIG_SYS_SDRAM0_DLCR);
2959
2960 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
2961 mtsdram(SDRAM_RDCC, CONFIG_SYS_SDRAM0_RDCC);
2962 mtsdram(SDRAM_RQDC, CONFIG_SYS_SDRAM0_RQDC);
2963 mtsdram(SDRAM_RFDC, CONFIG_SYS_SDRAM0_RFDC);
2964 #endif /* !CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
2965
2966 /*
2967 * Enable Controller by SDRAM0_MCOPT2[DCEN] = 1:
2968 */
2969
2970 mfsdram(SDRAM_MCOPT2, val);
2971 mtsdram(SDRAM_MCOPT2, val | SDRAM_MCOPT2_DCEN_ENABLE);
2972
2973 #if defined(CONFIG_440)
2974 /*
2975 * Program TLB entries with caches enabled, for best performance
2976 * while auto-calibrating and ECC generation
2977 */
2978 program_tlb(0, 0, (CONFIG_SYS_MBYTES_SDRAM << 20), 0);
2979 #endif
2980
2981 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
2982 /*------------------------------------------------------------------
2983 | DQS calibration.
2984 +-----------------------------------------------------------------*/
2985 DQS_autocalibration();
2986 #endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
2987
2988 /*
2989 * Now complete RDSS configuration as mentioned on page 7 of the AMCC
2990 * PowerPC440SP/SPe DDR2 application note:
2991 * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
2992 */
2993 update_rdcc();
2994
2995 #if defined(CONFIG_DDR_ECC)
2996 do_program_ecc(0);
2997 #endif /* defined(CONFIG_DDR_ECC) */
2998
2999 #if defined(CONFIG_440)
3000 /*
3001 * Now after initialization (auto-calibration and ECC generation)
3002 * remove the TLB entries with caches enabled and program again with
3003 * desired cache functionality
3004 */
3005 remove_tlb(0, (CONFIG_SYS_MBYTES_SDRAM << 20));
3006 program_tlb(0, 0, (CONFIG_SYS_MBYTES_SDRAM << 20), MY_TLB_WORD2_I_ENABLE);
3007 #endif
3008
3009 ppc4xx_ibm_ddr2_register_dump();
3010
3011 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
3012 /*
3013 * Clear potential errors resulting from auto-calibration.
3014 * If not done, then we could get an interrupt later on when
3015 * exceptions are enabled.
3016 */
3017 set_mcsr(get_mcsr());
3018 #endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
3019
3020 gd->ram_size = CONFIG_SYS_MBYTES_SDRAM << 20;
3021
3022 return 0;
3023 }
3024 #endif /* CONFIG_SPD_EEPROM */
3025
3026 #if defined(CONFIG_440)
3027 u32 mfdcr_any(u32 dcr)
3028 {
3029 u32 val;
3030
3031 switch (dcr) {
3032 case SDRAM_R0BAS + 0:
3033 val = mfdcr(SDRAM_R0BAS + 0);
3034 break;
3035 case SDRAM_R0BAS + 1:
3036 val = mfdcr(SDRAM_R0BAS + 1);
3037 break;
3038 case SDRAM_R0BAS + 2:
3039 val = mfdcr(SDRAM_R0BAS + 2);
3040 break;
3041 case SDRAM_R0BAS + 3:
3042 val = mfdcr(SDRAM_R0BAS + 3);
3043 break;
3044 default:
3045 printf("DCR %d not defined in case statement!!!\n", dcr);
3046 val = 0; /* just to satisfy the compiler */
3047 }
3048
3049 return val;
3050 }
3051
3052 void mtdcr_any(u32 dcr, u32 val)
3053 {
3054 switch (dcr) {
3055 case SDRAM_R0BAS + 0:
3056 mtdcr(SDRAM_R0BAS + 0, val);
3057 break;
3058 case SDRAM_R0BAS + 1:
3059 mtdcr(SDRAM_R0BAS + 1, val);
3060 break;
3061 case SDRAM_R0BAS + 2:
3062 mtdcr(SDRAM_R0BAS + 2, val);
3063 break;
3064 case SDRAM_R0BAS + 3:
3065 mtdcr(SDRAM_R0BAS + 3, val);
3066 break;
3067 default:
3068 printf("DCR %d not defined in case statement!!!\n", dcr);
3069 }
3070 }
3071 #endif /* defined(CONFIG_440) */
3072
3073 inline void ppc4xx_ibm_ddr2_register_dump(void)
3074 {
3075 #if defined(DEBUG)
3076 printf("\nPPC4xx IBM DDR2 Register Dump:\n");
3077
3078 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3079 defined(CONFIG_460EX) || defined(CONFIG_460GT))
3080 PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R0BAS);
3081 PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R1BAS);
3082 PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R2BAS);
3083 PPC4xx_IBM_DDR2_DUMP_MQ_REGISTER(R3BAS);
3084 #endif /* (defined(CONFIG_440SP) || ... */
3085 #if defined(CONFIG_405EX)
3086 PPC4xx_IBM_DDR2_DUMP_REGISTER(BESR);
3087 PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARL);
3088 PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARH);
3089 PPC4xx_IBM_DDR2_DUMP_REGISTER(WMIRQ);
3090 PPC4xx_IBM_DDR2_DUMP_REGISTER(PLBOPT);
3091 PPC4xx_IBM_DDR2_DUMP_REGISTER(PUABA);
3092 #endif /* defined(CONFIG_405EX) */
3093 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB0CF);
3094 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB1CF);
3095 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB2CF);
3096 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB3CF);
3097 PPC4xx_IBM_DDR2_DUMP_REGISTER(MCSTAT);
3098 PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1);
3099 PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT2);
3100 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT0);
3101 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT1);
3102 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT2);
3103 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT3);
3104 PPC4xx_IBM_DDR2_DUMP_REGISTER(CODT);
3105 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3106 defined(CONFIG_460EX) || defined(CONFIG_460GT))
3107 PPC4xx_IBM_DDR2_DUMP_REGISTER(VVPR);
3108 PPC4xx_IBM_DDR2_DUMP_REGISTER(OPARS);
3109 /*
3110 * OPART is only used as a trigger register.
3111 *
3112 * No data is contained in this register, and reading or writing
3113 * to it can cause bad things to happen (hangs). Just skip it and
3114 * report "N/A".
3115 */
3116 printf("%20s = N/A\n", "SDRAM_OPART");
3117 #endif /* defined(CONFIG_440SP) || ... */
3118 PPC4xx_IBM_DDR2_DUMP_REGISTER(RTR);
3119 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR0);
3120 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR1);
3121 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR2);
3122 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR3);
3123 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR4);
3124 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR5);
3125 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR6);
3126 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR7);
3127 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR8);
3128 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR9);
3129 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR10);
3130 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR11);
3131 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR12);
3132 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR13);
3133 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR14);
3134 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR15);
3135 PPC4xx_IBM_DDR2_DUMP_REGISTER(RQDC);
3136 PPC4xx_IBM_DDR2_DUMP_REGISTER(RFDC);
3137 PPC4xx_IBM_DDR2_DUMP_REGISTER(RDCC);
3138 PPC4xx_IBM_DDR2_DUMP_REGISTER(DLCR);
3139 PPC4xx_IBM_DDR2_DUMP_REGISTER(CLKTR);
3140 PPC4xx_IBM_DDR2_DUMP_REGISTER(WRDTR);
3141 PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR1);
3142 PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR2);
3143 PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR3);
3144 PPC4xx_IBM_DDR2_DUMP_REGISTER(MMODE);
3145 PPC4xx_IBM_DDR2_DUMP_REGISTER(MEMODE);
3146 PPC4xx_IBM_DDR2_DUMP_REGISTER(ECCES);
3147 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3148 defined(CONFIG_460EX) || defined(CONFIG_460GT))
3149 PPC4xx_IBM_DDR2_DUMP_REGISTER(CID);
3150 #endif /* defined(CONFIG_440SP) || ... */
3151 PPC4xx_IBM_DDR2_DUMP_REGISTER(RID);
3152 PPC4xx_IBM_DDR2_DUMP_REGISTER(FCSR);
3153 PPC4xx_IBM_DDR2_DUMP_REGISTER(RTSR);
3154 #endif /* defined(DEBUG) */
3155 }