1 /*
2 * cpu/ppc4xx/44x_spd_ddr2.c
3 * This SPD SDRAM detection code supports AMCC PPC44x cpu's with a
4 * DDR2 controller (non Denali Core). Those currently are:
5 *
6 * 405: 405EX(r)
7 * 440/460: 440SP/440SPe/460EX/460GT
8 *
9 * Copyright (c) 2008 Nuovation System Designs, LLC
10 * Grant Erickson <gerickson@nuovations.com>
11 *
12 * (C) Copyright 2007-2008
13 * Stefan Roese, DENX Software Engineering, sr@denx.de.
14 *
15 * COPYRIGHT AMCC CORPORATION 2004
16 *
17 * See file CREDITS for list of people who contributed to this
18 * project.
19 *
20 * This program is free software; you can redistribute it and/or
21 * modify it under the terms of the GNU General Public License as
22 * published by the Free Software Foundation; either version 2 of
23 * the License, or (at your option) any later version.
24 *
25 * This program is distributed in the hope that it will be useful,
26 * but WITHOUT ANY WARRANTY; without even the implied warranty of
27 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
28 * GNU General Public License for more details.
29 *
30 * You should have received a copy of the GNU General Public License
31 * along with this program; if not, write to the Free Software
32 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
33 * MA 02111-1307 USA
34 *
35 */
36
37 /* define DEBUG for debugging output (obviously ;-)) */
38 #if 0
39 #define DEBUG
40 #endif
41
42 #include <common.h>
43 #include <command.h>
44 #include <ppc4xx.h>
45 #include <i2c.h>
46 #include <asm/io.h>
47 #include <asm/processor.h>
48 #include <asm/mmu.h>
49 #include <asm/cache.h>
50
51 #include "ecc.h"
52
53 #if defined(CONFIG_SDRAM_PPC4xx_IBM_DDR2)
54
55 #define PPC4xx_IBM_DDR2_DUMP_REGISTER(mnemonic) \
56 do { \
57 u32 data; \
58 mfsdram(SDRAM_##mnemonic, data); \
59 printf("%20s[%02x] = 0x%08X\n", \
60 "SDRAM_" #mnemonic, SDRAM_##mnemonic, data); \
61 } while (0)
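/*
 * Usage sketch (illustrative only): PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1)
 * reads SDRAM_MCOPT1 via mfsdram() and prints the register name, its
 * register number and its current contents on one line; this is how the
 * ppc4xx_ibm_ddr2_register_dump() output is built up.
 */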
62
63 #if defined(CONFIG_SPD_EEPROM)
64
65 /*-----------------------------------------------------------------------------+
66 * Defines
67 *-----------------------------------------------------------------------------*/
68 #ifndef TRUE
69 #define TRUE 1
70 #endif
71 #ifndef FALSE
72 #define FALSE 0
73 #endif
74
75 #define SDRAM_DDR1 1
76 #define SDRAM_DDR2 2
77 #define SDRAM_NONE 0
78
79 #define MAXDIMMS 2
80 #define MAXRANKS 4
81 #define MAXBXCF 4
82 #define MAX_SPD_BYTES 256 /* Max number of bytes on the DIMM's SPD EEPROM */
83
84 #define ONE_BILLION 1000000000
85
86 #define MULDIV64(m1, m2, d) (u32)(((u64)(m1) * (u64)(m2)) / (u64)(d))
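/*
 * Worked example for MULDIV64 (hypothetical numbers, for illustration only):
 * with sdram_freq = 333333333 Hz, MULDIV64(ONE_BILLION, 100, sdram_freq)
 * = (1000000000 * 100) / 333333333 = 300, i.e. a cycle time of 3.00 ns
 * expressed in units of 10 ps. The intermediate product is computed in
 * 64 bits to avoid 32-bit overflow.
 */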
87
88 #define CMD_NOP (7 << 19)
89 #define CMD_PRECHARGE (2 << 19)
90 #define CMD_REFRESH (1 << 19)
91 #define CMD_EMR (0 << 19)
92 #define CMD_READ (5 << 19)
93 #define CMD_WRITE (4 << 19)
94
95 #define SELECT_MR (0 << 16)
96 #define SELECT_EMR (1 << 16)
97 #define SELECT_EMR2 (2 << 16)
98 #define SELECT_EMR3 (3 << 16)
99
100 /* MR */
101 #define DLL_RESET 0x00000100
102
103 #define WRITE_RECOV_2 (1 << 9)
104 #define WRITE_RECOV_3 (2 << 9)
105 #define WRITE_RECOV_4 (3 << 9)
106 #define WRITE_RECOV_5 (4 << 9)
107 #define WRITE_RECOV_6 (5 << 9)
108
109 #define BURST_LEN_4 0x00000002
110
111 /* EMR */
112 #define ODT_0_OHM 0x00000000
113 #define ODT_50_OHM 0x00000044
114 #define ODT_75_OHM 0x00000004
115 #define ODT_150_OHM 0x00000040
116
117 #define ODS_FULL 0x00000000
118 #define ODS_REDUCED 0x00000002
119 #define OCD_CALIB_DEF 0x00000380
120
121 /* defines for ODT (On Die Termination) of the 440SP(e) DDR2 controller */
122 #define ODT_EB0R (0x80000000 >> 8)
123 #define ODT_EB0W (0x80000000 >> 7)
124 #define CALC_ODT_R(n) (ODT_EB0R << (n << 1))
125 #define CALC_ODT_W(n) (ODT_EB0W << (n << 1))
126 #define CALC_ODT_RW(n) (CALC_ODT_R(n) | CALC_ODT_W(n))
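/*
 * Illustration of the ODT helpers above (values follow directly from the
 * definitions): ODT_EB0R = 0x00800000 and ODT_EB0W = 0x01000000, so
 * CALC_ODT_R(1) = 0x02000000, CALC_ODT_W(1) = 0x04000000 and
 * CALC_ODT_RW(1) = 0x06000000, i.e. each rank n shifts the enable bits
 * left by 2*n positions.
 */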
127
128 /* Defines for the Read Cycle Delay test */
129 #define NUMMEMTESTS 8
130 #define NUMMEMWORDS 8
131 #define NUMLOOPS 64 /* memory test loops */
132
133 /*
134 * This DDR2 setup code can dynamically setup the TLB entries for the DDR2 memory
135 * region. Right now the cache should still be disabled in U-Boot because of the
136 * EMAC driver, which needs its buffer descriptors to be located in non-cached
137 * memory.
138 *
139 * If at some time this restriction doesn't apply anymore, just define
140 * CONFIG_4xx_DCACHE in the board config file and this code should setup
141 * everything correctly.
142 */
143 #ifdef CONFIG_4xx_DCACHE
144 #define MY_TLB_WORD2_I_ENABLE 0 /* enable caching on SDRAM */
145 #else
146 #define MY_TLB_WORD2_I_ENABLE TLB_WORD2_I_ENABLE /* disable caching on SDRAM */
147 #endif
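/*
 * Hypothetical board config sketch: a board that may run with the data
 * cache enabled on SDRAM would simply add
 *
 *	#define CONFIG_4xx_DCACHE
 *
 * to its include/configs/<board>.h file; nothing in this file needs to
 * change.
 */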
148
149 /*
150 * Newer PPCs like the 440SPe and 460EX/GT can be equipped with more than 2GB of SDRAM.
151 * To support such configurations, we "only" map the first 2GB via the TLBs. We
152 * need some free virtual address space for the remaining peripherals, like SoC
153 * devices, FLASH etc.
154 *
155 * Note that ECC is currently not supported on configurations with more than 2GB
156 * SDRAM. This is because we only map the first 2GB on such systems, and therefore
157 * the ECC parity byte of the remaining area can't be written.
158 */
159 #ifndef CONFIG_MAX_MEM_MAPPED
160 #define CONFIG_MAX_MEM_MAPPED ((phys_size_t)2 << 30)
161 #endif
162
163 /*
164 * Board-specific Platform code can reimplement spd_ddr_init_hang () if needed
165 */
166 void __spd_ddr_init_hang (void)
167 {
168 hang ();
169 }
170 void spd_ddr_init_hang (void) __attribute__((weak, alias("__spd_ddr_init_hang")));
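/*
 * Hypothetical override sketch (not part of this file): a board could
 * provide its own non-weak spd_ddr_init_hang() in its board code, e.g. to
 * signal the failure before stopping:
 *
 *	void spd_ddr_init_hang(void)
 *	{
 *		board_error_led_on();	// hypothetical board helper
 *		hang();
 *	}
 */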
171
172 /*
173 * To provide an interface for board specific config values in this common
174 * DDR setup code, we implement the "weak" default functions here. They return
175 * the default value back to the caller.
176 *
177 * Please see include/configs/yucca.h for an example of a board-specific
178 * implementation.
179 */
180 u32 __ddr_wrdtr(u32 default_val)
181 {
182 return default_val;
183 }
184 u32 ddr_wrdtr(u32) __attribute__((weak, alias("__ddr_wrdtr")));
185
186 u32 __ddr_clktr(u32 default_val)
187 {
188 return default_val;
189 }
190 u32 ddr_clktr(u32) __attribute__((weak, alias("__ddr_clktr")));
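/*
 * Sketch of a board-specific override (illustrative only; the actual phase
 * values are board dependent): a board could replace the common default
 * passed in by initdram() with its own setting, e.g.:
 *
 *	u32 ddr_wrdtr(u32 default_val)
 *	{
 *		return SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_180_DEG_ADV;
 *	}
 */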
191
192
193 /* Private Structure Definitions */
194
195 /* enum only to ease code for cas latency setting */
196 typedef enum ddr_cas_id {
197 DDR_CAS_2 = 20,
198 DDR_CAS_2_5 = 25,
199 DDR_CAS_3 = 30,
200 DDR_CAS_4 = 40,
201 DDR_CAS_5 = 50
202 } ddr_cas_id_t;
203
204 /*-----------------------------------------------------------------------------+
205 * Prototypes
206 *-----------------------------------------------------------------------------*/
207 static phys_size_t sdram_memsize(void);
208 static void get_spd_info(unsigned long *dimm_populated,
209 unsigned char *iic0_dimm_addr,
210 unsigned long num_dimm_banks);
211 static void check_mem_type(unsigned long *dimm_populated,
212 unsigned char *iic0_dimm_addr,
213 unsigned long num_dimm_banks);
214 static void check_frequency(unsigned long *dimm_populated,
215 unsigned char *iic0_dimm_addr,
216 unsigned long num_dimm_banks);
217 static void check_rank_number(unsigned long *dimm_populated,
218 unsigned char *iic0_dimm_addr,
219 unsigned long num_dimm_banks);
220 static void check_voltage_type(unsigned long *dimm_populated,
221 unsigned char *iic0_dimm_addr,
222 unsigned long num_dimm_banks);
223 static void program_memory_queue(unsigned long *dimm_populated,
224 unsigned char *iic0_dimm_addr,
225 unsigned long num_dimm_banks);
226 static void program_codt(unsigned long *dimm_populated,
227 unsigned char *iic0_dimm_addr,
228 unsigned long num_dimm_banks);
229 static void program_mode(unsigned long *dimm_populated,
230 unsigned char *iic0_dimm_addr,
231 unsigned long num_dimm_banks,
232 ddr_cas_id_t *selected_cas,
233 int *write_recovery);
234 static void program_tr(unsigned long *dimm_populated,
235 unsigned char *iic0_dimm_addr,
236 unsigned long num_dimm_banks);
237 static void program_rtr(unsigned long *dimm_populated,
238 unsigned char *iic0_dimm_addr,
239 unsigned long num_dimm_banks);
240 static void program_bxcf(unsigned long *dimm_populated,
241 unsigned char *iic0_dimm_addr,
242 unsigned long num_dimm_banks);
243 static void program_copt1(unsigned long *dimm_populated,
244 unsigned char *iic0_dimm_addr,
245 unsigned long num_dimm_banks);
246 static void program_initplr(unsigned long *dimm_populated,
247 unsigned char *iic0_dimm_addr,
248 unsigned long num_dimm_banks,
249 ddr_cas_id_t selected_cas,
250 int write_recovery);
251 static unsigned long is_ecc_enabled(void);
252 #ifdef CONFIG_DDR_ECC
253 static void program_ecc(unsigned long *dimm_populated,
254 unsigned char *iic0_dimm_addr,
255 unsigned long num_dimm_banks,
256 unsigned long tlb_word2_i_value);
257 static void program_ecc_addr(unsigned long start_address,
258 unsigned long num_bytes,
259 unsigned long tlb_word2_i_value);
260 #endif
261 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
262 static void program_DQS_calibration(unsigned long *dimm_populated,
263 unsigned char *iic0_dimm_addr,
264 unsigned long num_dimm_banks);
265 #ifdef HARD_CODED_DQS /* calibration test with hardvalues */
266 static void test(void);
267 #else
268 static void DQS_calibration_process(void);
269 #endif
270 #endif
271 int do_reset (cmd_tbl_t *cmdtp, int flag, int argc, char *argv[]);
272 void dcbz_area(u32 start_address, u32 num_bytes);
273
274 static unsigned char spd_read(uchar chip, uint addr)
275 {
276 unsigned char data[2];
277
278 if (i2c_probe(chip) == 0)
279 if (i2c_read(chip, addr, 1, data, 1) == 0)
280 return data[0];
281
282 return 0;
283 }
284
285 /*-----------------------------------------------------------------------------+
286 * sdram_memsize
287 *-----------------------------------------------------------------------------*/
288 static phys_size_t sdram_memsize(void)
289 {
290 phys_size_t mem_size;
291 unsigned long mcopt2;
292 unsigned long mcstat;
293 unsigned long mb0cf;
294 unsigned long sdsz;
295 unsigned long i;
296
297 mem_size = 0;
298
299 mfsdram(SDRAM_MCOPT2, mcopt2);
300 mfsdram(SDRAM_MCSTAT, mcstat);
301
302 /* DDR controller must be enabled and not in self-refresh. */
303 /* Otherwise memsize is zero. */
304 if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
305 && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
306 && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
307 == (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
308 for (i = 0; i < MAXBXCF; i++) {
309 mfsdram(SDRAM_MB0CF + (i << 2), mb0cf);
310 /* Banks enabled */
311 if ((mb0cf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
312 sdsz = mfdcr_any(SDRAM_R0BAS + i) & SDRAM_RXBAS_SDSZ_MASK;
313
314 switch(sdsz) {
315 case SDRAM_RXBAS_SDSZ_8:
316 mem_size+=8;
317 break;
318 case SDRAM_RXBAS_SDSZ_16:
319 mem_size+=16;
320 break;
321 case SDRAM_RXBAS_SDSZ_32:
322 mem_size+=32;
323 break;
324 case SDRAM_RXBAS_SDSZ_64:
325 mem_size+=64;
326 break;
327 case SDRAM_RXBAS_SDSZ_128:
328 mem_size+=128;
329 break;
330 case SDRAM_RXBAS_SDSZ_256:
331 mem_size+=256;
332 break;
333 case SDRAM_RXBAS_SDSZ_512:
334 mem_size+=512;
335 break;
336 case SDRAM_RXBAS_SDSZ_1024:
337 mem_size+=1024;
338 break;
339 case SDRAM_RXBAS_SDSZ_2048:
340 mem_size+=2048;
341 break;
342 case SDRAM_RXBAS_SDSZ_4096:
343 mem_size+=4096;
344 break;
345 default:
346 printf("WARNING: Unsupported bank size (SDSZ=0x%lx)!\n"
347 , sdsz);
348 mem_size=0;
349 break;
350 }
351 }
352 }
353 }
354
355 return mem_size << 20;
356 }
357
358 /*-----------------------------------------------------------------------------+
359 * initdram. Initializes the 440SP Memory Queue and DDR SDRAM controller.
360 * Note: This routine runs from flash with a stack set up in the chip's
361 * sram space. It is important that the routine does not require .sbss, .bss or
362 * .data sections. It also cannot call routines that require these sections.
363 *-----------------------------------------------------------------------------*/
364 /*-----------------------------------------------------------------------------
365 * Function: initdram
366 * Description: Configures SDRAM memory banks for DDR operation.
367 * Auto Memory Configuration option reads the DDR SDRAM EEPROMs
368 * via the IIC bus and then configures the DDR SDRAM memory
369 * banks appropriately. If Auto Memory Configuration is
370 * not used, it is assumed that no DIMM is plugged in.
371 *-----------------------------------------------------------------------------*/
372 phys_size_t initdram(int board_type)
373 {
374 unsigned char iic0_dimm_addr[] = SPD_EEPROM_ADDRESS;
375 unsigned char spd0[MAX_SPD_BYTES];
376 unsigned char spd1[MAX_SPD_BYTES];
377 unsigned char *dimm_spd[MAXDIMMS];
378 unsigned long dimm_populated[MAXDIMMS];
379 unsigned long num_dimm_banks; /* on board dimm banks */
380 unsigned long val;
381 ddr_cas_id_t selected_cas = DDR_CAS_5; /* preset to silence compiler */
382 int write_recovery;
383 phys_size_t dram_size = 0;
384
385 num_dimm_banks = sizeof(iic0_dimm_addr);
386
387 /*------------------------------------------------------------------
388 * Set up an array of SPD data buffers.
389 *-----------------------------------------------------------------*/
390 dimm_spd[0] = spd0;
391 dimm_spd[1] = spd1;
392
393 /*------------------------------------------------------------------
394 * Reset the DDR-SDRAM controller.
395 *-----------------------------------------------------------------*/
396 mtsdr(SDR0_SRST, (0x80000000 >> 10));
397 mtsdr(SDR0_SRST, 0x00000000);
398
399 /*
400 * Make sure I2C controller is initialized
401 * before continuing.
402 */
403
404 /* switch to correct I2C bus */
405 I2C_SET_BUS(CONFIG_SYS_SPD_BUS_NUM);
406 i2c_init(CONFIG_SYS_I2C_SPEED, CONFIG_SYS_I2C_SLAVE);
407
408 /*------------------------------------------------------------------
409 * Clear out the serial presence detect buffers.
410 * Perform IIC reads from the dimm. Fill in the spds.
411 * Check to see if the dimm slots are populated
412 *-----------------------------------------------------------------*/
413 get_spd_info(dimm_populated, iic0_dimm_addr, num_dimm_banks);
414
415 /*------------------------------------------------------------------
416 * Check the memory type for the dimms plugged.
417 *-----------------------------------------------------------------*/
418 check_mem_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);
419
420 /*------------------------------------------------------------------
421 * Check the frequency supported for the dimms plugged.
422 *-----------------------------------------------------------------*/
423 check_frequency(dimm_populated, iic0_dimm_addr, num_dimm_banks);
424
425 /*------------------------------------------------------------------
426 * Check the total rank number.
427 *-----------------------------------------------------------------*/
428 check_rank_number(dimm_populated, iic0_dimm_addr, num_dimm_banks);
429
430 /*------------------------------------------------------------------
431 * Check the voltage type for the dimms plugged.
432 *-----------------------------------------------------------------*/
433 check_voltage_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);
434
435 /*------------------------------------------------------------------
436 * Program SDRAM controller options 2 register
437 * Except Enabling of the memory controller.
438 *-----------------------------------------------------------------*/
439 mfsdram(SDRAM_MCOPT2, val);
440 mtsdram(SDRAM_MCOPT2,
441 (val &
442 ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_PMEN_MASK |
443 SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_XSRP_MASK |
444 SDRAM_MCOPT2_ISIE_MASK))
445 | (SDRAM_MCOPT2_SREN_ENTER | SDRAM_MCOPT2_PMEN_DISABLE |
446 SDRAM_MCOPT2_IPTR_IDLE | SDRAM_MCOPT2_XSRP_ALLOW |
447 SDRAM_MCOPT2_ISIE_ENABLE));
448
449 /*------------------------------------------------------------------
450 * Program SDRAM controller options 1 register
451 * Note: Does not enable the memory controller.
452 *-----------------------------------------------------------------*/
453 program_copt1(dimm_populated, iic0_dimm_addr, num_dimm_banks);
454
455 /*------------------------------------------------------------------
456 * Set the SDRAM Controller On Die Termination Register
457 *-----------------------------------------------------------------*/
458 program_codt(dimm_populated, iic0_dimm_addr, num_dimm_banks);
459
460 /*------------------------------------------------------------------
461 * Program SDRAM refresh register.
462 *-----------------------------------------------------------------*/
463 program_rtr(dimm_populated, iic0_dimm_addr, num_dimm_banks);
464
465 /*------------------------------------------------------------------
466 * Program SDRAM mode register.
467 *-----------------------------------------------------------------*/
468 program_mode(dimm_populated, iic0_dimm_addr, num_dimm_banks,
469 &selected_cas, &write_recovery);
470
471 /*------------------------------------------------------------------
472 * Set the SDRAM Write Data/DM/DQS Clock Timing Reg
473 *-----------------------------------------------------------------*/
474 mfsdram(SDRAM_WRDTR, val);
475 mtsdram(SDRAM_WRDTR, (val & ~(SDRAM_WRDTR_LLWP_MASK | SDRAM_WRDTR_WTR_MASK)) |
476 ddr_wrdtr(SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_90_DEG_ADV));
477
478 /*------------------------------------------------------------------
479 * Set the SDRAM Clock Timing Register
480 *-----------------------------------------------------------------*/
481 mfsdram(SDRAM_CLKTR, val);
482 mtsdram(SDRAM_CLKTR, (val & ~SDRAM_CLKTR_CLKP_MASK) |
483 ddr_clktr(SDRAM_CLKTR_CLKP_0_DEG));
484
485 /*------------------------------------------------------------------
486 * Program the BxCF registers.
487 *-----------------------------------------------------------------*/
488 program_bxcf(dimm_populated, iic0_dimm_addr, num_dimm_banks);
489
490 /*------------------------------------------------------------------
491 * Program SDRAM timing registers.
492 *-----------------------------------------------------------------*/
493 program_tr(dimm_populated, iic0_dimm_addr, num_dimm_banks);
494
495 /*------------------------------------------------------------------
496 * Set the Extended Mode register
497 *-----------------------------------------------------------------*/
498 mfsdram(SDRAM_MEMODE, val);
499 mtsdram(SDRAM_MEMODE,
500 (val & ~(SDRAM_MEMODE_DIC_MASK | SDRAM_MEMODE_DLL_MASK |
501 SDRAM_MEMODE_RTT_MASK | SDRAM_MEMODE_DQS_MASK)) |
502 (SDRAM_MEMODE_DIC_NORMAL | SDRAM_MEMODE_DLL_ENABLE
503 | SDRAM_MEMODE_RTT_150OHM | SDRAM_MEMODE_DQS_ENABLE));
504
505 /*------------------------------------------------------------------
506 * Program Initialization preload registers.
507 *-----------------------------------------------------------------*/
508 program_initplr(dimm_populated, iic0_dimm_addr, num_dimm_banks,
509 selected_cas, write_recovery);
510
511 /*------------------------------------------------------------------
512 * Delay to ensure 200usec have elapsed since reset.
513 *-----------------------------------------------------------------*/
514 udelay(400);
515
516 /*------------------------------------------------------------------
517 * Set the memory queue core base addr.
518 *-----------------------------------------------------------------*/
519 program_memory_queue(dimm_populated, iic0_dimm_addr, num_dimm_banks);
520
521 /*------------------------------------------------------------------
522 * Program SDRAM controller options 2 register
523 * Enable the memory controller.
524 *-----------------------------------------------------------------*/
525 mfsdram(SDRAM_MCOPT2, val);
526 mtsdram(SDRAM_MCOPT2,
527 (val & ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_DCEN_MASK |
528 SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_ISIE_MASK)) |
529 SDRAM_MCOPT2_IPTR_EXECUTE);
530
531 /*------------------------------------------------------------------
532 * Wait for IPTR_EXECUTE init sequence to complete.
533 *-----------------------------------------------------------------*/
534 do {
535 mfsdram(SDRAM_MCSTAT, val);
536 } while ((val & SDRAM_MCSTAT_MIC_MASK) == SDRAM_MCSTAT_MIC_NOTCOMP);
537
538 /* enable the controller only after init sequence completes */
539 mfsdram(SDRAM_MCOPT2, val);
540 mtsdram(SDRAM_MCOPT2, (val | SDRAM_MCOPT2_DCEN_ENABLE));
541
542 /* Make sure delay-line calibration is done before proceeding */
543 do {
544 mfsdram(SDRAM_DLCR, val);
545 } while (!(val & SDRAM_DLCR_DLCS_COMPLETE));
546
547 /* get installed memory size */
548 dram_size = sdram_memsize();
549
550 /*
551 * Limit size to 2GB
552 */
553 if (dram_size > CONFIG_MAX_MEM_MAPPED)
554 dram_size = CONFIG_MAX_MEM_MAPPED;
555
556 /* and program tlb entries for this size (dynamic) */
557
558 /*
559 * Program TLB entries with caches enabled, for best performance
560 * during auto-calibration and ECC generation
561 */
562 program_tlb(0, 0, dram_size, 0);
563
564 /*------------------------------------------------------------------
565 * DQS calibration.
566 *-----------------------------------------------------------------*/
567 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
568 DQS_autocalibration();
569 #else
570 program_DQS_calibration(dimm_populated, iic0_dimm_addr, num_dimm_banks);
571 #endif
572
573 #ifdef CONFIG_DDR_ECC
574 /*------------------------------------------------------------------
575 * If ecc is enabled, initialize the parity bits.
576 *-----------------------------------------------------------------*/
577 program_ecc(dimm_populated, iic0_dimm_addr, num_dimm_banks, 0);
578 #endif
579
580 /*
581 * Now after initialization (auto-calibration and ECC generation)
582 * remove the TLB entries with caches enabled and program again with
583 * desired cache functionality
584 */
585 remove_tlb(0, dram_size);
586 program_tlb(0, 0, dram_size, MY_TLB_WORD2_I_ENABLE);
587
588 ppc4xx_ibm_ddr2_register_dump();
589
590 /*
591 * Clear potential errors resulting from auto-calibration.
592 * If not done, then we could get an interrupt later on when
593 * exceptions are enabled.
594 */
595 set_mcsr(get_mcsr());
596
597 return sdram_memsize();
598 }
599
600 static void get_spd_info(unsigned long *dimm_populated,
601 unsigned char *iic0_dimm_addr,
602 unsigned long num_dimm_banks)
603 {
604 unsigned long dimm_num;
605 unsigned long dimm_found;
606 unsigned char num_of_bytes;
607 unsigned char total_size;
608
609 dimm_found = FALSE;
610 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
611 num_of_bytes = 0;
612 total_size = 0;
613
614 num_of_bytes = spd_read(iic0_dimm_addr[dimm_num], 0);
615 debug("\nspd_read(0x%x) returned %d\n",
616 iic0_dimm_addr[dimm_num], num_of_bytes);
617 total_size = spd_read(iic0_dimm_addr[dimm_num], 1);
618 debug("spd_read(0x%x) returned %d\n",
619 iic0_dimm_addr[dimm_num], total_size);
620
621 if ((num_of_bytes != 0) && (total_size != 0)) {
622 dimm_populated[dimm_num] = TRUE;
623 dimm_found = TRUE;
624 debug("DIMM slot %lu: populated\n", dimm_num);
625 } else {
626 dimm_populated[dimm_num] = FALSE;
627 debug("DIMM slot %lu: Not populated\n", dimm_num);
628 }
629 }
630
631 if (dimm_found == FALSE) {
632 printf("ERROR - No memory installed. Install a DDR-SDRAM DIMM.\n\n");
633 spd_ddr_init_hang ();
634 }
635 }
636
637 void board_add_ram_info(int use_default)
638 {
639 PPC4xx_SYS_INFO board_cfg;
640 u32 val;
641
642 if (is_ecc_enabled())
643 puts(" (ECC");
644 else
645 puts(" (ECC not");
646
647 get_sys_info(&board_cfg);
648
649 mfsdr(SDR0_DDR0, val);
650 val = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(val), 1);
651 printf(" enabled, %d MHz", (val * 2) / 1000000);
652
653 mfsdram(SDRAM_MMODE, val);
654 val = (val & SDRAM_MMODE_DCL_MASK) >> 4;
655 printf(", CL%d)", val);
656 }
657
658 /*------------------------------------------------------------------
659 * For the memory DIMMs installed, this routine verifies that they
660 * really are DDR1 or DDR2 DIMMs.
661 *-----------------------------------------------------------------*/
662 static void check_mem_type(unsigned long *dimm_populated,
663 unsigned char *iic0_dimm_addr,
664 unsigned long num_dimm_banks)
665 {
666 unsigned long dimm_num;
667 unsigned long dimm_type;
668
669 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
670 if (dimm_populated[dimm_num] == TRUE) {
671 dimm_type = spd_read(iic0_dimm_addr[dimm_num], 2);
672 switch (dimm_type) {
673 case 1:
674 printf("ERROR: Standard Fast Page Mode DRAM DIMM detected in "
675 "slot %d.\n", (unsigned int)dimm_num);
676 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
677 printf("Replace the DIMM module with a supported DIMM.\n\n");
678 spd_ddr_init_hang ();
679 break;
680 case 2:
681 printf("ERROR: EDO DIMM detected in slot %d.\n",
682 (unsigned int)dimm_num);
683 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
684 printf("Replace the DIMM module with a supported DIMM.\n\n");
685 spd_ddr_init_hang ();
686 break;
687 case 3:
688 printf("ERROR: Pipelined Nibble DIMM detected in slot %d.\n",
689 (unsigned int)dimm_num);
690 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
691 printf("Replace the DIMM module with a supported DIMM.\n\n");
692 spd_ddr_init_hang ();
693 break;
694 case 4:
695 printf("ERROR: SDRAM DIMM detected in slot %d.\n",
696 (unsigned int)dimm_num);
697 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
698 printf("Replace the DIMM module with a supported DIMM.\n\n");
699 spd_ddr_init_hang ();
700 break;
701 case 5:
702 printf("ERROR: Multiplexed ROM DIMM detected in slot %d.\n",
703 (unsigned int)dimm_num);
704 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
705 printf("Replace the DIMM module with a supported DIMM.\n\n");
706 spd_ddr_init_hang ();
707 break;
708 case 6:
709 printf("ERROR: SGRAM DIMM detected in slot %d.\n",
710 (unsigned int)dimm_num);
711 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
712 printf("Replace the DIMM module with a supported DIMM.\n\n");
713 spd_ddr_init_hang ();
714 break;
715 case 7:
716 debug("DIMM slot %d: DDR1 SDRAM detected\n", dimm_num);
717 dimm_populated[dimm_num] = SDRAM_DDR1;
718 break;
719 case 8:
720 debug("DIMM slot %d: DDR2 SDRAM detected\n", dimm_num);
721 dimm_populated[dimm_num] = SDRAM_DDR2;
722 break;
723 default:
724 printf("ERROR: Unknown DIMM detected in slot %d.\n",
725 (unsigned int)dimm_num);
726 printf("Only DDR1 and DDR2 SDRAM DIMMs are supported.\n");
727 printf("Replace the DIMM module with a supported DIMM.\n\n");
728 spd_ddr_init_hang ();
729 break;
730 }
731 }
732 }
733 for (dimm_num = 1; dimm_num < num_dimm_banks; dimm_num++) {
734 if ((dimm_populated[dimm_num-1] != SDRAM_NONE)
735 && (dimm_populated[dimm_num] != SDRAM_NONE)
736 && (dimm_populated[dimm_num-1] != dimm_populated[dimm_num])) {
737 printf("ERROR: DIMM's DDR1 and DDR2 type can not be mixed.\n");
738 spd_ddr_init_hang ();
739 }
740 }
741 }
742
743 /*------------------------------------------------------------------
744 * For the memory DIMMs installed, this routine verifies that
745 * the previously calculated frequency is supported.
746 *-----------------------------------------------------------------*/
747 static void check_frequency(unsigned long *dimm_populated,
748 unsigned char *iic0_dimm_addr,
749 unsigned long num_dimm_banks)
750 {
751 unsigned long dimm_num;
752 unsigned long tcyc_reg;
753 unsigned long cycle_time;
754 unsigned long calc_cycle_time;
755 unsigned long sdram_freq;
756 unsigned long sdr_ddrpll;
757 PPC4xx_SYS_INFO board_cfg;
758
759 /*------------------------------------------------------------------
760 * Get the board configuration info.
761 *-----------------------------------------------------------------*/
762 get_sys_info(&board_cfg);
763
764 mfsdr(SDR0_DDR0, sdr_ddrpll);
765 sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
766
767 /*
768 * calc_cycle_time is calculated from DDR frequency set by board/chip
769 * and is expressed in multiple of 10 picoseconds
770 * to match the way DIMM cycle time is calculated below.
771 */
772 calc_cycle_time = MULDIV64(ONE_BILLION, 100, sdram_freq);
773
774 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
775 if (dimm_populated[dimm_num] != SDRAM_NONE) {
776 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
777 /*
778 * Byte 9, Cycle time for CAS Latency=X, is split into two nibbles:
779 * the higher order nibble (bits 4-7) designates the cycle time
780 * to a granularity of 1ns;
781 * the value presented by the lower order nibble (bits 0-3)
782 * has a granularity of .1ns and is added to the value designated
783 * by the higher nibble. In addition, four lines of the lower order
784 * nibble are assigned to support +.25,+.33, +.66 and +.75.
785 */
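/*
 * Worked example (hypothetical SPD content): tcyc_reg = 0x3D decodes
 * to (3 * 100) + 75 = 375, i.e. 3.75 ns, which corresponds to a
 * 266 MHz (DDR2-533) DIMM.
 */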
786 /* Convert from hex to decimal */
787 if ((tcyc_reg & 0x0F) == 0x0D)
788 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
789 else if ((tcyc_reg & 0x0F) == 0x0C)
790 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 66;
791 else if ((tcyc_reg & 0x0F) == 0x0B)
792 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 33;
793 else if ((tcyc_reg & 0x0F) == 0x0A)
794 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 25;
795 else
796 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) +
797 ((tcyc_reg & 0x0F)*10);
798 debug("cycle_time=%d [10 picoseconds]\n", cycle_time);
799
800 if (cycle_time > (calc_cycle_time + 10)) {
801 /*
802 * the provided SDRAM cycle_time is too small
803 * for the available DIMM cycle_time.
804 * The additional 100 ps is here to accept a small uncertainty.
805 */
806 printf("ERROR: DRAM DIMM detected with cycle_time %d ps in "
807 "slot %d \n while calculated cycle time is %d ps.\n",
808 (unsigned int)(cycle_time*10),
809 (unsigned int)dimm_num,
810 (unsigned int)(calc_cycle_time*10));
811 printf("Replace the DIMM, or change DDR frequency via "
812 "strapping bits.\n\n");
813 spd_ddr_init_hang ();
814 }
815 }
816 }
817 }
818
819 /*------------------------------------------------------------------
820 * For the memory DIMMs installed, this routine verifies that the
821 * total number of ranks does not exceed the supported maximum.
822 *-----------------------------------------------------------------*/
823 static void check_rank_number(unsigned long *dimm_populated,
824 unsigned char *iic0_dimm_addr,
825 unsigned long num_dimm_banks)
826 {
827 unsigned long dimm_num;
828 unsigned long dimm_rank;
829 unsigned long total_rank = 0;
830
831 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
832 if (dimm_populated[dimm_num] != SDRAM_NONE) {
833 dimm_rank = spd_read(iic0_dimm_addr[dimm_num], 5);
834 if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
835 dimm_rank = (dimm_rank & 0x0F) +1;
836 else
837 dimm_rank = dimm_rank & 0x0F;
838
839
840 if (dimm_rank > MAXRANKS) {
841 printf("ERROR: DRAM DIMM detected with %lu ranks in "
842 "slot %lu is not supported.\n", dimm_rank, dimm_num);
843 printf("Only %d ranks are supported for all DIMM.\n", MAXRANKS);
844 printf("Replace the DIMM module with a supported DIMM.\n\n");
845 spd_ddr_init_hang ();
846 } else
847 total_rank += dimm_rank;
848 }
849 if (total_rank > MAXRANKS) {
850 printf("ERROR: DRAM DIMM detected with a total of %d ranks "
851 "for all slots.\n", (unsigned int)total_rank);
852 printf("Only %d ranks are supported for all DIMM.\n", MAXRANKS);
853 printf("Remove one of the DIMM modules.\n\n");
854 spd_ddr_init_hang ();
855 }
856 }
857 }
858
859 /*------------------------------------------------------------------
860 * Only 2.5V (DDR1) and 1.8V (DDR2) modules are supported.
861 * This routine verifies this.
862 *-----------------------------------------------------------------*/
863 static void check_voltage_type(unsigned long *dimm_populated,
864 unsigned char *iic0_dimm_addr,
865 unsigned long num_dimm_banks)
866 {
867 unsigned long dimm_num;
868 unsigned long voltage_type;
869
870 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
871 if (dimm_populated[dimm_num] != SDRAM_NONE) {
872 voltage_type = spd_read(iic0_dimm_addr[dimm_num], 8);
873 switch (voltage_type) {
874 case 0x00:
875 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
876 printf("This DIMM is 5.0 Volt/TTL.\n");
877 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
878 (unsigned int)dimm_num);
879 spd_ddr_init_hang ();
880 break;
881 case 0x01:
882 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
883 printf("This DIMM is LVTTL.\n");
884 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
885 (unsigned int)dimm_num);
886 spd_ddr_init_hang ();
887 break;
888 case 0x02:
889 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
890 printf("This DIMM is 1.5 Volt.\n");
891 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
892 (unsigned int)dimm_num);
893 spd_ddr_init_hang ();
894 break;
895 case 0x03:
896 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
897 printf("This DIMM is 3.3 Volt/TTL.\n");
898 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
899 (unsigned int)dimm_num);
900 spd_ddr_init_hang ();
901 break;
902 case 0x04:
903 /* 2.5 Voltage only for DDR1 */
904 break;
905 case 0x05:
906 /* 1.8 Voltage only for DDR2 */
907 break;
908 default:
909 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
910 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
911 (unsigned int)dimm_num);
912 spd_ddr_init_hang ();
913 break;
914 }
915 }
916 }
917 }
918
919 /*-----------------------------------------------------------------------------+
920 * program_copt1.
921 *-----------------------------------------------------------------------------*/
922 static void program_copt1(unsigned long *dimm_populated,
923 unsigned char *iic0_dimm_addr,
924 unsigned long num_dimm_banks)
925 {
926 unsigned long dimm_num;
927 unsigned long mcopt1;
928 unsigned long ecc_enabled;
929 unsigned long ecc = 0;
930 unsigned long data_width = 0;
931 unsigned long dimm_32bit;
932 unsigned long dimm_64bit;
933 unsigned long registered = 0;
934 unsigned long attribute = 0;
935 unsigned long buf0, buf1; /* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
936 unsigned long bankcount;
937 unsigned long ddrtype;
938 unsigned long val;
939
940 #ifdef CONFIG_DDR_ECC
941 ecc_enabled = TRUE;
942 #else
943 ecc_enabled = FALSE;
944 #endif
945 dimm_32bit = FALSE;
946 dimm_64bit = FALSE;
947 buf0 = FALSE;
948 buf1 = FALSE;
949
950 /*------------------------------------------------------------------
951 * Set memory controller options reg 1, SDRAM_MCOPT1.
952 *-----------------------------------------------------------------*/
953 mfsdram(SDRAM_MCOPT1, val);
954 mcopt1 = val & ~(SDRAM_MCOPT1_MCHK_MASK | SDRAM_MCOPT1_RDEN_MASK |
955 SDRAM_MCOPT1_PMU_MASK | SDRAM_MCOPT1_DMWD_MASK |
956 SDRAM_MCOPT1_UIOS_MASK | SDRAM_MCOPT1_BCNT_MASK |
957 SDRAM_MCOPT1_DDR_TYPE_MASK | SDRAM_MCOPT1_RWOO_MASK |
958 SDRAM_MCOPT1_WOOO_MASK | SDRAM_MCOPT1_DCOO_MASK |
959 SDRAM_MCOPT1_DREF_MASK);
960
961 mcopt1 |= SDRAM_MCOPT1_QDEP;
962 mcopt1 |= SDRAM_MCOPT1_PMU_OPEN;
963 mcopt1 |= SDRAM_MCOPT1_RWOO_DISABLED;
964 mcopt1 |= SDRAM_MCOPT1_WOOO_DISABLED;
965 mcopt1 |= SDRAM_MCOPT1_DCOO_DISABLED;
966 mcopt1 |= SDRAM_MCOPT1_DREF_NORMAL;
967
968 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
969 if (dimm_populated[dimm_num] != SDRAM_NONE) {
970 /* test ecc support */
971 ecc = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 11);
972 if (ecc != 0x02) /* ecc not supported */
973 ecc_enabled = FALSE;
974
975 /* test bank count */
976 bankcount = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 17);
977 if (bankcount == 0x04) /* bank count = 4 */
978 mcopt1 |= SDRAM_MCOPT1_4_BANKS;
979 else /* bank count = 8 */
980 mcopt1 |= SDRAM_MCOPT1_8_BANKS;
981
982 /* test DDR type */
983 ddrtype = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2);
984 /* test for buffered/unbuffered, registered, differential clocks */
985 registered = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 20);
986 attribute = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 21);
987
988 /* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
989 if (dimm_num == 0) {
990 if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
991 mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
992 if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
993 mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
994 if (registered == 1) { /* DDR2 always buffered */
995 /* TODO: what about above comments ? */
996 mcopt1 |= SDRAM_MCOPT1_RDEN;
997 buf0 = TRUE;
998 } else {
999 /* TODO: the mask 0x02 doesn't match Samsung def for byte 21. */
1000 if ((attribute & 0x02) == 0x00) {
1001 /* buffered not supported */
1002 buf0 = FALSE;
1003 } else {
1004 mcopt1 |= SDRAM_MCOPT1_RDEN;
1005 buf0 = TRUE;
1006 }
1007 }
1008 }
1009 else if (dimm_num == 1) {
1010 if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
1011 mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
1012 if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
1013 mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
1014 if (registered == 1) {
1015 /* DDR2 always buffered */
1016 mcopt1 |= SDRAM_MCOPT1_RDEN;
1017 buf1 = TRUE;
1018 } else {
1019 if ((attribute & 0x02) == 0x00) {
1020 /* buffered not supported */
1021 buf1 = FALSE;
1022 } else {
1023 mcopt1 |= SDRAM_MCOPT1_RDEN;
1024 buf1 = TRUE;
1025 }
1026 }
1027 }
1028
1029 /* Note that for DDR2 the byte 7 is reserved, but OK to keep code as is. */
1030 data_width = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 6) +
1031 (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 7)) << 8);
1032
1033 switch (data_width) {
1034 case 72:
1035 case 64:
1036 dimm_64bit = TRUE;
1037 break;
1038 case 40:
1039 case 32:
1040 dimm_32bit = TRUE;
1041 break;
1042 default:
1043 printf("WARNING: Detected a DIMM with a data width of %lu bits.\n",
1044 data_width);
1045 printf("Only DIMMs with 32 or 64 bit DDR-SDRAM widths are supported.\n");
1046 break;
1047 }
1048 }
1049 }
1050
1051 /* verify matching properties */
1052 if ((dimm_populated[0] != SDRAM_NONE) && (dimm_populated[1] != SDRAM_NONE)) {
1053 if (buf0 != buf1) {
1054 printf("ERROR: DIMM's buffered/unbuffered, registered, clocking don't match.\n");
1055 spd_ddr_init_hang ();
1056 }
1057 }
1058
1059 if ((dimm_64bit == TRUE) && (dimm_32bit == TRUE)) {
1060 printf("ERROR: Cannot mix 32 bit and 64 bit DDR-SDRAM DIMMs together.\n");
1061 spd_ddr_init_hang ();
1062 }
1063 else if ((dimm_64bit == TRUE) && (dimm_32bit == FALSE)) {
1064 mcopt1 |= SDRAM_MCOPT1_DMWD_64;
1065 } else if ((dimm_64bit == FALSE) && (dimm_32bit == TRUE)) {
1066 mcopt1 |= SDRAM_MCOPT1_DMWD_32;
1067 } else {
1068 printf("ERROR: Please install only 32 or 64 bit DDR-SDRAM DIMMs.\n\n");
1069 spd_ddr_init_hang ();
1070 }
1071
1072 if (ecc_enabled == TRUE)
1073 mcopt1 |= SDRAM_MCOPT1_MCHK_GEN;
1074 else
1075 mcopt1 |= SDRAM_MCOPT1_MCHK_NON;
1076
1077 mtsdram(SDRAM_MCOPT1, mcopt1);
1078 }
1079
1080 /*-----------------------------------------------------------------------------+
1081 * program_codt.
1082 *-----------------------------------------------------------------------------*/
1083 static void program_codt(unsigned long *dimm_populated,
1084 unsigned char *iic0_dimm_addr,
1085 unsigned long num_dimm_banks)
1086 {
1087 unsigned long codt;
1088 unsigned long modt0 = 0;
1089 unsigned long modt1 = 0;
1090 unsigned long modt2 = 0;
1091 unsigned long modt3 = 0;
1092 unsigned char dimm_num;
1093 unsigned char dimm_rank;
1094 unsigned char total_rank = 0;
1095 unsigned char total_dimm = 0;
1096 unsigned char dimm_type = 0;
1097 unsigned char firstSlot = 0;
1098
1099 /*------------------------------------------------------------------
1100 * Set the SDRAM Controller On Die Termination Register
1101 *-----------------------------------------------------------------*/
1102 mfsdram(SDRAM_CODT, codt);
1103 codt |= (SDRAM_CODT_IO_NMODE
1104 & (~SDRAM_CODT_DQS_SINGLE_END
1105 & ~SDRAM_CODT_CKSE_SINGLE_END
1106 & ~SDRAM_CODT_FEEBBACK_RCV_SINGLE_END
1107 & ~SDRAM_CODT_FEEBBACK_DRV_SINGLE_END));
1108
1109 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1110 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1111 dimm_rank = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 5);
1112 if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08) {
1113 dimm_rank = (dimm_rank & 0x0F) + 1;
1114 dimm_type = SDRAM_DDR2;
1115 } else {
1116 dimm_rank = dimm_rank & 0x0F;
1117 dimm_type = SDRAM_DDR1;
1118 }
1119
1120 total_rank += dimm_rank;
1121 total_dimm++;
1122 if ((dimm_num == 0) && (total_dimm == 1))
1123 firstSlot = TRUE;
1124 else
1125 firstSlot = FALSE;
1126 }
1127 }
1128 if (dimm_type == SDRAM_DDR2) {
1129 codt |= SDRAM_CODT_DQS_1_8_V_DDR2;
1130 if ((total_dimm == 1) && (firstSlot == TRUE)) {
1131 if (total_rank == 1) { /* PUUU */
1132 codt |= CALC_ODT_R(0);
1133 modt0 = CALC_ODT_W(0);
1134 modt1 = 0x00000000;
1135 modt2 = 0x00000000;
1136 modt3 = 0x00000000;
1137 }
1138 if (total_rank == 2) { /* PPUU */
1139 codt |= CALC_ODT_R(0) | CALC_ODT_R(1);
1140 modt0 = CALC_ODT_W(0) | CALC_ODT_W(1);
1141 modt1 = 0x00000000;
1142 modt2 = 0x00000000;
1143 modt3 = 0x00000000;
1144 }
1145 } else if ((total_dimm == 1) && (firstSlot != TRUE)) {
1146 if (total_rank == 1) { /* UUPU */
1147 codt |= CALC_ODT_R(2);
1148 modt0 = 0x00000000;
1149 modt1 = 0x00000000;
1150 modt2 = CALC_ODT_W(2);
1151 modt3 = 0x00000000;
1152 }
1153 if (total_rank == 2) { /* UUPP */
1154 codt |= CALC_ODT_R(2) | CALC_ODT_R(3);
1155 modt0 = 0x00000000;
1156 modt1 = 0x00000000;
1157 modt2 = CALC_ODT_W(2) | CALC_ODT_W(3);
1158 modt3 = 0x00000000;
1159 }
1160 }
1161 if (total_dimm == 2) {
1162 if (total_rank == 2) { /* PUPU */
1163 codt |= CALC_ODT_R(0) | CALC_ODT_R(2);
1164 modt0 = CALC_ODT_RW(2);
1165 modt1 = 0x00000000;
1166 modt2 = CALC_ODT_RW(0);
1167 modt3 = 0x00000000;
1168 }
1169 if (total_rank == 4) { /* PPPP */
1170 codt |= CALC_ODT_R(0) | CALC_ODT_R(1) |
1171 CALC_ODT_R(2) | CALC_ODT_R(3);
1172 modt0 = CALC_ODT_RW(2) | CALC_ODT_RW(3);
1173 modt1 = 0x00000000;
1174 modt2 = CALC_ODT_RW(0) | CALC_ODT_RW(1);
1175 modt3 = 0x00000000;
1176 }
1177 }
1178 } else {
1179 codt |= SDRAM_CODT_DQS_2_5_V_DDR1;
1180 modt0 = 0x00000000;
1181 modt1 = 0x00000000;
1182 modt2 = 0x00000000;
1183 modt3 = 0x00000000;
1184
1185 if (total_dimm == 1) {
1186 if (total_rank == 1)
1187 codt |= 0x00800000;
1188 if (total_rank == 2)
1189 codt |= 0x02800000;
1190 }
1191 if (total_dimm == 2) {
1192 if (total_rank == 2)
1193 codt |= 0x08800000;
1194 if (total_rank == 4)
1195 codt |= 0x2a800000;
1196 }
1197 }
1198
1199 debug("nb of dimm %d\n", total_dimm);
1200 debug("nb of rank %d\n", total_rank);
1201 if (total_dimm == 1)
1202 debug("dimm in slot %d\n", firstSlot);
1203
1204 mtsdram(SDRAM_CODT, codt);
1205 mtsdram(SDRAM_MODT0, modt0);
1206 mtsdram(SDRAM_MODT1, modt1);
1207 mtsdram(SDRAM_MODT2, modt2);
1208 mtsdram(SDRAM_MODT3, modt3);
1209 }
1210
1211 /*-----------------------------------------------------------------------------+
1212 * program_initplr.
1213 *-----------------------------------------------------------------------------*/
1214 static void program_initplr(unsigned long *dimm_populated,
1215 unsigned char *iic0_dimm_addr,
1216 unsigned long num_dimm_banks,
1217 ddr_cas_id_t selected_cas,
1218 int write_recovery)
1219 {
1220 u32 cas = 0;
1221 u32 odt = 0;
1222 u32 ods = 0;
1223 u32 mr;
1224 u32 wr;
1225 u32 emr;
1226 u32 emr2;
1227 u32 emr3;
1228 int dimm_num;
1229 int total_dimm = 0;
1230
1231 /******************************************************
1232 ** Assumption: if more than one DIMM, all DIMMs are the same
1233 ** as already checked in check_mem_type()
1234 ******************************************************/
1235
1236 if ((dimm_populated[0] == SDRAM_DDR1) || (dimm_populated[1] == SDRAM_DDR1)) {
1237 mtsdram(SDRAM_INITPLR0, 0x81B80000);
1238 mtsdram(SDRAM_INITPLR1, 0x81900400);
1239 mtsdram(SDRAM_INITPLR2, 0x81810000);
1240 mtsdram(SDRAM_INITPLR3, 0xff800162);
1241 mtsdram(SDRAM_INITPLR4, 0x81900400);
1242 mtsdram(SDRAM_INITPLR5, 0x86080000);
1243 mtsdram(SDRAM_INITPLR6, 0x86080000);
1244 mtsdram(SDRAM_INITPLR7, 0x81000062);
1245 } else if ((dimm_populated[0] == SDRAM_DDR2) || (dimm_populated[1] == SDRAM_DDR2)) {
1246 switch (selected_cas) {
1247 case DDR_CAS_3:
1248 cas = 3 << 4;
1249 break;
1250 case DDR_CAS_4:
1251 cas = 4 << 4;
1252 break;
1253 case DDR_CAS_5:
1254 cas = 5 << 4;
1255 break;
1256 default:
1257 printf("ERROR: ucode error on selected_cas value %d", selected_cas);
1258 spd_ddr_init_hang ();
1259 break;
1260 }
1261
1262 #if 0
1263 /*
1264 * ToDo - Still a problem with the write recovery:
1265 * On the Corsair CM2X512-5400C4 module, setting write recovery
1266 * in the INITPLR reg to the value calculated in program_mode()
1267 * results in not correctly working DDR2 memory (crash after
1268 * relocation).
1269 *
1270 * So for now, set the write recovery to 3. This seems to work
1271 * on the Corair module too.
1272 *
1273 * 2007-03-01, sr
1274 */
1275 switch (write_recovery) {
1276 case 3:
1277 wr = WRITE_RECOV_3;
1278 break;
1279 case 4:
1280 wr = WRITE_RECOV_4;
1281 break;
1282 case 5:
1283 wr = WRITE_RECOV_5;
1284 break;
1285 case 6:
1286 wr = WRITE_RECOV_6;
1287 break;
1288 default:
1289 printf("ERROR: write recovery not support (%d)", write_recovery);
1290 spd_ddr_init_hang ();
1291 break;
1292 }
1293 #else
1294 wr = WRITE_RECOV_3; /* test-only, see description above */
1295 #endif
1296
1297 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++)
1298 if (dimm_populated[dimm_num] != SDRAM_NONE)
1299 total_dimm++;
1300 if (total_dimm == 1) {
1301 odt = ODT_150_OHM;
1302 ods = ODS_FULL;
1303 } else if (total_dimm == 2) {
1304 odt = ODT_75_OHM;
1305 ods = ODS_REDUCED;
1306 } else {
1307 printf("ERROR: Unsupported number of DIMM's (%d)", total_dimm);
1308 spd_ddr_init_hang ();
1309 }
1310
1311 mr = CMD_EMR | SELECT_MR | BURST_LEN_4 | wr | cas;
1312 emr = CMD_EMR | SELECT_EMR | odt | ods;
1313 emr2 = CMD_EMR | SELECT_EMR2;
1314 emr3 = CMD_EMR | SELECT_EMR3;
1315 /* NOP - Wait 106 MemClk cycles */
1316 mtsdram(SDRAM_INITPLR0, SDRAM_INITPLR_ENABLE | CMD_NOP |
1317 SDRAM_INITPLR_IMWT_ENCODE(106));
1318 udelay(1000);
1319 /* precharge 4 MemClk cycles */
1320 mtsdram(SDRAM_INITPLR1, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
1321 SDRAM_INITPLR_IMWT_ENCODE(4));
1322 /* EMR2 - Wait tMRD (2 MemClk cycles) */
1323 mtsdram(SDRAM_INITPLR2, SDRAM_INITPLR_ENABLE | emr2 |
1324 SDRAM_INITPLR_IMWT_ENCODE(2));
1325 /* EMR3 - Wait tMRD (2 MemClk cycles) */
1326 mtsdram(SDRAM_INITPLR3, SDRAM_INITPLR_ENABLE | emr3 |
1327 SDRAM_INITPLR_IMWT_ENCODE(2));
1328 /* EMR DLL ENABLE - Wait tMRD (2 MemClk cycles) */
1329 mtsdram(SDRAM_INITPLR4, SDRAM_INITPLR_ENABLE | emr |
1330 SDRAM_INITPLR_IMWT_ENCODE(2));
1331 /* MR w/ DLL reset - 200 cycle wait for DLL reset */
1332 mtsdram(SDRAM_INITPLR5, SDRAM_INITPLR_ENABLE | mr | DLL_RESET |
1333 SDRAM_INITPLR_IMWT_ENCODE(200));
1334 udelay(1000);
1335 /* precharge 4 MemClk cycles */
1336 mtsdram(SDRAM_INITPLR6, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
1337 SDRAM_INITPLR_IMWT_ENCODE(4));
1338 /* Refresh 25 MemClk cycles */
1339 mtsdram(SDRAM_INITPLR7, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1340 SDRAM_INITPLR_IMWT_ENCODE(25));
1341 /* Refresh 25 MemClk cycles */
1342 mtsdram(SDRAM_INITPLR8, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1343 SDRAM_INITPLR_IMWT_ENCODE(25));
1344 /* Refresh 25 MemClk cycles */
1345 mtsdram(SDRAM_INITPLR9, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1346 SDRAM_INITPLR_IMWT_ENCODE(25));
1347 /* Refresh 25 MemClk cycles */
1348 mtsdram(SDRAM_INITPLR10, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1349 SDRAM_INITPLR_IMWT_ENCODE(25));
1350 /* MR w/o DLL reset - Wait tMRD (2 MemClk cycles) */
1351 mtsdram(SDRAM_INITPLR11, SDRAM_INITPLR_ENABLE | mr |
1352 SDRAM_INITPLR_IMWT_ENCODE(2));
1353 /* EMR OCD Default - Wait tMRD (2 MemClk cycles) */
1354 mtsdram(SDRAM_INITPLR12, SDRAM_INITPLR_ENABLE | OCD_CALIB_DEF |
1355 SDRAM_INITPLR_IMWT_ENCODE(2) | emr);
1356 /* EMR OCD Exit */
1357 mtsdram(SDRAM_INITPLR13, SDRAM_INITPLR_ENABLE | emr |
1358 SDRAM_INITPLR_IMWT_ENCODE(2));
1359 } else {
1360 printf("ERROR: ucode error as unknown DDR type in program_initplr");
1361 spd_ddr_init_hang ();
1362 }
1363 }
1364
1365 /*------------------------------------------------------------------
1366 * This routine programs the SDRAM_MMODE register.
1367 * The selected_cas is an output parameter that the caller will
1368 * pass to program_initplr() above.
1369 *-----------------------------------------------------------------*/
1370 static void program_mode(unsigned long *dimm_populated,
1371 unsigned char *iic0_dimm_addr,
1372 unsigned long num_dimm_banks,
1373 ddr_cas_id_t *selected_cas,
1374 int *write_recovery)
1375 {
1376 unsigned long dimm_num;
1377 unsigned long sdram_ddr1;
1378 unsigned long t_wr_ns;
1379 unsigned long t_wr_clk;
1380 unsigned long cas_bit;
1381 unsigned long cas_index;
1382 unsigned long sdram_freq;
1383 unsigned long ddr_check;
1384 unsigned long mmode;
1385 unsigned long tcyc_reg;
1386 unsigned long cycle_2_0_clk;
1387 unsigned long cycle_2_5_clk;
1388 unsigned long cycle_3_0_clk;
1389 unsigned long cycle_4_0_clk;
1390 unsigned long cycle_5_0_clk;
1391 unsigned long max_2_0_tcyc_ns_x_100;
1392 unsigned long max_2_5_tcyc_ns_x_100;
1393 unsigned long max_3_0_tcyc_ns_x_100;
1394 unsigned long max_4_0_tcyc_ns_x_100;
1395 unsigned long max_5_0_tcyc_ns_x_100;
1396 unsigned long cycle_time_ns_x_100[3];
1397 PPC4xx_SYS_INFO board_cfg;
1398 unsigned char cas_2_0_available;
1399 unsigned char cas_2_5_available;
1400 unsigned char cas_3_0_available;
1401 unsigned char cas_4_0_available;
1402 unsigned char cas_5_0_available;
1403 unsigned long sdr_ddrpll;
1404
1405 /*------------------------------------------------------------------
1406 * Get the board configuration info.
1407 *-----------------------------------------------------------------*/
1408 get_sys_info(&board_cfg);
1409
1410 mfsdr(SDR0_DDR0, sdr_ddrpll);
1411 sdram_freq = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(sdr_ddrpll), 1);
1412 debug("sdram_freq=%d\n", sdram_freq);
1413
1414 /*------------------------------------------------------------------
1415 * Handle the timing. We need to find the worst case timing of all
1416 * the dimm modules installed.
1417 *-----------------------------------------------------------------*/
1418 t_wr_ns = 0;
1419 cas_2_0_available = TRUE;
1420 cas_2_5_available = TRUE;
1421 cas_3_0_available = TRUE;
1422 cas_4_0_available = TRUE;
1423 cas_5_0_available = TRUE;
1424 max_2_0_tcyc_ns_x_100 = 10;
1425 max_2_5_tcyc_ns_x_100 = 10;
1426 max_3_0_tcyc_ns_x_100 = 10;
1427 max_4_0_tcyc_ns_x_100 = 10;
1428 max_5_0_tcyc_ns_x_100 = 10;
1429 sdram_ddr1 = TRUE;
1430
1431 /* loop through all the DIMM slots on the board */
1432 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1433 /* If a dimm is installed in a particular slot ... */
1434 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1435 if (dimm_populated[dimm_num] == SDRAM_DDR1)
1436 sdram_ddr1 = TRUE;
1437 else
1438 sdram_ddr1 = FALSE;
1439
1440 /* t_wr_ns = max(t_wr_ns, (unsigned long)dimm_spd[dimm_num][36] >> 2); */ /* not used in this loop. */
1441 cas_bit = spd_read(iic0_dimm_addr[dimm_num], 18);
1442 debug("cas_bit[SPD byte 18]=%02x\n", cas_bit);
1443
1444 /* For a particular DIMM, grab the three CAS values it supports */
1445 for (cas_index = 0; cas_index < 3; cas_index++) {
1446 switch (cas_index) {
1447 case 0:
1448 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
1449 break;
1450 case 1:
1451 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 23);
1452 break;
1453 default:
1454 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 25);
1455 break;
1456 }
1457
1458 if ((tcyc_reg & 0x0F) >= 10) {
1459 if ((tcyc_reg & 0x0F) == 0x0D) {
1460 /* Convert from hex to decimal */
1461 cycle_time_ns_x_100[cas_index] =
1462 (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
1463 } else {
1464 printf("ERROR: SPD reported Tcyc is incorrect for DIMM "
1465 "in slot %d\n", (unsigned int)dimm_num);
1466 spd_ddr_init_hang ();
1467 }
1468 } else {
1469 /* Convert from hex to decimal */
1470 cycle_time_ns_x_100[cas_index] =
1471 (((tcyc_reg & 0xF0) >> 4) * 100) +
1472 ((tcyc_reg & 0x0F)*10);
1473 }
1474 debug("cas_index=%d: cycle_time_ns_x_100=%d\n", cas_index,
1475 cycle_time_ns_x_100[cas_index]);
1476 }
1477
1478 /* The rest of this routine determines if CAS 2.0, 2.5, 3.0, 4.0 and 5.0 are */
1479 /* supported for a particular DIMM. */
1480 cas_index = 0;
1481
1482 if (sdram_ddr1) {
1483 /*
1484 * DDR devices use the following bitmask for CAS latency:
1485 * Bit 7 6 5 4 3 2 1 0
1486 * TBD 4.0 3.5 3.0 2.5 2.0 1.5 1.0
1487 */
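/*
 * Example (hypothetical SPD value): cas_bit = 0x1C has bits 2, 3 and 4
 * set, i.e. this DDR1 DIMM supports CAS 2.0, 2.5 and 3.0.
 */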
1488 if (((cas_bit & 0x40) == 0x40) && (cas_index < 3) &&
1489 (cycle_time_ns_x_100[cas_index] != 0)) {
1490 max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
1491 cycle_time_ns_x_100[cas_index]);
1492 cas_index++;
1493 } else {
1494 if (cas_index != 0)
1495 cas_index++;
1496 cas_4_0_available = FALSE;
1497 }
1498
1499 if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
1500 (cycle_time_ns_x_100[cas_index] != 0)) {
1501 max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
1502 cycle_time_ns_x_100[cas_index]);
1503 cas_index++;
1504 } else {
1505 if (cas_index != 0)
1506 cas_index++;
1507 cas_3_0_available = FALSE;
1508 }
1509
1510 if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
1511 (cycle_time_ns_x_100[cas_index] != 0)) {
1512 max_2_5_tcyc_ns_x_100 = max(max_2_5_tcyc_ns_x_100,
1513 cycle_time_ns_x_100[cas_index]);
1514 cas_index++;
1515 } else {
1516 if (cas_index != 0)
1517 cas_index++;
1518 cas_2_5_available = FALSE;
1519 }
1520
1521 if (((cas_bit & 0x04) == 0x04) && (cas_index < 3) &&
1522 (cycle_time_ns_x_100[cas_index] != 0)) {
1523 max_2_0_tcyc_ns_x_100 = max(max_2_0_tcyc_ns_x_100,
1524 cycle_time_ns_x_100[cas_index]);
1525 cas_index++;
1526 } else {
1527 if (cas_index != 0)
1528 cas_index++;
1529 cas_2_0_available = FALSE;
1530 }
1531 } else {
1532 /*
1533 * DDR2 devices use the following bitmask for CAS latency:
1534 * Bit 7 6 5 4 3 2 1 0
1535 * TBD 6.0 5.0 4.0 3.0 2.0 TBD TBD
1536 */
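/*
 * Example (hypothetical SPD value): cas_bit = 0x38 has bits 3, 4 and 5
 * set, i.e. this DDR2 DIMM supports CAS 3.0, 4.0 and 5.0.
 */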
1537 if (((cas_bit & 0x20) == 0x20) && (cas_index < 3) &&
1538 (cycle_time_ns_x_100[cas_index] != 0)) {
1539 max_5_0_tcyc_ns_x_100 = max(max_5_0_tcyc_ns_x_100,
1540 cycle_time_ns_x_100[cas_index]);
1541 cas_index++;
1542 } else {
1543 if (cas_index != 0)
1544 cas_index++;
1545 cas_5_0_available = FALSE;
1546 }
1547
1548 if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
1549 (cycle_time_ns_x_100[cas_index] != 0)) {
1550 max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
1551 cycle_time_ns_x_100[cas_index]);
1552 cas_index++;
1553 } else {
1554 if (cas_index != 0)
1555 cas_index++;
1556 cas_4_0_available = FALSE;
1557 }
1558
1559 if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
1560 (cycle_time_ns_x_100[cas_index] != 0)) {
1561 max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
1562 cycle_time_ns_x_100[cas_index]);
1563 cas_index++;
1564 } else {
1565 if (cas_index != 0)
1566 cas_index++;
1567 cas_3_0_available = FALSE;
1568 }
1569 }
1570 }
1571 }
1572
1573 /*------------------------------------------------------------------
1574 * Set the SDRAM mode, SDRAM_MMODE
1575 *-----------------------------------------------------------------*/
1576 mfsdram(SDRAM_MMODE, mmode);
1577 mmode = mmode & ~(SDRAM_MMODE_WR_MASK | SDRAM_MMODE_DCL_MASK);
1578
1579 /* add 10 here because of rounding problems */
1580 cycle_2_0_clk = MULDIV64(ONE_BILLION, 100, max_2_0_tcyc_ns_x_100) + 10;
1581 cycle_2_5_clk = MULDIV64(ONE_BILLION, 100, max_2_5_tcyc_ns_x_100) + 10;
1582 cycle_3_0_clk = MULDIV64(ONE_BILLION, 100, max_3_0_tcyc_ns_x_100) + 10;
1583 cycle_4_0_clk = MULDIV64(ONE_BILLION, 100, max_4_0_tcyc_ns_x_100) + 10;
1584 cycle_5_0_clk = MULDIV64(ONE_BILLION, 100, max_5_0_tcyc_ns_x_100) + 10;
1585 debug("cycle_3_0_clk=%d\n", cycle_3_0_clk);
1586 debug("cycle_4_0_clk=%d\n", cycle_4_0_clk);
1587 debug("cycle_5_0_clk=%d\n", cycle_5_0_clk);
1588
1589 if (sdram_ddr1 == TRUE) { /* DDR1 */
1590 if ((cas_2_0_available == TRUE) && (sdram_freq <= cycle_2_0_clk)) {
1591 mmode |= SDRAM_MMODE_DCL_DDR1_2_0_CLK;
1592 *selected_cas = DDR_CAS_2;
1593 } else if ((cas_2_5_available == TRUE) && (sdram_freq <= cycle_2_5_clk)) {
1594 mmode |= SDRAM_MMODE_DCL_DDR1_2_5_CLK;
1595 *selected_cas = DDR_CAS_2_5;
1596 } else if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
1597 mmode |= SDRAM_MMODE_DCL_DDR1_3_0_CLK;
1598 *selected_cas = DDR_CAS_3;
1599 } else {
1600 printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1601 printf("Only DIMMs DDR1 with CAS latencies of 2.0, 2.5, and 3.0 are supported.\n");
1602 printf("Make sure the PLB speed is within the supported range of the DIMMs.\n\n");
1603 spd_ddr_init_hang ();
1604 }
1605 } else { /* DDR2 */
1606 debug("cas_3_0_available=%d\n", cas_3_0_available);
1607 debug("cas_4_0_available=%d\n", cas_4_0_available);
1608 debug("cas_5_0_available=%d\n", cas_5_0_available);
1609 if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
1610 mmode |= SDRAM_MMODE_DCL_DDR2_3_0_CLK;
1611 *selected_cas = DDR_CAS_3;
1612 } else if ((cas_4_0_available == TRUE) && (sdram_freq <= cycle_4_0_clk)) {
1613 mmode |= SDRAM_MMODE_DCL_DDR2_4_0_CLK;
1614 *selected_cas = DDR_CAS_4;
1615 } else if ((cas_5_0_available == TRUE) && (sdram_freq <= cycle_5_0_clk)) {
1616 mmode |= SDRAM_MMODE_DCL_DDR2_5_0_CLK;
1617 *selected_cas = DDR_CAS_5;
1618 } else {
1619 printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1620 printf("Only DIMMs DDR2 with CAS latencies of 3.0, 4.0, and 5.0 are supported.\n");
1621 printf("Make sure the PLB speed is within the supported range of the DIMMs.\n");
1622 printf("cas3=%d cas4=%d cas5=%d\n",
1623 cas_3_0_available, cas_4_0_available, cas_5_0_available);
1624 printf("sdram_freq=%lu cycle3=%lu cycle4=%lu cycle5=%lu\n\n",
1625 sdram_freq, cycle_3_0_clk, cycle_4_0_clk, cycle_5_0_clk);
1626 spd_ddr_init_hang ();
1627 }
1628 }
1629
1630 if (sdram_ddr1 == TRUE)
1631 mmode |= SDRAM_MMODE_WR_DDR1;
1632 else {
1633
1634 /* loop through all the DIMM slots on the board */
1635 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1636 /* If a dimm is installed in a particular slot ... */
1637 if (dimm_populated[dimm_num] != SDRAM_NONE)
1638 t_wr_ns = max(t_wr_ns,
1639 spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
1640 }
1641
1642 /*
1643 * convert from nanoseconds to ddr clocks
1644 * round up if necessary
1645 */
1646 t_wr_clk = MULDIV64(sdram_freq, t_wr_ns, ONE_BILLION);
1647 ddr_check = MULDIV64(ONE_BILLION, t_wr_clk, t_wr_ns);
1648 if (sdram_freq != ddr_check)
1649 t_wr_clk++;
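/*
 * Worked example (assumed values): SPD byte 36 holds tWR in units of
 * 1/4 ns, so a raw value of 60 is 15ns after the ">> 2" above. At
 * sdram_freq = 200000000 this gives t_wr_clk = MULDIV64(200000000,
 * 15, ONE_BILLION) = 3; the back-check reproduces the frequency
 * exactly, so no round-up occurs and WR is programmed to 3 cycles
 * by the switch below.
 */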
1650
1651 switch (t_wr_clk) {
1652 case 0:
1653 case 1:
1654 case 2:
1655 case 3:
1656 mmode |= SDRAM_MMODE_WR_DDR2_3_CYC;
1657 break;
1658 case 4:
1659 mmode |= SDRAM_MMODE_WR_DDR2_4_CYC;
1660 break;
1661 case 5:
1662 mmode |= SDRAM_MMODE_WR_DDR2_5_CYC;
1663 break;
1664 default:
1665 mmode |= SDRAM_MMODE_WR_DDR2_6_CYC;
1666 break;
1667 }
1668 *write_recovery = t_wr_clk;
1669 }
1670
1671 debug("CAS latency = %d\n", *selected_cas);
1672 debug("Write recovery = %d\n", *write_recovery);
1673
1674 mtsdram(SDRAM_MMODE, mmode);
1675 }
1676
1677 /*-----------------------------------------------------------------------------+
1678 * program_rtr.
1679 *-----------------------------------------------------------------------------*/
1680 static void program_rtr(unsigned long *dimm_populated,
1681 unsigned char *iic0_dimm_addr,
1682 unsigned long num_dimm_banks)
1683 {
1684 PPC4xx_SYS_INFO board_cfg;
1685 unsigned long max_refresh_rate;
1686 unsigned long dimm_num;
1687 unsigned long refresh_rate_type;
1688 unsigned long refresh_rate;
1689 unsigned long rint;
1690 unsigned long sdram_freq;
1691 unsigned long sdr_ddrpll;
1692 unsigned long val;
1693
1694 /*------------------------------------------------------------------
1695 * Get the board configuration info.
1696 *-----------------------------------------------------------------*/
1697 get_sys_info(&board_cfg);
1698
1699 /*------------------------------------------------------------------
1700 * Set the SDRAM Refresh Timing Register, SDRAM_RTR
1701 *-----------------------------------------------------------------*/
1702 mfsdr(SDR0_DDR0, sdr_ddrpll);
1703 sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
1704
1705 max_refresh_rate = 0;
1706 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1707 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1708
1709 refresh_rate_type = spd_read(iic0_dimm_addr[dimm_num], 12);
1710 refresh_rate_type &= 0x7F;
1711 switch (refresh_rate_type) {
1712 case 0:
1713 refresh_rate = 15625;
1714 break;
1715 case 1:
1716 refresh_rate = 3906;
1717 break;
1718 case 2:
1719 refresh_rate = 7812;
1720 break;
1721 case 3:
1722 refresh_rate = 31250;
1723 break;
1724 case 4:
1725 refresh_rate = 62500;
1726 break;
1727 case 5:
1728 refresh_rate = 125000;
1729 break;
1730 default:
1731 refresh_rate = 0;
1732 printf("ERROR: DIMM %d unsupported refresh rate/type.\n",
1733 (unsigned int)dimm_num);
1734 printf("Replace the DIMM module with a supported DIMM.\n\n");
1735 spd_ddr_init_hang ();
1736 break;
1737 }
1738
1739 max_refresh_rate = max(max_refresh_rate, refresh_rate);
1740 }
1741 }
1742
1743 rint = MULDIV64(sdram_freq, max_refresh_rate, ONE_BILLION);
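/*
 * The SPD refresh codes above are intervals in nanoseconds, so e.g.
 * the standard 15.625us rate (15625ns) at an assumed 200MHz SDRAM
 * clock yields rint = MULDIV64(200000000, 15625, ONE_BILLION) = 3125
 * refresh interval clocks, which is then encoded into SDRAM_RTR.
 */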
1744 mfsdram(SDRAM_RTR, val);
1745 mtsdram(SDRAM_RTR, (val & ~SDRAM_RTR_RINT_MASK) |
1746 (SDRAM_RTR_RINT_ENCODE(rint)));
1747 }
1748
1749 /*------------------------------------------------------------------
1750 * This routine programs the SDRAM_TRx registers.
1751 *-----------------------------------------------------------------*/
1752 static void program_tr(unsigned long *dimm_populated,
1753 unsigned char *iic0_dimm_addr,
1754 unsigned long num_dimm_banks)
1755 {
1756 unsigned long dimm_num;
1757 unsigned long sdram_ddr1;
1758 unsigned long t_rp_ns;
1759 unsigned long t_rcd_ns;
1760 unsigned long t_rrd_ns;
1761 unsigned long t_ras_ns;
1762 unsigned long t_rc_ns;
1763 unsigned long t_rfc_ns;
1764 unsigned long t_wpc_ns;
1765 unsigned long t_wtr_ns;
1766 unsigned long t_rpc_ns;
1767 unsigned long t_rp_clk;
1768 unsigned long t_rcd_clk;
1769 unsigned long t_rrd_clk;
1770 unsigned long t_ras_clk;
1771 unsigned long t_rc_clk;
1772 unsigned long t_rfc_clk;
1773 unsigned long t_wpc_clk;
1774 unsigned long t_wtr_clk;
1775 unsigned long t_rpc_clk;
1776 unsigned long sdtr1, sdtr2, sdtr3;
1777 unsigned long ddr_check;
1778 unsigned long sdram_freq;
1779 unsigned long sdr_ddrpll;
1780
1781 PPC4xx_SYS_INFO board_cfg;
1782
1783 /*------------------------------------------------------------------
1784 * Get the board configuration info.
1785 *-----------------------------------------------------------------*/
1786 get_sys_info(&board_cfg);
1787
1788 mfsdr(SDR0_DDR0, sdr_ddrpll);
1789 sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
1790
1791 /*------------------------------------------------------------------
1792 * Handle the timing. We need to find the worst case timing of all
1793 * the dimm modules installed.
1794 *-----------------------------------------------------------------*/
1795 t_rp_ns = 0;
1796 t_rrd_ns = 0;
1797 t_rcd_ns = 0;
1798 t_ras_ns = 0;
1799 t_rc_ns = 0;
1800 t_rfc_ns = 0;
1801 t_wpc_ns = 0;
1802 t_wtr_ns = 0;
1803 t_rpc_ns = 0;
1804 sdram_ddr1 = TRUE;
1805
1806 /* loop through all the DIMM slots on the board */
1807 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1808 /* If a dimm is installed in a particular slot ... */
1809 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1810 if (dimm_populated[dimm_num] == SDRAM_DDR1)
1811 sdram_ddr1 = TRUE;
1812 else
1813 sdram_ddr1 = FALSE;
1814
1815 t_rcd_ns = max(t_rcd_ns, spd_read(iic0_dimm_addr[dimm_num], 29) >> 2);
1816 t_rrd_ns = max(t_rrd_ns, spd_read(iic0_dimm_addr[dimm_num], 28) >> 2);
1817 t_rp_ns = max(t_rp_ns, spd_read(iic0_dimm_addr[dimm_num], 27) >> 2);
1818 t_ras_ns = max(t_ras_ns, spd_read(iic0_dimm_addr[dimm_num], 30));
1819 t_rc_ns = max(t_rc_ns, spd_read(iic0_dimm_addr[dimm_num], 41));
1820 t_rfc_ns = max(t_rfc_ns, spd_read(iic0_dimm_addr[dimm_num], 42));
1821 }
1822 }
1823
1824 /*------------------------------------------------------------------
1825 * Set the SDRAM Timing Reg 1, SDRAM_TR1
1826 *-----------------------------------------------------------------*/
1827 mfsdram(SDRAM_SDTR1, sdtr1);
1828 sdtr1 &= ~(SDRAM_SDTR1_LDOF_MASK | SDRAM_SDTR1_RTW_MASK |
1829 SDRAM_SDTR1_WTWO_MASK | SDRAM_SDTR1_RTRO_MASK);
1830
1831 /* default values */
1832 sdtr1 |= SDRAM_SDTR1_LDOF_2_CLK;
1833 sdtr1 |= SDRAM_SDTR1_RTW_2_CLK;
1834
1835 /* normal operations */
1836 sdtr1 |= SDRAM_SDTR1_WTWO_0_CLK;
1837 sdtr1 |= SDRAM_SDTR1_RTRO_1_CLK;
1838
1839 mtsdram(SDRAM_SDTR1, sdtr1);
1840
1841 /*------------------------------------------------------------------
1842 * Set the SDRAM Timing Reg 2, SDRAM_TR2
1843 *-----------------------------------------------------------------*/
1844 mfsdram(SDRAM_SDTR2, sdtr2);
1845 sdtr2 &= ~(SDRAM_SDTR2_RCD_MASK | SDRAM_SDTR2_WTR_MASK |
1846 SDRAM_SDTR2_XSNR_MASK | SDRAM_SDTR2_WPC_MASK |
1847 SDRAM_SDTR2_RPC_MASK | SDRAM_SDTR2_RP_MASK |
1848 SDRAM_SDTR2_RRD_MASK);
1849
1850 /*
1851 * convert t_rcd from nanoseconds to ddr clocks
1852 * round up if necessary
1853 */
1854 t_rcd_clk = MULDIV64(sdram_freq, t_rcd_ns, ONE_BILLION);
1855 ddr_check = MULDIV64(ONE_BILLION, t_rcd_clk, t_rcd_ns);
1856 if (sdram_freq != ddr_check)
1857 t_rcd_clk++;
1858
1859 switch (t_rcd_clk) {
1860 case 0:
1861 case 1:
1862 sdtr2 |= SDRAM_SDTR2_RCD_1_CLK;
1863 break;
1864 case 2:
1865 sdtr2 |= SDRAM_SDTR2_RCD_2_CLK;
1866 break;
1867 case 3:
1868 sdtr2 |= SDRAM_SDTR2_RCD_3_CLK;
1869 break;
1870 case 4:
1871 sdtr2 |= SDRAM_SDTR2_RCD_4_CLK;
1872 break;
1873 default:
1874 sdtr2 |= SDRAM_SDTR2_RCD_5_CLK;
1875 break;
1876 }
1877
1878 if (sdram_ddr1 == TRUE) { /* DDR1 */
1879 if (sdram_freq < 200000000) {
1880 sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
1881 sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
1882 sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
1883 } else {
1884 sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
1885 sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
1886 sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
1887 }
1888 } else { /* DDR2 */
1889 /* loop through all the DIMM slots on the board */
1890 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1891 /* If a dimm is installed in a particular slot ... */
1892 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1893 t_wpc_ns = max(t_wpc_ns, spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
1894 t_wtr_ns = max(t_wtr_ns, spd_read(iic0_dimm_addr[dimm_num], 37) >> 2);
1895 t_rpc_ns = max(t_rpc_ns, spd_read(iic0_dimm_addr[dimm_num], 38) >> 2);
1896 }
1897 }
1898
1899 /*
1900 * convert from nanoseconds to ddr clocks
1901 * round up if necessary
1902 */
1903 t_wpc_clk = MULDIV64(sdram_freq, t_wpc_ns, ONE_BILLION);
1904 ddr_check = MULDIV64(ONE_BILLION, t_wpc_clk, t_wpc_ns);
1905 if (sdram_freq != ddr_check)
1906 t_wpc_clk++;
1907
1908 switch (t_wpc_clk) {
1909 case 0:
1910 case 1:
1911 case 2:
1912 sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
1913 break;
1914 case 3:
1915 sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
1916 break;
1917 case 4:
1918 sdtr2 |= SDRAM_SDTR2_WPC_4_CLK;
1919 break;
1920 case 5:
1921 sdtr2 |= SDRAM_SDTR2_WPC_5_CLK;
1922 break;
1923 default:
1924 sdtr2 |= SDRAM_SDTR2_WPC_6_CLK;
1925 break;
1926 }
1927
1928 /*
1929 * convert from nanoseconds to ddr clocks
1930 * round up if necessary
1931 */
1932 t_wtr_clk = MULDIV64(sdram_freq, t_wtr_ns, ONE_BILLION);
1933 ddr_check = MULDIV64(ONE_BILLION, t_wtr_clk, t_wtr_ns);
1934 if (sdram_freq != ddr_check)
1935 t_wtr_clk++;
1936
1937 switch (t_wtr_clk) {
1938 case 0:
1939 case 1:
1940 sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
1941 break;
1942 case 2:
1943 sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
1944 break;
1945 case 3:
1946 sdtr2 |= SDRAM_SDTR2_WTR_3_CLK;
1947 break;
1948 default:
1949 sdtr2 |= SDRAM_SDTR2_WTR_4_CLK;
1950 break;
1951 }
1952
1953 /*
1954 * convert from nanoseconds to ddr clocks
1955 * round up if necessary
1956 */
1957 t_rpc_clk = MULDIV64(sdram_freq, t_rpc_ns, ONE_BILLION);
1958 ddr_check = MULDIV64(ONE_BILLION, t_rpc_clk, t_rpc_ns);
1959 if (sdram_freq != ddr_check)
1960 t_rpc_clk++;
1961
1962 switch (t_rpc_clk) {
1963 case 0:
1964 case 1:
1965 case 2:
1966 sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
1967 break;
1968 case 3:
1969 sdtr2 |= SDRAM_SDTR2_RPC_3_CLK;
1970 break;
1971 default:
1972 sdtr2 |= SDRAM_SDTR2_RPC_4_CLK;
1973 break;
1974 }
1975 }
1976
1977 /* default value */
1978 sdtr2 |= SDRAM_SDTR2_XSNR_16_CLK;
1979
1980 /*
1981 * convert t_rrd from nanoseconds to ddr clocks
1982 * round up if necessary
1983 */
1984 t_rrd_clk = MULDIV64(sdram_freq, t_rrd_ns, ONE_BILLION);
1985 ddr_check = MULDIV64(ONE_BILLION, t_rrd_clk, t_rrd_ns);
1986 if (sdram_freq != ddr_check)
1987 t_rrd_clk++;
1988
1989 if (t_rrd_clk == 3)
1990 sdtr2 |= SDRAM_SDTR2_RRD_3_CLK;
1991 else
1992 sdtr2 |= SDRAM_SDTR2_RRD_2_CLK;
1993
1994 /*
1995 * convert t_rp from nanoseconds to ddr clocks
1996 * round up if necessary
1997 */
1998 t_rp_clk = MULDIV64(sdram_freq, t_rp_ns, ONE_BILLION);
1999 ddr_check = MULDIV64(ONE_BILLION, t_rp_clk, t_rp_ns);
2000 if (sdram_freq != ddr_check)
2001 t_rp_clk++;
2002
2003 switch (t_rp_clk) {
2004 case 0:
2005 case 1:
2006 case 2:
2007 case 3:
2008 sdtr2 |= SDRAM_SDTR2_RP_3_CLK;
2009 break;
2010 case 4:
2011 sdtr2 |= SDRAM_SDTR2_RP_4_CLK;
2012 break;
2013 case 5:
2014 sdtr2 |= SDRAM_SDTR2_RP_5_CLK;
2015 break;
2016 case 6:
2017 sdtr2 |= SDRAM_SDTR2_RP_6_CLK;
2018 break;
2019 default:
2020 sdtr2 |= SDRAM_SDTR2_RP_7_CLK;
2021 break;
2022 }
2023
2024 mtsdram(SDRAM_SDTR2, sdtr2);
2025
2026 /*------------------------------------------------------------------
2027 * Set the SDRAM Timing Reg 3, SDRAM_TR3
2028 *-----------------------------------------------------------------*/
2029 mfsdram(SDRAM_SDTR3, sdtr3);
2030 sdtr3 &= ~(SDRAM_SDTR3_RAS_MASK | SDRAM_SDTR3_RC_MASK |
2031 SDRAM_SDTR3_XCS_MASK | SDRAM_SDTR3_RFC_MASK);
2032
2033 /*
2034 * convert t_ras from nanoseconds to ddr clocks
2035 * round up if necessary
2036 */
2037 t_ras_clk = MULDIV64(sdram_freq, t_ras_ns, ONE_BILLION);
2038 ddr_check = MULDIV64(ONE_BILLION, t_ras_clk, t_ras_ns);
2039 if (sdram_freq != ddr_check)
2040 t_ras_clk++;
2041
2042 sdtr3 |= SDRAM_SDTR3_RAS_ENCODE(t_ras_clk);
2043
2044 /*
2045 * convert t_rc from nanoseconds to ddr clocks
2046 * round up if necessary
2047 */
2048 t_rc_clk = MULDIV64(sdram_freq, t_rc_ns, ONE_BILLION);
2049 ddr_check = MULDIV64(ONE_BILLION, t_rc_clk, t_rc_ns);
2050 if (sdram_freq != ddr_check)
2051 t_rc_clk++;
2052
2053 sdtr3 |= SDRAM_SDTR3_RC_ENCODE(t_rc_clk);
2054
2055 /* default xcs value */
2056 sdtr3 |= SDRAM_SDTR3_XCS;
2057
2058 /*
2059 * convert t_rfc from nanoseconds to ddr clocks
2060 * round up if necessary
2061 */
2062 t_rfc_clk = MULDIV64(sdram_freq, t_rfc_ns, ONE_BILLION);
2063 ddr_check = MULDIV64(ONE_BILLION, t_rfc_clk, t_rfc_ns);
2064 if (sdram_freq != ddr_check)
2065 t_rfc_clk++;
2066
2067 sdtr3 |= SDRAM_SDTR3_RFC_ENCODE(t_rfc_clk);
2068
2069 mtsdram(SDRAM_SDTR3, sdtr3);
2070 }
2071
2072 /*-----------------------------------------------------------------------------+
2073 * program_bxcf.
2074 *-----------------------------------------------------------------------------*/
2075 static void program_bxcf(unsigned long *dimm_populated,
2076 unsigned char *iic0_dimm_addr,
2077 unsigned long num_dimm_banks)
2078 {
2079 unsigned long dimm_num;
2080 unsigned long num_col_addr;
2081 unsigned long num_ranks;
2082 unsigned long num_banks;
2083 unsigned long mode;
2084 unsigned long ind_rank;
2085 unsigned long ind;
2086 unsigned long ind_bank;
2087 unsigned long bank_0_populated;
2088
2089 /*------------------------------------------------------------------
2090 * Set the BxCF regs. First, wipe out the bank config registers.
2091 *-----------------------------------------------------------------*/
2092 mtsdram(SDRAM_MB0CF, 0x00000000);
2093 mtsdram(SDRAM_MB1CF, 0x00000000);
2094 mtsdram(SDRAM_MB2CF, 0x00000000);
2095 mtsdram(SDRAM_MB3CF, 0x00000000);
2096
2097 mode = SDRAM_BXCF_M_BE_ENABLE;
2098
2099 bank_0_populated = 0;
2100
2101 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
2102 if (dimm_populated[dimm_num] != SDRAM_NONE) {
2103 num_col_addr = spd_read(iic0_dimm_addr[dimm_num], 4);
2104 num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
2105 if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
2106 num_ranks = (num_ranks & 0x0F) + 1;
2107 else
2108 num_ranks = num_ranks & 0x0F;
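/*
 * SPD byte 2 is the memory type (0x08 = DDR2). Per the JEDEC SPD
 * layout, DDR2 modules store (number of ranks - 1) in the low bits
 * of byte 5, hence the "+ 1" above, while DDR1 modules store the
 * rank count directly.
 */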
2109
2110 num_banks = spd_read(iic0_dimm_addr[dimm_num], 17);
2111
2112 for (ind_bank = 0; ind_bank < 2; ind_bank++) {
2113 if (num_banks == 4)
2114 ind = 0;
2115 else
2116 ind = 5 << 8;
2117 switch (num_col_addr) {
2118 case 0x08:
2119 mode |= (SDRAM_BXCF_M_AM_0 + ind);
2120 break;
2121 case 0x09:
2122 mode |= (SDRAM_BXCF_M_AM_1 + ind);
2123 break;
2124 case 0x0A:
2125 mode |= (SDRAM_BXCF_M_AM_2 + ind);
2126 break;
2127 case 0x0B:
2128 mode |= (SDRAM_BXCF_M_AM_3 + ind);
2129 break;
2130 case 0x0C:
2131 mode |= (SDRAM_BXCF_M_AM_4 + ind);
2132 break;
2133 default:
2134 printf("DDR-SDRAM: DIMM %d BxCF configuration.\n",
2135 (unsigned int)dimm_num);
2136 printf("ERROR: Unsupported value for number of "
2137 "column addresses: %d.\n", (unsigned int)num_col_addr);
2138 printf("Replace the DIMM module with a supported DIMM.\n\n");
2139 spd_ddr_init_hang ();
2140 }
2141 }
2142
2143 if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
2144 bank_0_populated = 1;
2145
2146 for (ind_rank = 0; ind_rank < num_ranks; ind_rank++) {
2147 mtsdram(SDRAM_MB0CF +
2148 ((dimm_num + bank_0_populated + ind_rank) << 2),
2149 mode);
2150 }
2151 }
2152 }
2153 }
2154
2155 /*------------------------------------------------------------------
2156 * program memory queue.
2157 *-----------------------------------------------------------------*/
2158 static void program_memory_queue(unsigned long *dimm_populated,
2159 unsigned char *iic0_dimm_addr,
2160 unsigned long num_dimm_banks)
2161 {
2162 unsigned long dimm_num;
2163 phys_size_t rank_base_addr;
2164 unsigned long rank_reg;
2165 phys_size_t rank_size_bytes;
2166 unsigned long rank_size_id;
2167 unsigned long num_ranks;
2168 unsigned long baseadd_size;
2169 unsigned long i;
2170 unsigned long bank_0_populated = 0;
2171 phys_size_t total_size = 0;
2172
2173 /*------------------------------------------------------------------
2174 * Reset the rank_base_address.
2175 *-----------------------------------------------------------------*/
2176 rank_reg = SDRAM_R0BAS;
2177
2178 rank_base_addr = 0x00000000;
2179
2180 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
2181 if (dimm_populated[dimm_num] != SDRAM_NONE) {
2182 num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
2183 if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
2184 num_ranks = (num_ranks & 0x0F) + 1;
2185 else
2186 num_ranks = num_ranks & 0x0F;
2187
2188 rank_size_id = spd_read(iic0_dimm_addr[dimm_num], 31);
2189
2190 /*------------------------------------------------------------------
2191 * Set the sizes
2192 *-----------------------------------------------------------------*/
2193 baseadd_size = 0;
2194 switch (rank_size_id) {
2195 case 0x01:
2196 baseadd_size |= SDRAM_RXBAS_SDSZ_1024;
2197 total_size = 1024;
2198 break;
2199 case 0x02:
2200 baseadd_size |= SDRAM_RXBAS_SDSZ_2048;
2201 total_size = 2048;
2202 break;
2203 case 0x04:
2204 baseadd_size |= SDRAM_RXBAS_SDSZ_4096;
2205 total_size = 4096;
2206 break;
2207 case 0x08:
2208 baseadd_size |= SDRAM_RXBAS_SDSZ_32;
2209 total_size = 32;
2210 break;
2211 case 0x10:
2212 baseadd_size |= SDRAM_RXBAS_SDSZ_64;
2213 total_size = 64;
2214 break;
2215 case 0x20:
2216 baseadd_size |= SDRAM_RXBAS_SDSZ_128;
2217 total_size = 128;
2218 break;
2219 case 0x40:
2220 baseadd_size |= SDRAM_RXBAS_SDSZ_256;
2221 total_size = 256;
2222 break;
2223 case 0x80:
2224 baseadd_size |= SDRAM_RXBAS_SDSZ_512;
2225 total_size = 512;
2226 break;
2227 default:
2228 printf("DDR-SDRAM: DIMM %d memory queue configuration.\n",
2229 (unsigned int)dimm_num);
2230 printf("ERROR: Unsupported value for the bank size: %d.\n",
2231 (unsigned int)rank_size_id);
2232 printf("Replace the DIMM module with a supported DIMM.\n\n");
2233 spd_ddr_init_hang ();
2234 }
2235 rank_size_bytes = total_size << 20;
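/*
 * Example (assumed DIMM): rank_size_id 0x80 selects the 512MB case
 * above, so rank_size_bytes = 512 << 20 = 0x20000000 and each rank
 * advances the base address by 512MB in the loop below.
 */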
2236
2237 if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
2238 bank_0_populated = 1;
2239
2240 for (i = 0; i < num_ranks; i++) {
2241 mtdcr_any(rank_reg + i + dimm_num + bank_0_populated,
2242 (SDRAM_RXBAS_SDBA_ENCODE(rank_base_addr) |
2243 baseadd_size));
2244 rank_base_addr += rank_size_bytes;
2245 }
2246 }
2247 }
2248
2249 #if defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
2250 defined(CONFIG_460EX) || defined(CONFIG_460GT) || \
2251 defined(CONFIG_460SX)
2252 /*
2253 * Enable high bandwidth access
2254 * This is currently not used, but with this setup
2255 * it is possible to use it later on in e.g. the Linux
2256 * EMAC driver for performance gain.
2257 */
2258 mtdcr(SDRAM_PLBADDULL, 0x00000000); /* MQ0_BAUL */
2259 mtdcr(SDRAM_PLBADDUHB, 0x00000008); /* MQ0_BAUH */
2260
2261 /*
2262 * Set optimal value for Memory Queue HB/LL Configuration registers
2263 */
2264 mtdcr(SDRAM_CONF1HB, mfdcr(SDRAM_CONF1HB) | SDRAM_CONF1HB_AAFR |
2265 SDRAM_CONF1HB_RPEN | SDRAM_CONF1HB_RFTE);
2266 mtdcr(SDRAM_CONF1LL, mfdcr(SDRAM_CONF1LL) | SDRAM_CONF1LL_AAFR |
2267 SDRAM_CONF1LL_RPEN | SDRAM_CONF1LL_RFTE);
2268 mtdcr(SDRAM_CONFPATHB, mfdcr(SDRAM_CONFPATHB) | SDRAM_CONFPATHB_TPEN);
2269 #endif
2270 }
2271
2272 /*-----------------------------------------------------------------------------+
2273 * is_ecc_enabled.
2274 *-----------------------------------------------------------------------------*/
2275 static unsigned long is_ecc_enabled(void)
2276 {
2277 unsigned long dimm_num;
2278 unsigned long ecc;
2279 unsigned long val;
2280
2281 ecc = 0;
2282 /* loop through all the DIMM slots on the board */
2283 for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
2284 mfsdram(SDRAM_MCOPT1, val);
2285 ecc = max(ecc, SDRAM_MCOPT1_MCHK_CHK_DECODE(val));
2286 }
2287
2288 return ecc;
2289 }
2290
2291 #ifdef CONFIG_DDR_ECC
2292 /*-----------------------------------------------------------------------------+
2293 * program_ecc.
2294 *-----------------------------------------------------------------------------*/
2295 static void program_ecc(unsigned long *dimm_populated,
2296 unsigned char *iic0_dimm_addr,
2297 unsigned long num_dimm_banks,
2298 unsigned long tlb_word2_i_value)
2299 {
2300 unsigned long mcopt1;
2301 unsigned long mcopt2;
2302 unsigned long mcstat;
2303 unsigned long dimm_num;
2304 unsigned long ecc;
2305
2306 ecc = 0;
2307 /* loop through all the DIMM slots on the board */
2308 for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
2309 /* If a dimm is installed in a particular slot ... */
2310 if (dimm_populated[dimm_num] != SDRAM_NONE)
2311 ecc = max(ecc, spd_read(iic0_dimm_addr[dimm_num], 11));
2312 }
2313 if (ecc == 0)
2314 return;
2315
2316 if (sdram_memsize() > CONFIG_MAX_MEM_MAPPED) {
2317 printf("\nWarning: Can't enable ECC on systems with more than 2GB of SDRAM!\n");
2318 return;
2319 }
2320
2321 mfsdram(SDRAM_MCOPT1, mcopt1);
2322 mfsdram(SDRAM_MCOPT2, mcopt2);
2323
2324 if ((mcopt1 & SDRAM_MCOPT1_MCHK_MASK) != SDRAM_MCOPT1_MCHK_NON) {
2325 /* DDR controller must be enabled and not in self-refresh. */
2326 mfsdram(SDRAM_MCSTAT, mcstat);
2327 if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
2328 && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
2329 && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
2330 == (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
2331
2332 program_ecc_addr(0, sdram_memsize(), tlb_word2_i_value);
2333 }
2334 }
2335
2336 return;
2337 }
2338
2339 static void wait_ddr_idle(void)
2340 {
2341 u32 val;
2342
2343 do {
2344 mfsdram(SDRAM_MCSTAT, val);
2345 } while ((val & SDRAM_MCSTAT_IDLE_MASK) == SDRAM_MCSTAT_IDLE_NOT);
2346 }
2347
2348 /*-----------------------------------------------------------------------------+
2349 * program_ecc_addr.
2350 *-----------------------------------------------------------------------------*/
2351 static void program_ecc_addr(unsigned long start_address,
2352 unsigned long num_bytes,
2353 unsigned long tlb_word2_i_value)
2354 {
2355 unsigned long current_address;
2356 unsigned long end_address;
2357 unsigned long address_increment;
2358 unsigned long mcopt1;
2359 char str[] = "ECC generation -";
2360 char slash[] = "\\|/-\\|/-";
2361 int loop = 0;
2362 int loopi = 0;
2363
2364 current_address = start_address;
2365 mfsdram(SDRAM_MCOPT1, mcopt1);
2366 if ((mcopt1 & SDRAM_MCOPT1_MCHK_MASK) != SDRAM_MCOPT1_MCHK_NON) {
2367 mtsdram(SDRAM_MCOPT1,
2368 (mcopt1 & ~SDRAM_MCOPT1_MCHK_MASK) | SDRAM_MCOPT1_MCHK_GEN);
2369 sync();
2370 eieio();
2371 wait_ddr_idle();
2372
2373 puts(str);
2374 if (tlb_word2_i_value == TLB_WORD2_I_ENABLE) {
2375 /* ECC bit set method for non-cached memory */
2376 if ((mcopt1 & SDRAM_MCOPT1_DMWD_MASK) == SDRAM_MCOPT1_DMWD_32)
2377 address_increment = 4;
2378 else
2379 address_increment = 8;
2380 end_address = current_address + num_bytes;
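/*
 * With MCHK set to "generate" above, every write causes the
 * controller to compute and store ECC check bits, so zero-filling
 * the whole range once initializes the ECC for all of memory before
 * check/report mode is re-enabled below.
 */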
2381
2382 while (current_address < end_address) {
2383 *((unsigned long *)current_address) = 0x00000000;
2384 current_address += address_increment;
2385
2386 if ((loop++ % (2 << 20)) == 0) {
2387 putc('\b');
2388 putc(slash[loopi++ % 8]);
2389 }
2390 }
2391
2392 } else {
2393 /* ECC bit set method for cached memory */
2394 dcbz_area(start_address, num_bytes);
2395 /* Write modified dcache lines back to memory */
2396 clean_dcache_range(start_address, start_address + num_bytes);
2397 }
2398
2399 blank_string(strlen(str));
2400
2401 sync();
2402 eieio();
2403 wait_ddr_idle();
2404
2405 /* clear ECC error reporting registers */
2406 mtsdram(SDRAM_ECCCR, 0xffffffff);
2407 mtdcr(0x4c, 0xffffffff);
2408
2409 mtsdram(SDRAM_MCOPT1,
2410 (mcopt1 & ~SDRAM_MCOPT1_MCHK_MASK) | SDRAM_MCOPT1_MCHK_CHK_REP);
2411 sync();
2412 eieio();
2413 wait_ddr_idle();
2414 }
2415 }
2416 #endif
2417
2418 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
2419 /*-----------------------------------------------------------------------------+
2420 * program_DQS_calibration.
2421 *-----------------------------------------------------------------------------*/
2422 static void program_DQS_calibration(unsigned long *dimm_populated,
2423 unsigned char *iic0_dimm_addr,
2424 unsigned long num_dimm_banks)
2425 {
2426 unsigned long val;
2427
2428 #ifdef HARD_CODED_DQS /* calibration test with hard-coded values */
2429 mtsdram(SDRAM_RQDC, 0x80000037);
2430 mtsdram(SDRAM_RDCC, 0x40000000);
2431 mtsdram(SDRAM_RFDC, 0x000001DF);
2432
2433 test();
2434 #else
2435 /*------------------------------------------------------------------
2436 * Program RDCC register
2437 * Read sample cycle auto-update enable
2438 *-----------------------------------------------------------------*/
2439
2440 mfsdram(SDRAM_RDCC, val);
2441 mtsdram(SDRAM_RDCC,
2442 (val & ~(SDRAM_RDCC_RDSS_MASK | SDRAM_RDCC_RSAE_MASK))
2443 | SDRAM_RDCC_RSAE_ENABLE);
2444
2445 /*------------------------------------------------------------------
2446 * Program RQDC register
2447 * Internal DQS delay mechanism enable
2448 *-----------------------------------------------------------------*/
2449 mtsdram(SDRAM_RQDC, (SDRAM_RQDC_RQDE_ENABLE|SDRAM_RQDC_RQFD_ENCODE(0x38)));
2450
2451 /*------------------------------------------------------------------
2452 * Program RFDC register
2453 * Set Feedback Fractional Oversample
2454 * Auto-detect read sample cycle enable
2455 * Set RFOS to 1/4 of memclk cycle (0x3f)
2456 *-----------------------------------------------------------------*/
2457 mfsdram(SDRAM_RFDC, val);
2458 mtsdram(SDRAM_RFDC,
2459 (val & ~(SDRAM_RFDC_ARSE_MASK | SDRAM_RFDC_RFOS_MASK |
2460 SDRAM_RFDC_RFFD_MASK))
2461 | (SDRAM_RFDC_ARSE_ENABLE | SDRAM_RFDC_RFOS_ENCODE(0x3f) |
2462 SDRAM_RFDC_RFFD_ENCODE(0)));
2463
2464 DQS_calibration_process();
2465 #endif
2466 }
2467
2468 static int short_mem_test(void)
2469 {
2470 u32 *membase;
2471 u32 bxcr_num;
2472 u32 bxcf;
2473 int i;
2474 int j;
2475 phys_size_t base_addr;
2476 u32 test[NUMMEMTESTS][NUMMEMWORDS] = {
2477 {0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
2478 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
2479 {0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
2480 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
2481 {0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
2482 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
2483 {0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
2484 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
2485 {0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
2486 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
2487 {0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
2488 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
2489 {0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
2490 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
2491 {0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
2492 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
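/*
 * The patterns above alternate complementary values within and
 * between bursts so that every data line toggles, which keeps this
 * quick test reasonably sensitive to marginal read timing during
 * the calibration sweeps that call it.
 */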
2493 int l;
2494
2495 for (bxcr_num = 0; bxcr_num < MAXBXCF; bxcr_num++) {
2496 mfsdram(SDRAM_MB0CF + (bxcr_num << 2), bxcf);
2497
2498 /* Banks enabled */
2499 if ((bxcf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
2500 /* Bank is enabled */
2501
2502 /*
2503 * Only run test on accessible memory (below 2GB)
2504 */
2505 base_addr = SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+bxcr_num));
2506 if (base_addr >= CONFIG_MAX_MEM_MAPPED)
2507 continue;
2508
2509 /*------------------------------------------------------------------
2510 * Run the short memory test.
2511 *-----------------------------------------------------------------*/
2512 membase = (u32 *)(u32)base_addr;
2513
2514 for (i = 0; i < NUMMEMTESTS; i++) {
2515 for (j = 0; j < NUMMEMWORDS; j++) {
2516 membase[j] = test[i][j];
2517 ppcDcbf((u32)&(membase[j]));
2518 }
2519 sync();
2520 for (l = 0; l < NUMLOOPS; l++) {
2521 for (j = 0; j < NUMMEMWORDS; j++) {
2522 if (membase[j] != test[i][j]) {
2523 ppcDcbf((u32)&(membase[j]));
2524 return 0;
2525 }
2526 ppcDcbf((u32)&(membase[j]));
2527 }
2528 sync();
2529 }
2530 }
2531 } /* if bank enabled */
2532 } /* for bxcf_num */
2533
2534 return 1;
2535 }
2536
2537 #ifndef HARD_CODED_DQS
2538 /*-----------------------------------------------------------------------------+
2539 * DQS_calibration_process.
2540 *-----------------------------------------------------------------------------*/
2541 static void DQS_calibration_process(void)
2542 {
2543 unsigned long rfdc_reg;
2544 unsigned long rffd;
2545 unsigned long val;
2546 long rffd_average;
2547 long max_start;
2548 long min_end;
2549 unsigned long begin_rqfd[MAXRANKS];
2550 unsigned long begin_rffd[MAXRANKS];
2551 unsigned long end_rqfd[MAXRANKS];
2552 unsigned long end_rffd[MAXRANKS];
2553 char window_found;
2554 unsigned long dlycal;
2555 unsigned long dly_val;
2556 unsigned long max_pass_length;
2557 unsigned long current_pass_length;
2558 unsigned long current_fail_length;
2559 unsigned long current_start;
2560 long max_end;
2561 unsigned char fail_found;
2562 unsigned char pass_found;
2563 #if !defined(CONFIG_DDR_RQDC_FIXED)
2564 u32 rqdc_reg;
2565 u32 rqfd;
2566 u32 rqfd_start;
2567 u32 rqfd_average;
2568 int loopi = 0;
2569 char str[] = "Auto calibration -";
2570 char slash[] = "\\|/-\\|/-";
2571
2572 /*------------------------------------------------------------------
2573 * Test to determine the best read clock delay tuning bits.
2574 *
2575 * Before the DDR controller can be used, the read clock delay needs to be
2576 * set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
2577 * This value cannot be hardcoded into the program because it changes
2578 * depending on the board's setup and environment.
2579 * To do this, all delay values are tested to see if they
2580 * work or not. By doing this, you get groups of fails with groups of
2581 * passing values. The idea is to find the start and end of a passing
2582 * window and take the center of it to use as the read clock delay.
2583 *
2584 * A failure has to be seen first so that when we hit a pass, we know
2585 * that it is truly the start of the window. If we get passing values
2586 * to start off with, we don't know if we are at the start of the window.
2587 *
2588 * The code assumes that a failure will always be found.
2589 * If a failure is not found, there is no easy way to get the middle
2590 * of the passing window. I guess we can pretty much pick any value
2591 * but some values will be better than others. Since the lowest speed
2592 * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
2593 * from experimentation it is safe to say you will always have a failure.
2594 *-----------------------------------------------------------------*/
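/*
 * Illustrative sweep (assumed results), ignoring the minimum
 * fail-run filtering applied below:
 *
 *   RFFD:   0 1 2 3 4 5 6 7 8 9 ...
 *   result: F F F F P P P P P P F F ...
 *
 * The passing window is [4..9] and the value finally programmed is
 * its centre, (4 + 9) >> 1 = 6.
 */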
2595
2596 /* first fix RQDC[RQFD] to an average 80 degree phase shift to find RFDC[RFFD] */
2597 rqfd_start = 64; /* test-only: don't know if this is the _best_ start value */
2598
2599 puts(str);
2600
2601 calibration_loop:
2602 mfsdram(SDRAM_RQDC, rqdc_reg);
2603 mtsdram(SDRAM_RQDC, (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
2604 SDRAM_RQDC_RQFD_ENCODE(rqfd_start));
2605 #else /* CONFIG_DDR_RQDC_FIXED */
2606 /*
2607 * On Katmai the complete auto-calibration somehow doesn't seem to
2608 * produce the best results, meaning optimal values for RQFD/RFFD.
2609 * This was discovered by GDA using a high bandwidth scope,
2610 * analyzing the DDR2 signals. GDA provided a fixed value for RQFD,
2611 * so now on Katmai "only" RFFD is auto-calibrated.
2612 */
2613 mtsdram(SDRAM_RQDC, CONFIG_DDR_RQDC_FIXED);
2614 #endif /* CONFIG_DDR_RQDC_FIXED */
2615
2616 max_start = 0;
2617 min_end = 0;
2618 begin_rqfd[0] = 0;
2619 begin_rffd[0] = 0;
2620 begin_rqfd[1] = 0;
2621 begin_rffd[1] = 0;
2622 end_rqfd[0] = 0;
2623 end_rffd[0] = 0;
2624 end_rqfd[1] = 0;
2625 end_rffd[1] = 0;
2626 window_found = FALSE;
2627
2628 max_pass_length = 0;
2629 max_start = 0;
2630 max_end = 0;
2631 current_pass_length = 0;
2632 current_fail_length = 0;
2633 current_start = 0;
2634 window_found = FALSE;
2635 fail_found = FALSE;
2636 pass_found = FALSE;
2637
2638 /*
2639 * get the delay line calibration register value
2640 */
2641 mfsdram(SDRAM_DLCR, dlycal);
2642 dly_val = SDRAM_DLYCAL_DLCV_DECODE(dlycal) << 2;
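/*
 * dly_val is four times the decoded delay-line calibration value;
 * below, a fail region only counts once current_fail_length reaches
 * dly_val >> 2 consecutive failures, presumably to keep isolated
 * read errors from being mistaken for the edge of the window.
 */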
2643
2644 for (rffd = 0; rffd <= SDRAM_RFDC_RFFD_MAX; rffd++) {
2645 mfsdram(SDRAM_RFDC, rfdc_reg);
2646 rfdc_reg &= ~(SDRAM_RFDC_RFFD_MASK);
2647
2648 /*------------------------------------------------------------------
2649 * Set the timing reg for the test.
2650 *-----------------------------------------------------------------*/
2651 mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd));
2652
2653 /*------------------------------------------------------------------
2654 * See if the rffd value passed.
2655 *-----------------------------------------------------------------*/
2656 if (short_mem_test()) {
2657 if (fail_found == TRUE) {
2658 pass_found = TRUE;
2659 if (current_pass_length == 0)
2660 current_start = rffd;
2661
2662 current_fail_length = 0;
2663 current_pass_length++;
2664
2665 if (current_pass_length > max_pass_length) {
2666 max_pass_length = current_pass_length;
2667 max_start = current_start;
2668 max_end = rffd;
2669 }
2670 }
2671 } else {
2672 current_pass_length = 0;
2673 current_fail_length++;
2674
2675 if (current_fail_length >= (dly_val >> 2)) {
2676 if (fail_found == FALSE) {
2677 fail_found = TRUE;
2678 } else if (pass_found == TRUE) {
2679 window_found = TRUE;
2680 break;
2681 }
2682 }
2683 }
2684 } /* for rffd */
2685
2686 /*------------------------------------------------------------------
2687 * Set the average RFFD value
2688 *-----------------------------------------------------------------*/
2689 rffd_average = ((max_start + max_end) >> 1);
2690
2691 if (rffd_average < 0)
2692 rffd_average = 0;
2693
2694 if (rffd_average > SDRAM_RFDC_RFFD_MAX)
2695 rffd_average = SDRAM_RFDC_RFFD_MAX;
2696 /* now fix RFDC[RFFD] to the value just found and sweep RQDC[RQFD] */
2697 mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd_average));
2698
2699 #if !defined(CONFIG_DDR_RQDC_FIXED)
2700 max_pass_length = 0;
2701 max_start = 0;
2702 max_end = 0;
2703 current_pass_length = 0;
2704 current_fail_length = 0;
2705 current_start = 0;
2706 window_found = FALSE;
2707 fail_found = FALSE;
2708 pass_found = FALSE;
2709
2710 for (rqfd = 0; rqfd <= SDRAM_RQDC_RQFD_MAX; rqfd++) {
2711 mfsdram(SDRAM_RQDC, rqdc_reg);
2712 rqdc_reg &= ~(SDRAM_RQDC_RQFD_MASK);
2713
2714 /*------------------------------------------------------------------
2715 * Set the timing reg for the test.
2716 *-----------------------------------------------------------------*/
2717 mtsdram(SDRAM_RQDC, rqdc_reg | SDRAM_RQDC_RQFD_ENCODE(rqfd));
2718
2719 /*------------------------------------------------------------------
2720 * See if the rffd value passed.
2721 *-----------------------------------------------------------------*/
2722 if (short_mem_test()) {
2723 if (fail_found == TRUE) {
2724 pass_found = TRUE;
2725 if (current_pass_length == 0)
2726 current_start = rqfd;
2727
2728 current_fail_length = 0;
2729 current_pass_length++;
2730
2731 if (current_pass_length > max_pass_length) {
2732 max_pass_length = current_pass_length;
2733 max_start = current_start;
2734 max_end = rqfd;
2735 }
2736 }
2737 } else {
2738 current_pass_length = 0;
2739 current_fail_length++;
2740
2741 if (fail_found == FALSE) {
2742 fail_found = TRUE;
2743 } else if (pass_found == TRUE) {
2744 window_found = TRUE;
2745 break;
2746 }
2747 }
2748 }
2749
2750 rqfd_average = ((max_start + max_end) >> 1);
2751
2752 /*------------------------------------------------------------------
2753 * Make sure we found the valid read passing window. Halt if not
2754 *-----------------------------------------------------------------*/
2755 if (window_found == FALSE) {
2756 if (rqfd_start < SDRAM_RQDC_RQFD_MAX) {
2757 putc('\b');
2758 putc(slash[loopi++ % 8]);
2759
2760 /* try again with a different RQFD start value */
2761 rqfd_start++;
2762 goto calibration_loop;
2763 }
2764
2765 printf("\nERROR: Cannot determine a common read delay for the "
2766 "DIMM(s) installed.\n");
2767 debug("%s[%d] ERROR:\n", __FUNCTION__, __LINE__);
2768 ppc4xx_ibm_ddr2_register_dump();
2769 spd_ddr_init_hang ();
2770 }
2771
2772 if (rqfd_average < 0)
2773 rqfd_average = 0;
2774
2775 if (rqfd_average > SDRAM_RQDC_RQFD_MAX)
2776 rqfd_average = SDRAM_RQDC_RQFD_MAX;
2777
2778 mtsdram(SDRAM_RQDC,
2779 (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
2780 SDRAM_RQDC_RQFD_ENCODE(rqfd_average));
2781
2782 blank_string(strlen(str));
2783 #endif /* CONFIG_DDR_RQDC_FIXED */
2784
2785 /*
2786 * Now complete RDSS configuration as mentioned on page 7 of the AMCC
2787 * PowerPC440SP/SPe DDR2 application note:
2788 * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
2789 */
2790 mfsdram(SDRAM_RTSR, val);
2791 if ((val & SDRAM_RTSR_TRK1SM_MASK) == SDRAM_RTSR_TRK1SM_ATPLS1) {
2792 mfsdram(SDRAM_RDCC, val);
2793 if ((val & SDRAM_RDCC_RDSS_MASK) != SDRAM_RDCC_RDSS_T4) {
2794 val += 0x40000000;
2795 mtsdram(SDRAM_RDCC, val);
2796 }
2797 }
2798
2799 mfsdram(SDRAM_DLCR, val);
2800 debug("%s[%d] DLCR: 0x%08X\n", __FUNCTION__, __LINE__, val);
2801 mfsdram(SDRAM_RQDC, val);
2802 debug("%s[%d] RQDC: 0x%08X\n", __FUNCTION__, __LINE__, val);
2803 mfsdram(SDRAM_RFDC, val);
2804 debug("%s[%d] RFDC: 0x%08X\n", __FUNCTION__, __LINE__, val);
2805 mfsdram(SDRAM_RDCC, val);
2806 debug("%s[%d] RDCC: 0x%08X\n", __FUNCTION__, __LINE__, val);
2807 }
2808 #else /* calibration test with hard-coded values */
2809 /*-----------------------------------------------------------------------------+
2810 * test (DQS calibration check using hard-coded values).
2811 *-----------------------------------------------------------------------------*/
2812 static void test(void)
2813 {
2814 unsigned long dimm_num;
2815 unsigned long ecc_temp;
2816 unsigned long i, j;
2817 unsigned long *membase;
2818 unsigned long bxcf[MAXRANKS];
2819 unsigned long val;
2820 char window_found;
2821 char begin_found[MAXDIMMS];
2822 char end_found[MAXDIMMS];
2823 char search_end[MAXDIMMS];
2824 unsigned long test[NUMMEMTESTS][NUMMEMWORDS] = {
2825 {0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
2826 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
2827 {0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
2828 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
2829 {0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
2830 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
2831 {0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
2832 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
2833 {0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
2834 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
2835 {0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
2836 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
2837 {0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
2838 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
2839 {0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
2840 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
2841
2842 /*------------------------------------------------------------------
2843 * Test to determine the best read clock delay tuning bits.
2844 *
2845 * Before the DDR controller can be used, the read clock delay needs to be
2846 * set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
2847 * This value cannot be hardcoded into the program because it changes
2848 * depending on the board's setup and environment.
2849 * To do this, all delay values are tested to see if they
2850 * work or not. By doing this, you get groups of fails with groups of
2851 * passing values. The idea is to find the start and end of a passing
2852 * window and take the center of it to use as the read clock delay.
2853 *
2854 * A failure has to be seen first so that when we hit a pass, we know
2855 * that it is truly the start of the window. If we get passing values
2856 * to start off with, we don't know if we are at the start of the window.
2857 *
2858 * The code assumes that a failure will always be found.
2859 * If a failure is not found, there is no easy way to get the middle
2860 * of the passing window. I guess we can pretty much pick any value
2861 * but some values will be better than others. Since the lowest speed
2862 * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
2863 * from experimentation it is safe to say you will always have a failure.
2864 *-----------------------------------------------------------------*/
2865 mfsdram(SDRAM_MCOPT1, ecc_temp);
2866 ecc_temp &= SDRAM_MCOPT1_MCHK_MASK;
2867 mfsdram(SDRAM_MCOPT1, val);
2868 mtsdram(SDRAM_MCOPT1, (val & ~SDRAM_MCOPT1_MCHK_MASK) |
2869 SDRAM_MCOPT1_MCHK_NON);
2870
2871 window_found = FALSE;
2872 begin_found[0] = FALSE;
2873 end_found[0] = FALSE;
2874 search_end[0] = FALSE;
2875 begin_found[1] = FALSE;
2876 end_found[1] = FALSE;
2877 search_end[1] = FALSE;
2878
2879 for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
2880 mfsdram(SDRAM_MB0CF + (dimm_num << 2), bxcf[dimm_num]);
2881
2882 /* Banks enabled */
2883 if ((bxcf[dimm_num] & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
2884
2885 /* Bank is enabled */
2886 membase =
2887 (unsigned long*)(SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+dimm_num)));
2888
2889 /*------------------------------------------------------------------
2890 * Run the short memory test.
2891 *-----------------------------------------------------------------*/
2892 for (i = 0; i < NUMMEMTESTS; i++) {
2893 for (j = 0; j < NUMMEMWORDS; j++) {
2894 membase[j] = test[i][j];
2895 ppcDcbf((u32)&(membase[j]));
2896 }
2897 sync();
2898 for (j = 0; j < NUMMEMWORDS; j++) {
2899 if (membase[j] != test[i][j]) {
2900 ppcDcbf((u32)&(membase[j]));
2901 break;
2902 }
2903 ppcDcbf((u32)&(membase[j]));
2904 }
2905 sync();
2906 if (j < NUMMEMWORDS)
2907 break;
2908 }
2909
2910 /*------------------------------------------------------------------
2911 * See if the rffd value passed.
2912 *-----------------------------------------------------------------*/
2913 if (i < NUMMEMTESTS) {
2914 if ((end_found[dimm_num] == FALSE) &&
2915 (search_end[dimm_num] == TRUE)) {
2916 end_found[dimm_num] = TRUE;
2917 }
2918 if ((end_found[0] == TRUE) &&
2919 (end_found[1] == TRUE))
2920 break;
2921 } else {
2922 if (begin_found[dimm_num] == FALSE) {
2923 begin_found[dimm_num] = TRUE;
2924 search_end[dimm_num] = TRUE;
2925 }
2926 }
2927 } else {
2928 begin_found[dimm_num] = TRUE;
2929 end_found[dimm_num] = TRUE;
2930 }
2931 }
2932
2933 if ((begin_found[0] == TRUE) && (begin_found[1] == TRUE))
2934 window_found = TRUE;
2935
2936 /*------------------------------------------------------------------
2937 * Make sure we found a valid read passing window; halt if not.
2938 *-----------------------------------------------------------------*/
2939 if (window_found == FALSE) {
2940 printf("ERROR: Cannot determine a common read delay for the "
2941 "DIMM(s) installed.\n");
2942 spd_ddr_init_hang ();
2943 }
2944
2945 /*------------------------------------------------------------------
2946 * Restore the ECC variable to what it originally was
2947 *-----------------------------------------------------------------*/
2948 mtsdram(SDRAM_MCOPT1,
2949 (ppcMfdcr_sdram(SDRAM_MCOPT1) & ~SDRAM_MCOPT1_MCHK_MASK)
2950 | ecc_temp);
2951 }
2952 #endif /* !HARD_CODED_DQS */
2953 #endif /* !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION) */
2954
2955 #else /* CONFIG_SPD_EEPROM */
2956
2957 /*-----------------------------------------------------------------------------
2958 * Function: initdram
2959 * Description: Configures the PPC405EX(r) DDR1/DDR2 SDRAM memory
2960 * banks. The configuration is performed using static, compile-
2961 * time parameters.
2962 *---------------------------------------------------------------------------*/
2963 phys_size_t initdram(int board_type)
2964 {
2965 /*
2966 * Only run this SDRAM init code once. For NAND booting
2967 * targets like Kilauea, we call initdram() early from the
2968 * 4k NAND booting image (CONFIG_NAND_SPL) from nand_boot().
2969 * Later on the NAND U-Boot image runs (CONFIG_NAND_U_BOOT)
2970 * which calls initdram() again. This time the controller
2971 * mustn't be reconfigured again since we're already running
2972 * from SDRAM.
2973 */
2974 #if !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL)
2975 unsigned long val;
2976
2977 /* Set Memory Bank Configuration Registers */
2978
2979 mtsdram(SDRAM_MB0CF, CONFIG_SYS_SDRAM0_MB0CF);
2980 mtsdram(SDRAM_MB1CF, CONFIG_SYS_SDRAM0_MB1CF);
2981 mtsdram(SDRAM_MB2CF, CONFIG_SYS_SDRAM0_MB2CF);
2982 mtsdram(SDRAM_MB3CF, CONFIG_SYS_SDRAM0_MB3CF);
2983
2984 /* Set Memory Clock Timing Register */
2985
2986 mtsdram(SDRAM_CLKTR, CONFIG_SYS_SDRAM0_CLKTR);
2987
2988 /* Set Refresh Time Register */
2989
2990 mtsdram(SDRAM_RTR, CONFIG_SYS_SDRAM0_RTR);
2991
2992 /* Set SDRAM Timing Registers */
2993
2994 mtsdram(SDRAM_SDTR1, CONFIG_SYS_SDRAM0_SDTR1);
2995 mtsdram(SDRAM_SDTR2, CONFIG_SYS_SDRAM0_SDTR2);
2996 mtsdram(SDRAM_SDTR3, CONFIG_SYS_SDRAM0_SDTR3);
2997
2998 /* Set Mode and Extended Mode Registers */
2999
3000 mtsdram(SDRAM_MMODE, CONFIG_SYS_SDRAM0_MMODE);
3001 mtsdram(SDRAM_MEMODE, CONFIG_SYS_SDRAM0_MEMODE);
3002
3003 /* Set Memory Controller Options 1 Register */
3004
3005 mtsdram(SDRAM_MCOPT1, CONFIG_SYS_SDRAM0_MCOPT1);
3006
3007 /* Set Manual Initialization Control Registers */
3008
3009 mtsdram(SDRAM_INITPLR0, CONFIG_SYS_SDRAM0_INITPLR0);
3010 mtsdram(SDRAM_INITPLR1, CONFIG_SYS_SDRAM0_INITPLR1);
3011 mtsdram(SDRAM_INITPLR2, CONFIG_SYS_SDRAM0_INITPLR2);
3012 mtsdram(SDRAM_INITPLR3, CONFIG_SYS_SDRAM0_INITPLR3);
3013 mtsdram(SDRAM_INITPLR4, CONFIG_SYS_SDRAM0_INITPLR4);
3014 mtsdram(SDRAM_INITPLR5, CONFIG_SYS_SDRAM0_INITPLR5);
3015 mtsdram(SDRAM_INITPLR6, CONFIG_SYS_SDRAM0_INITPLR6);
3016 mtsdram(SDRAM_INITPLR7, CONFIG_SYS_SDRAM0_INITPLR7);
3017 mtsdram(SDRAM_INITPLR8, CONFIG_SYS_SDRAM0_INITPLR8);
3018 mtsdram(SDRAM_INITPLR9, CONFIG_SYS_SDRAM0_INITPLR9);
3019 mtsdram(SDRAM_INITPLR10, CONFIG_SYS_SDRAM0_INITPLR10);
3020 mtsdram(SDRAM_INITPLR11, CONFIG_SYS_SDRAM0_INITPLR11);
3021 mtsdram(SDRAM_INITPLR12, CONFIG_SYS_SDRAM0_INITPLR12);
3022 mtsdram(SDRAM_INITPLR13, CONFIG_SYS_SDRAM0_INITPLR13);
3023 mtsdram(SDRAM_INITPLR14, CONFIG_SYS_SDRAM0_INITPLR14);
3024 mtsdram(SDRAM_INITPLR15, CONFIG_SYS_SDRAM0_INITPLR15);
3025
3026 /* Set On-Die Termination Registers */
3027
3028 mtsdram(SDRAM_CODT, CONFIG_SYS_SDRAM0_CODT);
3029 mtsdram(SDRAM_MODT0, CONFIG_SYS_SDRAM0_MODT0);
3030 mtsdram(SDRAM_MODT1, CONFIG_SYS_SDRAM0_MODT1);
3031
3032 /* Set Write Timing Register */
3033
3034 mtsdram(SDRAM_WRDTR, CONFIG_SYS_SDRAM0_WRDTR);
3035
3036 /*
3037 * Start Initialization by SDRAM0_MCOPT2[SREN] = 0 and
3038 * SDRAM0_MCOPT2[IPTR] = 1
3039 */
3040
3041 mtsdram(SDRAM_MCOPT2, (SDRAM_MCOPT2_SREN_EXIT |
3042 SDRAM_MCOPT2_IPTR_EXECUTE));
3043
3044 /*
3045 * Poll SDRAM0_MCSTAT[MIC] for assertion to indicate the
3046 * completion of initialization.
3047 */
3048
3049 do {
3050 mfsdram(SDRAM_MCSTAT, val);
3051 } while ((val & SDRAM_MCSTAT_MIC_MASK) != SDRAM_MCSTAT_MIC_COMP);
3052
3053 /* Set Delay Control Registers */
3054
3055 mtsdram(SDRAM_DLCR, CONFIG_SYS_SDRAM0_DLCR);
3056
3057 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
3058 mtsdram(SDRAM_RDCC, CONFIG_SYS_SDRAM0_RDCC);
3059 mtsdram(SDRAM_RQDC, CONFIG_SYS_SDRAM0_RQDC);
3060 mtsdram(SDRAM_RFDC, CONFIG_SYS_SDRAM0_RFDC);
3061 #endif /* !CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
3062
3063 /*
3064 * Enable Controller by SDRAM0_MCOPT2[DCEN] = 1:
3065 */
3066
3067 mfsdram(SDRAM_MCOPT2, val);
3068 mtsdram(SDRAM_MCOPT2, val | SDRAM_MCOPT2_DCEN_ENABLE);
3069
3070 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
3071 #if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
3072 /*------------------------------------------------------------------
3073 | DQS calibration.
3074 +-----------------------------------------------------------------*/
3075 DQS_autocalibration();
3076 #endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */
3077 #endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
3078
3079 #if defined(CONFIG_DDR_ECC)
3080 ecc_init(CONFIG_SYS_SDRAM_BASE, CONFIG_SYS_MBYTES_SDRAM << 20);
3081 #endif /* defined(CONFIG_DDR_ECC) */
3082
3083 ppc4xx_ibm_ddr2_register_dump();
3084
3085 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
3086 /*
3087 * Clear potential errors resulting from auto-calibration.
3088 * If not done, then we could get an interrupt later on when
3089 * exceptions are enabled.
3090 */
3091 set_mcsr(get_mcsr());
3092 #endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
3093
3094 #endif /* !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL) */
3095
3096 return (CONFIG_SYS_MBYTES_SDRAM << 20);
3097 }
3098 #endif /* CONFIG_SPD_EEPROM */
3099
3100 #if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
3101 #if defined(CONFIG_440)
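/*
 * The mfdcr()/mtdcr() macros expand to instructions that encode the
 * DCR number as an immediate, so it must be a compile-time constant.
 * These wrappers provide a limited run-time selection for the few
 * rank base address DCRs used by this driver.
 */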
3102 u32 mfdcr_any(u32 dcr)
3103 {
3104 u32 val;
3105
3106 switch (dcr) {
3107 case SDRAM_R0BAS + 0:
3108 val = mfdcr(SDRAM_R0BAS + 0);
3109 break;
3110 case SDRAM_R0BAS + 1:
3111 val = mfdcr(SDRAM_R0BAS + 1);
3112 break;
3113 case SDRAM_R0BAS + 2:
3114 val = mfdcr(SDRAM_R0BAS + 2);
3115 break;
3116 case SDRAM_R0BAS + 3:
3117 val = mfdcr(SDRAM_R0BAS + 3);
3118 break;
3119 default:
3120 printf("DCR %d not defined in case statement!!!\n", dcr);
3121 val = 0; /* just to satisfy the compiler */
3122 }
3123
3124 return val;
3125 }
3126
3127 void mtdcr_any(u32 dcr, u32 val)
3128 {
3129 switch (dcr) {
3130 case SDRAM_R0BAS + 0:
3131 mtdcr(SDRAM_R0BAS + 0, val);
3132 break;
3133 case SDRAM_R0BAS + 1:
3134 mtdcr(SDRAM_R0BAS + 1, val);
3135 break;
3136 case SDRAM_R0BAS + 2:
3137 mtdcr(SDRAM_R0BAS + 2, val);
3138 break;
3139 case SDRAM_R0BAS + 3:
3140 mtdcr(SDRAM_R0BAS + 3, val);
3141 break;
3142 default:
3143 printf("DCR %d not defined in case statement!!!\n", dcr);
3144 }
3145 }
3146 #endif /* defined(CONFIG_440) */
3147
3148 void blank_string(int size)
3149 {
3150 int i;
3151
3152 for (i = 0; i < size; i++)
3153 putc('\b');
3154 for (i = 0; i < size; i++)
3155 putc(' ');
3156 for (i = 0; i < size; i++)
3157 putc('\b');
3158 }
3159 #endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */
3160
3161 inline void ppc4xx_ibm_ddr2_register_dump(void)
3162 {
3163 #if defined(DEBUG)
3164 printf("\nPPC4xx IBM DDR2 Register Dump:\n");
3165
3166 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3167 defined(CONFIG_460EX) || defined(CONFIG_460GT))
3168 PPC4xx_IBM_DDR2_DUMP_REGISTER(R0BAS);
3169 PPC4xx_IBM_DDR2_DUMP_REGISTER(R1BAS);
3170 PPC4xx_IBM_DDR2_DUMP_REGISTER(R2BAS);
3171 PPC4xx_IBM_DDR2_DUMP_REGISTER(R3BAS);
3172 #endif /* (defined(CONFIG_440SP) || ... */
3173 #if defined(CONFIG_405EX)
3174 PPC4xx_IBM_DDR2_DUMP_REGISTER(BESR);
3175 PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARL);
3176 PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARH);
3177 PPC4xx_IBM_DDR2_DUMP_REGISTER(WMIRQ);
3178 PPC4xx_IBM_DDR2_DUMP_REGISTER(PLBOPT);
3179 PPC4xx_IBM_DDR2_DUMP_REGISTER(PUABA);
3180 #endif /* defined(CONFIG_405EX) */
3181 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB0CF);
3182 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB1CF);
3183 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB2CF);
3184 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB3CF);
3185 PPC4xx_IBM_DDR2_DUMP_REGISTER(MCSTAT);
3186 PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1);
3187 PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT2);
3188 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT0);
3189 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT1);
3190 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT2);
3191 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT3);
3192 PPC4xx_IBM_DDR2_DUMP_REGISTER(CODT);
3193 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3194 defined(CONFIG_460EX) || defined(CONFIG_460GT))
3195 PPC4xx_IBM_DDR2_DUMP_REGISTER(VVPR);
3196 PPC4xx_IBM_DDR2_DUMP_REGISTER(OPARS);
3197 /*
3198 * OPART is only used as a trigger register.
3199 *
3200 * No data is contained in this register, and reading or writing
3201 * to it can cause bad things to happen (hangs). Just skip it and
3202 * report "N/A".
3203 */
3204 printf("%20s = N/A\n", "SDRAM_OPART");
3205 #endif /* defined(CONFIG_440SP) || ... */
3206 PPC4xx_IBM_DDR2_DUMP_REGISTER(RTR);
3207 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR0);
3208 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR1);
3209 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR2);
3210 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR3);
3211 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR4);
3212 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR5);
3213 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR6);
3214 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR7);
3215 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR8);
3216 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR9);
3217 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR10);
3218 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR11);
3219 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR12);
3220 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR13);
3221 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR14);
3222 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR15);
3223 PPC4xx_IBM_DDR2_DUMP_REGISTER(RQDC);
3224 PPC4xx_IBM_DDR2_DUMP_REGISTER(RFDC);
3225 PPC4xx_IBM_DDR2_DUMP_REGISTER(RDCC);
3226 PPC4xx_IBM_DDR2_DUMP_REGISTER(DLCR);
3227 PPC4xx_IBM_DDR2_DUMP_REGISTER(CLKTR);
3228 PPC4xx_IBM_DDR2_DUMP_REGISTER(WRDTR);
3229 PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR1);
3230 PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR2);
3231 PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR3);
3232 PPC4xx_IBM_DDR2_DUMP_REGISTER(MMODE);
3233 PPC4xx_IBM_DDR2_DUMP_REGISTER(MEMODE);
3234 PPC4xx_IBM_DDR2_DUMP_REGISTER(ECCCR);
3235 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3236 defined(CONFIG_460EX) || defined(CONFIG_460GT))
3237 PPC4xx_IBM_DDR2_DUMP_REGISTER(CID);
3238 #endif /* defined(CONFIG_440SP) || ... */
3239 PPC4xx_IBM_DDR2_DUMP_REGISTER(RID);
3240 PPC4xx_IBM_DDR2_DUMP_REGISTER(FCSR);
3241 PPC4xx_IBM_DDR2_DUMP_REGISTER(RTSR);
3242 #endif /* defined(DEBUG) */
3243 }
3244
3245 #endif /* CONFIG_SDRAM_PPC4xx_IBM_DDR2 */