1 /*
2 * cpu/ppc4xx/44x_spd_ddr2.c
3 * This SPD SDRAM detection code supports AMCC PPC44x cpu's with a
4 * DDR2 controller (non Denali Core). Those currently are:
5 *
6 * 405: 405EX(r)
7 * 440/460: 440SP/440SPe/460EX/460GT
8 *
9 * Copyright (c) 2008 Nuovation System Designs, LLC
10 * Grant Erickson <gerickson@nuovations.com>
11 *
12 * (C) Copyright 2007-2008
13 * Stefan Roese, DENX Software Engineering, sr@denx.de.
14 *
15 * COPYRIGHT AMCC CORPORATION 2004
16 *
17 * See file CREDITS for list of people who contributed to this
18 * project.
19 *
20 * This program is free software; you can redistribute it and/or
21 * modify it under the terms of the GNU General Public License as
22 * published by the Free Software Foundation; either version 2 of
23 * the License, or (at your option) any later version.
24 *
25 * This program is distributed in the hope that it will be useful,
26 * but WITHOUT ANY WARRANTY; without even the implied warranty of
27 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
28 * GNU General Public License for more details.
29 *
30 * You should have received a copy of the GNU General Public License
31 * along with this program; if not, write to the Free Software
32 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
33 * MA 02111-1307 USA
34 *
35 */
36
37 /* define DEBUG for debugging output (obviously ;-)) */
38 #if 0
39 #define DEBUG
40 #endif
41
42 #include <common.h>
43 #include <command.h>
44 #include <ppc4xx.h>
45 #include <i2c.h>
46 #include <asm/io.h>
47 #include <asm/processor.h>
48 #include <asm/mmu.h>
49 #include <asm/cache.h>
50
51 #include "ecc.h"
52
53 #if defined(CONFIG_SDRAM_PPC4xx_IBM_DDR2)
54
55 #define PPC4xx_IBM_DDR2_DUMP_REGISTER(mnemonic) \
56 do { \
57 u32 data; \
58 mfsdram(SDRAM_##mnemonic, data); \
59 printf("%20s[%02x] = 0x%08X\n", \
60 "SDRAM_" #mnemonic, SDRAM_##mnemonic, data); \
61 } while (0)
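/*
 * Usage sketch: e.g. PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1) reads SDRAM_MCOPT1
 * via mfsdram() and prints its mnemonic, indirect address and current value.
 */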
62
63 #if defined(CONFIG_SPD_EEPROM)
64
65 /*-----------------------------------------------------------------------------+
66 * Defines
67 *-----------------------------------------------------------------------------*/
68 #ifndef TRUE
69 #define TRUE 1
70 #endif
71 #ifndef FALSE
72 #define FALSE 0
73 #endif
74
75 #define SDRAM_DDR1 1
76 #define SDRAM_DDR2 2
77 #define SDRAM_NONE 0
78
79 #define MAXDIMMS 2
80 #define MAXRANKS 4
81 #define MAXBXCF 4
82 #define MAX_SPD_BYTES 256 /* Max number of bytes on the DIMM's SPD EEPROM */
83
84 #define ONE_BILLION 1000000000
85
86 #define MULDIV64(m1, m2, d) (u32)(((u64)(m1) * (u64)(m2)) / (u64)(d))
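/*
 * The 64-bit intermediate avoids 32-bit overflow; e.g.
 * MULDIV64(ONE_BILLION, 100, 333333333) == 300, i.e. a 333 MHz clock
 * expressed in units of 10 ps (3.00 ns), while 10^9 * 100 would not
 * fit into a u32.
 */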
87
88 #define CMD_NOP (7 << 19)
89 #define CMD_PRECHARGE (2 << 19)
90 #define CMD_REFRESH (1 << 19)
91 #define CMD_EMR (0 << 19)
92 #define CMD_READ (5 << 19)
93 #define CMD_WRITE (4 << 19)
94
95 #define SELECT_MR (0 << 16)
96 #define SELECT_EMR (1 << 16)
97 #define SELECT_EMR2 (2 << 16)
98 #define SELECT_EMR3 (3 << 16)
99
100 /* MR */
101 #define DLL_RESET 0x00000100
102
103 #define WRITE_RECOV_2 (1 << 9)
104 #define WRITE_RECOV_3 (2 << 9)
105 #define WRITE_RECOV_4 (3 << 9)
106 #define WRITE_RECOV_5 (4 << 9)
107 #define WRITE_RECOV_6 (5 << 9)
108
109 #define BURST_LEN_4 0x00000002
110
111 /* EMR */
112 #define ODT_0_OHM 0x00000000
113 #define ODT_50_OHM 0x00000044
114 #define ODT_75_OHM 0x00000004
115 #define ODT_150_OHM 0x00000040
116
117 #define ODS_FULL 0x00000000
118 #define ODS_REDUCED 0x00000002
119 #define OCD_CALIB_DEF 0x00000380
120
121 /* defines for ODT (On Die Termination) of the 440SP(e) DDR2 controller */
122 #define ODT_EB0R (0x80000000 >> 8)
123 #define ODT_EB0W (0x80000000 >> 7)
124 #define CALC_ODT_R(n) (ODT_EB0R << (n << 1))
125 #define CALC_ODT_W(n) (ODT_EB0W << (n << 1))
126 #define CALC_ODT_RW(n) (CALC_ODT_R(n) | CALC_ODT_W(n))
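/*
 * Worked example: each rank uses one read and one write enable bit, shifted
 * by two bit positions per rank, e.g. CALC_ODT_R(1) == 0x02000000,
 * CALC_ODT_W(1) == 0x04000000 and CALC_ODT_RW(1) == 0x06000000.
 */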
127
128 /* Defines for the Read Cycle Delay test */
129 #define NUMMEMTESTS 8
130 #define NUMMEMWORDS 8
131 #define NUMLOOPS 64 /* memory test loops */
132
133 /*
134 * This DDR2 setup code can dynamically set up the TLB entries for the DDR2 memory
135 * region. Right now the cache should still be disabled in U-Boot because of the
136 * EMAC driver, which needs its buffer descriptors to be located in non-cached
137 * memory.
138 *
139 * If at some time this restriction doesn't apply anymore, just define
140 * CONFIG_4xx_DCACHE in the board config file and this code should setup
141 * everything correctly.
142 */
143 #ifdef CONFIG_4xx_DCACHE
144 #define MY_TLB_WORD2_I_ENABLE 0 /* enable caching on SDRAM */
145 #else
146 #define MY_TLB_WORD2_I_ENABLE TLB_WORD2_I_ENABLE /* disable caching on SDRAM */
147 #endif
148
149 /*
150 * Newer PPCs like the 440SPe and 460EX/GT can be equipped with more than 2GB of SDRAM.
151 * To support such configurations, we "only" map the first 2GB via the TLBs. We
152 * need some free virtual address space for the remaining peripherals, like SoC
153 * devices, FLASH etc.
154 *
155 * Note that ECC is currently not supported on configurations with more than 2GB
156 * SDRAM. This is because we only map the first 2GB on such systems, and therefore
157 * the ECC parity byte of the remaining area can't be written.
158 */
159 #ifndef CONFIG_MAX_MEM_MAPPED
160 #define CONFIG_MAX_MEM_MAPPED ((phys_size_t)2 << 30)
161 #endif
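/* For reference: ((phys_size_t)2 << 30) == 0x80000000 bytes, i.e. 2GB. */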
162
163 /*
164 * Board-specific platform code can reimplement spd_ddr_init_hang() if needed.
165 */
166 void __spd_ddr_init_hang (void)
167 {
168 hang ();
169 }
170 void spd_ddr_init_hang (void) __attribute__((weak, alias("__spd_ddr_init_hang")));
171
172 /*
173 * To provide an interface for board specific config values in this common
174 * DDR setup code, we implement the "weak" default functions here. They return
175 * the default value back to the caller.
176 *
177 * Please see include/configs/yucca.h for an example of a board-specific
178 * implementation.
179 */
180 u32 __ddr_wrdtr(u32 default_val)
181 {
182 return default_val;
183 }
184 u32 ddr_wrdtr(u32) __attribute__((weak, alias("__ddr_wrdtr")));
185
186 u32 __ddr_clktr(u32 default_val)
187 {
188 return default_val;
189 }
190 u32 ddr_clktr(u32) __attribute__((weak, alias("__ddr_clktr")));
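#if 0
/*
 * Illustrative sketch only (not taken from a real board port): a board can
 * override the weak defaults above to apply values determined during board
 * bring-up. The WTR phase used here is just a placeholder, not a validated
 * setting for any particular board.
 */
u32 ddr_wrdtr(u32 default_val)
{
	return (default_val & ~SDRAM_WRDTR_WTR_MASK) |
		SDRAM_WRDTR_WTR_90_DEG_ADV;
}
#endif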
191
192
193 /* Private Structure Definitions */
194
195 /* enum only to ease code for cas latency setting */
196 typedef enum ddr_cas_id {
197 DDR_CAS_2 = 20,
198 DDR_CAS_2_5 = 25,
199 DDR_CAS_3 = 30,
200 DDR_CAS_4 = 40,
201 DDR_CAS_5 = 50
202 } ddr_cas_id_t;
203
204 /*-----------------------------------------------------------------------------+
205 * Prototypes
206 *-----------------------------------------------------------------------------*/
207 static phys_size_t sdram_memsize(void);
208 static void get_spd_info(unsigned long *dimm_populated,
209 unsigned char *iic0_dimm_addr,
210 unsigned long num_dimm_banks);
211 static void check_mem_type(unsigned long *dimm_populated,
212 unsigned char *iic0_dimm_addr,
213 unsigned long num_dimm_banks);
214 static void check_frequency(unsigned long *dimm_populated,
215 unsigned char *iic0_dimm_addr,
216 unsigned long num_dimm_banks);
217 static void check_rank_number(unsigned long *dimm_populated,
218 unsigned char *iic0_dimm_addr,
219 unsigned long num_dimm_banks);
220 static void check_voltage_type(unsigned long *dimm_populated,
221 unsigned char *iic0_dimm_addr,
222 unsigned long num_dimm_banks);
223 static void program_memory_queue(unsigned long *dimm_populated,
224 unsigned char *iic0_dimm_addr,
225 unsigned long num_dimm_banks);
226 static void program_codt(unsigned long *dimm_populated,
227 unsigned char *iic0_dimm_addr,
228 unsigned long num_dimm_banks);
229 static void program_mode(unsigned long *dimm_populated,
230 unsigned char *iic0_dimm_addr,
231 unsigned long num_dimm_banks,
232 ddr_cas_id_t *selected_cas,
233 int *write_recovery);
234 static void program_tr(unsigned long *dimm_populated,
235 unsigned char *iic0_dimm_addr,
236 unsigned long num_dimm_banks);
237 static void program_rtr(unsigned long *dimm_populated,
238 unsigned char *iic0_dimm_addr,
239 unsigned long num_dimm_banks);
240 static void program_bxcf(unsigned long *dimm_populated,
241 unsigned char *iic0_dimm_addr,
242 unsigned long num_dimm_banks);
243 static void program_copt1(unsigned long *dimm_populated,
244 unsigned char *iic0_dimm_addr,
245 unsigned long num_dimm_banks);
246 static void program_initplr(unsigned long *dimm_populated,
247 unsigned char *iic0_dimm_addr,
248 unsigned long num_dimm_banks,
249 ddr_cas_id_t selected_cas,
250 int write_recovery);
251 static unsigned long is_ecc_enabled(void);
252 #ifdef CONFIG_DDR_ECC
253 static void program_ecc(unsigned long *dimm_populated,
254 unsigned char *iic0_dimm_addr,
255 unsigned long num_dimm_banks,
256 unsigned long tlb_word2_i_value);
257 static void program_ecc_addr(unsigned long start_address,
258 unsigned long num_bytes,
259 unsigned long tlb_word2_i_value);
260 #endif
261 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
262 static void program_DQS_calibration(unsigned long *dimm_populated,
263 unsigned char *iic0_dimm_addr,
264 unsigned long num_dimm_banks);
265 #ifdef HARD_CODED_DQS /* calibration test with hard-coded values */
266 static void test(void);
267 #else
268 static void DQS_calibration_process(void);
269 #endif
270 #endif
271 int do_reset (cmd_tbl_t *cmdtp, int flag, int argc, char *argv[]);
272 void dcbz_area(u32 start_address, u32 num_bytes);
273
274 static unsigned char spd_read(uchar chip, uint addr)
275 {
276 unsigned char data[2];
277
278 if (i2c_probe(chip) == 0)
279 if (i2c_read(chip, addr, 1, data, 1) == 0)
280 return data[0];
281
282 return 0;
283 }
284
285 /*-----------------------------------------------------------------------------+
286 * sdram_memsize
287 *-----------------------------------------------------------------------------*/
288 static phys_size_t sdram_memsize(void)
289 {
290 phys_size_t mem_size;
291 unsigned long mcopt2;
292 unsigned long mcstat;
293 unsigned long mb0cf;
294 unsigned long sdsz;
295 unsigned long i;
296
297 mem_size = 0;
298
299 mfsdram(SDRAM_MCOPT2, mcopt2);
300 mfsdram(SDRAM_MCSTAT, mcstat);
301
302 /* DDR controller must be enabled and not in self-refresh. */
303 /* Otherwise memsize is zero. */
304 if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
305 && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
306 && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
307 == (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
308 for (i = 0; i < MAXBXCF; i++) {
309 mfsdram(SDRAM_MB0CF + (i << 2), mb0cf);
310 /* Banks enabled */
311 if ((mb0cf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
312 sdsz = mfdcr_any(SDRAM_R0BAS + i) & SDRAM_RXBAS_SDSZ_MASK;
313
314 switch(sdsz) {
315 case SDRAM_RXBAS_SDSZ_8:
316 mem_size+=8;
317 break;
318 case SDRAM_RXBAS_SDSZ_16:
319 mem_size+=16;
320 break;
321 case SDRAM_RXBAS_SDSZ_32:
322 mem_size+=32;
323 break;
324 case SDRAM_RXBAS_SDSZ_64:
325 mem_size+=64;
326 break;
327 case SDRAM_RXBAS_SDSZ_128:
328 mem_size+=128;
329 break;
330 case SDRAM_RXBAS_SDSZ_256:
331 mem_size+=256;
332 break;
333 case SDRAM_RXBAS_SDSZ_512:
334 mem_size+=512;
335 break;
336 case SDRAM_RXBAS_SDSZ_1024:
337 mem_size+=1024;
338 break;
339 case SDRAM_RXBAS_SDSZ_2048:
340 mem_size+=2048;
341 break;
342 case SDRAM_RXBAS_SDSZ_4096:
343 mem_size+=4096;
344 break;
345 default:
346 printf("WARNING: Unsupported bank size (SDSZ=0x%lx)!\n"
347 , sdsz);
348 mem_size=0;
349 break;
350 }
351 }
352 }
353 }
354
355 return mem_size << 20;
356 }
357
358 /*-----------------------------------------------------------------------------+
359 * initdram. Initializes the 440SP Memory Queue and DDR SDRAM controller.
360 * Note: This routine runs from flash with a stack set up in the chip's
361 * sram space. It is important that the routine does not require .sbss, .bss or
362 * .data sections. It also cannot call routines that require these sections.
363 *-----------------------------------------------------------------------------*/
364 /*-----------------------------------------------------------------------------
365 * Function: initdram
366 * Description: Configures SDRAM memory banks for DDR operation.
367 * Auto Memory Configuration option reads the DDR SDRAM EEPROMs
368 * via the IIC bus and then configures the DDR SDRAM memory
369 * banks appropriately. If Auto Memory Configuration is
370 * not used, it is assumed that no DIMM is plugged in.
371 *-----------------------------------------------------------------------------*/
372 phys_size_t initdram(int board_type)
373 {
374 unsigned char iic0_dimm_addr[] = SPD_EEPROM_ADDRESS;
375 unsigned char spd0[MAX_SPD_BYTES];
376 unsigned char spd1[MAX_SPD_BYTES];
377 unsigned char *dimm_spd[MAXDIMMS];
378 unsigned long dimm_populated[MAXDIMMS];
379 unsigned long num_dimm_banks; /* on board dimm banks */
380 unsigned long val;
381 ddr_cas_id_t selected_cas = DDR_CAS_5; /* preset to silence compiler */
382 int write_recovery;
383 phys_size_t dram_size = 0;
384
385 num_dimm_banks = sizeof(iic0_dimm_addr);
386
387 /*------------------------------------------------------------------
388 * Set up an array of SPD data buffers.
389 *-----------------------------------------------------------------*/
390 dimm_spd[0] = spd0;
391 dimm_spd[1] = spd1;
392
393 /*------------------------------------------------------------------
394 * Reset the DDR-SDRAM controller.
395 *-----------------------------------------------------------------*/
396 mtsdr(SDR0_SRST, (0x80000000 >> 10));
397 mtsdr(SDR0_SRST, 0x00000000);
398
399 /*
400 * Make sure I2C controller is initialized
401 * before continuing.
402 */
403
404 /* switch to correct I2C bus */
405 I2C_SET_BUS(CONFIG_SYS_SPD_BUS_NUM);
406 i2c_init(CONFIG_SYS_I2C_SPEED, CONFIG_SYS_I2C_SLAVE);
407
408 /*------------------------------------------------------------------
409 * Clear out the serial presence detect buffers.
410 * Perform IIC reads from the dimm. Fill in the spds.
411 * Check to see if the dimm slots are populated
412 *-----------------------------------------------------------------*/
413 get_spd_info(dimm_populated, iic0_dimm_addr, num_dimm_banks);
414
415 /*------------------------------------------------------------------
416 * Check the memory type for the dimms plugged.
417 *-----------------------------------------------------------------*/
418 check_mem_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);
419
420 /*------------------------------------------------------------------
421 * Check the frequency supported for the dimms plugged.
422 *-----------------------------------------------------------------*/
423 check_frequency(dimm_populated, iic0_dimm_addr, num_dimm_banks);
424
425 /*------------------------------------------------------------------
426 * Check the total rank number.
427 *-----------------------------------------------------------------*/
428 check_rank_number(dimm_populated, iic0_dimm_addr, num_dimm_banks);
429
430 /*------------------------------------------------------------------
431 * Check the voltage type for the dimms plugged.
432 *-----------------------------------------------------------------*/
433 check_voltage_type(dimm_populated, iic0_dimm_addr, num_dimm_banks);
434
435 /*------------------------------------------------------------------
436 * Program SDRAM controller options 2 register
437 * without enabling the memory controller yet.
438 *-----------------------------------------------------------------*/
439 mfsdram(SDRAM_MCOPT2, val);
440 mtsdram(SDRAM_MCOPT2,
441 (val &
442 ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_PMEN_MASK |
443 SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_XSRP_MASK |
444 SDRAM_MCOPT2_ISIE_MASK))
445 | (SDRAM_MCOPT2_SREN_ENTER | SDRAM_MCOPT2_PMEN_DISABLE |
446 SDRAM_MCOPT2_IPTR_IDLE | SDRAM_MCOPT2_XSRP_ALLOW |
447 SDRAM_MCOPT2_ISIE_ENABLE));
448
449 /*------------------------------------------------------------------
450 * Program SDRAM controller options 1 register
451 * Note: Does not enable the memory controller.
452 *-----------------------------------------------------------------*/
453 program_copt1(dimm_populated, iic0_dimm_addr, num_dimm_banks);
454
455 /*------------------------------------------------------------------
456 * Set the SDRAM Controller On Die Termination Register
457 *-----------------------------------------------------------------*/
458 program_codt(dimm_populated, iic0_dimm_addr, num_dimm_banks);
459
460 /*------------------------------------------------------------------
461 * Program SDRAM refresh register.
462 *-----------------------------------------------------------------*/
463 program_rtr(dimm_populated, iic0_dimm_addr, num_dimm_banks);
464
465 /*------------------------------------------------------------------
466 * Program SDRAM mode register.
467 *-----------------------------------------------------------------*/
468 program_mode(dimm_populated, iic0_dimm_addr, num_dimm_banks,
469 &selected_cas, &write_recovery);
470
471 /*------------------------------------------------------------------
472 * Set the SDRAM Write Data/DM/DQS Clock Timing Reg
473 *-----------------------------------------------------------------*/
474 mfsdram(SDRAM_WRDTR, val);
475 mtsdram(SDRAM_WRDTR, (val & ~(SDRAM_WRDTR_LLWP_MASK | SDRAM_WRDTR_WTR_MASK)) |
476 ddr_wrdtr(SDRAM_WRDTR_LLWP_1_CYC | SDRAM_WRDTR_WTR_90_DEG_ADV));
477
478 /*------------------------------------------------------------------
479 * Set the SDRAM Clock Timing Register
480 *-----------------------------------------------------------------*/
481 mfsdram(SDRAM_CLKTR, val);
482 mtsdram(SDRAM_CLKTR, (val & ~SDRAM_CLKTR_CLKP_MASK) |
483 ddr_clktr(SDRAM_CLKTR_CLKP_0_DEG));
484
485 /*------------------------------------------------------------------
486 * Program the BxCF registers.
487 *-----------------------------------------------------------------*/
488 program_bxcf(dimm_populated, iic0_dimm_addr, num_dimm_banks);
489
490 /*------------------------------------------------------------------
491 * Program SDRAM timing registers.
492 *-----------------------------------------------------------------*/
493 program_tr(dimm_populated, iic0_dimm_addr, num_dimm_banks);
494
495 /*------------------------------------------------------------------
496 * Set the Extended Mode register
497 *-----------------------------------------------------------------*/
498 mfsdram(SDRAM_MEMODE, val);
499 mtsdram(SDRAM_MEMODE,
500 (val & ~(SDRAM_MEMODE_DIC_MASK | SDRAM_MEMODE_DLL_MASK |
501 SDRAM_MEMODE_RTT_MASK | SDRAM_MEMODE_DQS_MASK)) |
502 (SDRAM_MEMODE_DIC_NORMAL | SDRAM_MEMODE_DLL_ENABLE
503 | SDRAM_MEMODE_RTT_150OHM | SDRAM_MEMODE_DQS_ENABLE));
504
505 /*------------------------------------------------------------------
506 * Program Initialization preload registers.
507 *-----------------------------------------------------------------*/
508 program_initplr(dimm_populated, iic0_dimm_addr, num_dimm_banks,
509 selected_cas, write_recovery);
510
511 /*------------------------------------------------------------------
512 * Delay to ensure 200usec have elapsed since reset.
513 *-----------------------------------------------------------------*/
514 udelay(400);
515
516 /*------------------------------------------------------------------
517 * Set the memory queue core base addr.
518 *-----------------------------------------------------------------*/
519 program_memory_queue(dimm_populated, iic0_dimm_addr, num_dimm_banks);
520
521 /*------------------------------------------------------------------
522 * Program SDRAM controller options 2 register
523 * Start the memory controller init sequence (IPTR_EXECUTE).
524 *-----------------------------------------------------------------*/
525 mfsdram(SDRAM_MCOPT2, val);
526 mtsdram(SDRAM_MCOPT2,
527 (val & ~(SDRAM_MCOPT2_SREN_MASK | SDRAM_MCOPT2_DCEN_MASK |
528 SDRAM_MCOPT2_IPTR_MASK | SDRAM_MCOPT2_ISIE_MASK)) |
529 SDRAM_MCOPT2_IPTR_EXECUTE);
530
531 /*------------------------------------------------------------------
532 * Wait for IPTR_EXECUTE init sequence to complete.
533 *-----------------------------------------------------------------*/
534 do {
535 mfsdram(SDRAM_MCSTAT, val);
536 } while ((val & SDRAM_MCSTAT_MIC_MASK) == SDRAM_MCSTAT_MIC_NOTCOMP);
537
538 /* enable the controller only after init sequence completes */
539 mfsdram(SDRAM_MCOPT2, val);
540 mtsdram(SDRAM_MCOPT2, (val | SDRAM_MCOPT2_DCEN_ENABLE));
541
542 /* Make sure delay-line calibration is done before proceeding */
543 do {
544 mfsdram(SDRAM_DLCR, val);
545 } while (!(val & SDRAM_DLCR_DLCS_COMPLETE));
546
547 /* get installed memory size */
548 dram_size = sdram_memsize();
549
550 /*
551 * Limit size to 2GB
552 */
553 if (dram_size > CONFIG_MAX_MEM_MAPPED)
554 dram_size = CONFIG_MAX_MEM_MAPPED;
555
556 /* and program tlb entries for this size (dynamic) */
557
558 /*
559 * Program TLB entries with caches enabled, for best performance
560 * during auto-calibration and ECC generation
561 */
562 program_tlb(0, 0, dram_size, 0);
563
564 /*------------------------------------------------------------------
565 * DQS calibration.
566 *-----------------------------------------------------------------*/
567 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
568 DQS_autocalibration();
569 #else
570 program_DQS_calibration(dimm_populated, iic0_dimm_addr, num_dimm_banks);
571 #endif
572
573 #ifdef CONFIG_DDR_ECC
574 /*------------------------------------------------------------------
575 * If ecc is enabled, initialize the parity bits.
576 *-----------------------------------------------------------------*/
577 program_ecc(dimm_populated, iic0_dimm_addr, num_dimm_banks, 0);
578 #endif
579
580 /*
581 * Now that initialization (auto-calibration and ECC generation) is done,
582 * remove the cache-enabled TLB entries and program them again with the
583 * desired cache setting
584 */
585 remove_tlb(0, dram_size);
586 program_tlb(0, 0, dram_size, MY_TLB_WORD2_I_ENABLE);
587
588 ppc4xx_ibm_ddr2_register_dump();
589
590 /*
591 * Clear potential errors resulting from auto-calibration.
592 * If not done, then we could get an interrupt later on when
593 * exceptions are enabled.
594 */
595 set_mcsr(get_mcsr());
596
597 return sdram_memsize();
598 }
599
600 static void get_spd_info(unsigned long *dimm_populated,
601 unsigned char *iic0_dimm_addr,
602 unsigned long num_dimm_banks)
603 {
604 unsigned long dimm_num;
605 unsigned long dimm_found;
606 unsigned char num_of_bytes;
607 unsigned char total_size;
608
609 dimm_found = FALSE;
610 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
611 num_of_bytes = 0;
612 total_size = 0;
613
614 num_of_bytes = spd_read(iic0_dimm_addr[dimm_num], 0);
615 debug("\nspd_read(0x%x) returned %d\n",
616 iic0_dimm_addr[dimm_num], num_of_bytes);
617 total_size = spd_read(iic0_dimm_addr[dimm_num], 1);
618 debug("spd_read(0x%x) returned %d\n",
619 iic0_dimm_addr[dimm_num], total_size);
620
621 if ((num_of_bytes != 0) && (total_size != 0)) {
622 dimm_populated[dimm_num] = TRUE;
623 dimm_found = TRUE;
624 debug("DIMM slot %lu: populated\n", dimm_num);
625 } else {
626 dimm_populated[dimm_num] = FALSE;
627 debug("DIMM slot %lu: Not populated\n", dimm_num);
628 }
629 }
630
631 if (dimm_found == FALSE) {
632 printf("ERROR - No memory installed. Install a DDR-SDRAM DIMM.\n\n");
633 spd_ddr_init_hang ();
634 }
635 }
636
637 void board_add_ram_info(int use_default)
638 {
639 PPC4xx_SYS_INFO board_cfg;
640 u32 val;
641
642 if (is_ecc_enabled())
643 puts(" (ECC");
644 else
645 puts(" (ECC not");
646
647 get_sys_info(&board_cfg);
648
649 mfsdr(SDR0_DDR0, val);
650 val = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(val), 1);
651 printf(" enabled, %d MHz", (val * 2) / 1000000);
652
653 mfsdram(SDRAM_MMODE, val);
654 val = (val & SDRAM_MMODE_DCL_MASK) >> 4;
655 printf(", CL%d)", val);
656 }
657
658 /*------------------------------------------------------------------
659 * For the memory DIMMs installed, this routine verifies that they
660 * really are DDR specific DIMMs.
661 *-----------------------------------------------------------------*/
662 static void check_mem_type(unsigned long *dimm_populated,
663 unsigned char *iic0_dimm_addr,
664 unsigned long num_dimm_banks)
665 {
666 unsigned long dimm_num;
667 unsigned long dimm_type;
668
669 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
670 if (dimm_populated[dimm_num] == TRUE) {
671 dimm_type = spd_read(iic0_dimm_addr[dimm_num], 2);
672 switch (dimm_type) {
673 case 1:
674 printf("ERROR: Standard Fast Page Mode DRAM DIMM detected in "
675 "slot %d.\n", (unsigned int)dimm_num);
676 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
677 printf("Replace the DIMM module with a supported DIMM.\n\n");
678 spd_ddr_init_hang ();
679 break;
680 case 2:
681 printf("ERROR: EDO DIMM detected in slot %d.\n",
682 (unsigned int)dimm_num);
683 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
684 printf("Replace the DIMM module with a supported DIMM.\n\n");
685 spd_ddr_init_hang ();
686 break;
687 case 3:
688 printf("ERROR: Pipelined Nibble DIMM detected in slot %d.\n",
689 (unsigned int)dimm_num);
690 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
691 printf("Replace the DIMM module with a supported DIMM.\n\n");
692 spd_ddr_init_hang ();
693 break;
694 case 4:
695 printf("ERROR: SDRAM DIMM detected in slot %d.\n",
696 (unsigned int)dimm_num);
697 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
698 printf("Replace the DIMM module with a supported DIMM.\n\n");
699 spd_ddr_init_hang ();
700 break;
701 case 5:
702 printf("ERROR: Multiplexed ROM DIMM detected in slot %d.\n",
703 (unsigned int)dimm_num);
704 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
705 printf("Replace the DIMM module with a supported DIMM.\n\n");
706 spd_ddr_init_hang ();
707 break;
708 case 6:
709 printf("ERROR: SGRAM DIMM detected in slot %d.\n",
710 (unsigned int)dimm_num);
711 printf("Only DDR and DDR2 SDRAM DIMMs are supported.\n");
712 printf("Replace the DIMM module with a supported DIMM.\n\n");
713 spd_ddr_init_hang ();
714 break;
715 case 7:
716 debug("DIMM slot %d: DDR1 SDRAM detected\n", dimm_num);
717 dimm_populated[dimm_num] = SDRAM_DDR1;
718 break;
719 case 8:
720 debug("DIMM slot %d: DDR2 SDRAM detected\n", dimm_num);
721 dimm_populated[dimm_num] = SDRAM_DDR2;
722 break;
723 default:
724 printf("ERROR: Unknown DIMM detected in slot %d.\n",
725 (unsigned int)dimm_num);
726 printf("Only DDR1 and DDR2 SDRAM DIMMs are supported.\n");
727 printf("Replace the DIMM module with a supported DIMM.\n\n");
728 spd_ddr_init_hang ();
729 break;
730 }
731 }
732 }
733 for (dimm_num = 1; dimm_num < num_dimm_banks; dimm_num++) {
734 if ((dimm_populated[dimm_num-1] != SDRAM_NONE)
735 && (dimm_populated[dimm_num] != SDRAM_NONE)
736 && (dimm_populated[dimm_num-1] != dimm_populated[dimm_num])) {
737 printf("ERROR: DIMM's DDR1 and DDR2 type can not be mixed.\n");
738 spd_ddr_init_hang ();
739 }
740 }
741 }
742
743 /*------------------------------------------------------------------
744 * For the memory DIMMs installed, this routine verifies that
745 * frequency previously calculated is supported.
746 *-----------------------------------------------------------------*/
747 static void check_frequency(unsigned long *dimm_populated,
748 unsigned char *iic0_dimm_addr,
749 unsigned long num_dimm_banks)
750 {
751 unsigned long dimm_num;
752 unsigned long tcyc_reg;
753 unsigned long cycle_time;
754 unsigned long calc_cycle_time;
755 unsigned long sdram_freq;
756 unsigned long sdr_ddrpll;
757 PPC4xx_SYS_INFO board_cfg;
758
759 /*------------------------------------------------------------------
760 * Get the board configuration info.
761 *-----------------------------------------------------------------*/
762 get_sys_info(&board_cfg);
763
764 mfsdr(SDR0_DDR0, sdr_ddrpll);
765 sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
766
767 /*
768 * calc_cycle_time is calculated from DDR frequency set by board/chip
769 * and is expressed in multiple of 10 picoseconds
770 * to match the way DIMM cycle time is calculated below.
771 */
772 calc_cycle_time = MULDIV64(ONE_BILLION, 100, sdram_freq);
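	/*
	 * Example: with sdram_freq = 333333333 Hz this yields
	 * calc_cycle_time = 300, i.e. 3.00 ns in units of 10 ps.
	 */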
773
774 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
775 if (dimm_populated[dimm_num] != SDRAM_NONE) {
776 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
777 /*
778 * Byte 9, Cycle time for CAS Latency=X, is split into two nibbles:
779 * the higher order nibble (bits 4-7) designates the cycle time
780 * to a granularity of 1ns;
781 * the value presented by the lower order nibble (bits 0-3)
782 * has a granularity of .1ns and is added to the value designated
783 * by the higher nibble. In addition, four codes of the lower order
784 * nibble are assigned to represent +.25, +.33, +.66 and +.75.
785 */
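			/*
			 * Worked example: SPD byte 9 = 0x3D decodes to
			 * (3 * 100) + 75 = 375 (3.75 ns); 0x50 decodes
			 * to 500 (5.0 ns).
			 */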
786 /* Convert from hex to decimal */
787 if ((tcyc_reg & 0x0F) == 0x0D)
788 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
789 else if ((tcyc_reg & 0x0F) == 0x0C)
790 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 66;
791 else if ((tcyc_reg & 0x0F) == 0x0B)
792 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 33;
793 else if ((tcyc_reg & 0x0F) == 0x0A)
794 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) + 25;
795 else
796 cycle_time = (((tcyc_reg & 0xF0) >> 4) * 100) +
797 ((tcyc_reg & 0x0F)*10);
798 debug("cycle_time=%d [10 picoseconds]\n", cycle_time);
799
800 if (cycle_time > (calc_cycle_time + 10)) {
801 /*
802 * The DIMM's minimum cycle time is larger than the cycle time derived
803 * from the configured SDRAM frequency, so the DIMM cannot run this fast.
804 * The additional 100 ps allows for a small rounding uncertainty.
805 */
806 printf("ERROR: DRAM DIMM detected with cycle_time %d ps in "
807 "slot %d \n while calculated cycle time is %d ps.\n",
808 (unsigned int)(cycle_time*10),
809 (unsigned int)dimm_num,
810 (unsigned int)(calc_cycle_time*10));
811 printf("Replace the DIMM, or change DDR frequency via "
812 "strapping bits.\n\n");
813 spd_ddr_init_hang ();
814 }
815 }
816 }
817 }
818
819 /*------------------------------------------------------------------
820 * For the memory DIMMs installed, this routine verifies that the
821 * total number of ranks does not exceed the supported maximum.
822 *-----------------------------------------------------------------*/
823 static void check_rank_number(unsigned long *dimm_populated,
824 unsigned char *iic0_dimm_addr,
825 unsigned long num_dimm_banks)
826 {
827 unsigned long dimm_num;
828 unsigned long dimm_rank;
829 unsigned long total_rank = 0;
830
831 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
832 if (dimm_populated[dimm_num] != SDRAM_NONE) {
833 dimm_rank = spd_read(iic0_dimm_addr[dimm_num], 5);
834 if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
835 dimm_rank = (dimm_rank & 0x0F) +1;
836 else
837 dimm_rank = dimm_rank & 0x0F;
838
839
840 if (dimm_rank > MAXRANKS) {
841 printf("ERROR: DRAM DIMM detected with %lu ranks in "
842 "slot %lu is not supported.\n", dimm_rank, dimm_num);
843 printf("Only %d ranks are supported for all DIMM.\n", MAXRANKS);
844 printf("Replace the DIMM module with a supported DIMM.\n\n");
845 spd_ddr_init_hang ();
846 } else
847 total_rank += dimm_rank;
848 }
849 if (total_rank > MAXRANKS) {
850 printf("ERROR: DRAM DIMM detected with a total of %d ranks "
851 "for all slots.\n", (unsigned int)total_rank);
852 printf("Only %d ranks are supported for all DIMM.\n", MAXRANKS);
853 printf("Remove one of the DIMM modules.\n\n");
854 spd_ddr_init_hang ();
855 }
856 }
857 }
858
859 /*------------------------------------------------------------------
860 * Only 2.5V (DDR1) and 1.8V (DDR2) modules are supported.
861 * This routine verifies this.
862 *-----------------------------------------------------------------*/
863 static void check_voltage_type(unsigned long *dimm_populated,
864 unsigned char *iic0_dimm_addr,
865 unsigned long num_dimm_banks)
866 {
867 unsigned long dimm_num;
868 unsigned long voltage_type;
869
870 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
871 if (dimm_populated[dimm_num] != SDRAM_NONE) {
872 voltage_type = spd_read(iic0_dimm_addr[dimm_num], 8);
873 switch (voltage_type) {
874 case 0x00:
875 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
876 printf("This DIMM is 5.0 Volt/TTL.\n");
877 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
878 (unsigned int)dimm_num);
879 spd_ddr_init_hang ();
880 break;
881 case 0x01:
882 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
883 printf("This DIMM is LVTTL.\n");
884 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
885 (unsigned int)dimm_num);
886 spd_ddr_init_hang ();
887 break;
888 case 0x02:
889 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
890 printf("This DIMM is 1.5 Volt.\n");
891 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
892 (unsigned int)dimm_num);
893 spd_ddr_init_hang ();
894 break;
895 case 0x03:
896 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
897 printf("This DIMM is 3.3 Volt/TTL.\n");
898 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
899 (unsigned int)dimm_num);
900 spd_ddr_init_hang ();
901 break;
902 case 0x04:
903 /* 2.5 Voltage only for DDR1 */
904 break;
905 case 0x05:
906 /* 1.8 Voltage only for DDR2 */
907 break;
908 default:
909 printf("ERROR: Only DIMMs DDR 2.5V or DDR2 1.8V are supported.\n");
910 printf("Replace the DIMM module in slot %d with a supported DIMM.\n\n",
911 (unsigned int)dimm_num);
912 spd_ddr_init_hang ();
913 break;
914 }
915 }
916 }
917 }
918
919 /*-----------------------------------------------------------------------------+
920 * program_copt1.
921 *-----------------------------------------------------------------------------*/
922 static void program_copt1(unsigned long *dimm_populated,
923 unsigned char *iic0_dimm_addr,
924 unsigned long num_dimm_banks)
925 {
926 unsigned long dimm_num;
927 unsigned long mcopt1;
928 unsigned long ecc_enabled;
929 unsigned long ecc = 0;
930 unsigned long data_width = 0;
931 unsigned long dimm_32bit;
932 unsigned long dimm_64bit;
933 unsigned long registered = 0;
934 unsigned long attribute = 0;
935 unsigned long buf0, buf1; /* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
936 unsigned long bankcount;
937 unsigned long ddrtype;
938 unsigned long val;
939
940 #ifdef CONFIG_DDR_ECC
941 ecc_enabled = TRUE;
942 #else
943 ecc_enabled = FALSE;
944 #endif
945 dimm_32bit = FALSE;
946 dimm_64bit = FALSE;
947 buf0 = FALSE;
948 buf1 = FALSE;
949
950 /*------------------------------------------------------------------
951 * Set memory controller options reg 1, SDRAM_MCOPT1.
952 *-----------------------------------------------------------------*/
953 mfsdram(SDRAM_MCOPT1, val);
954 mcopt1 = val & ~(SDRAM_MCOPT1_MCHK_MASK | SDRAM_MCOPT1_RDEN_MASK |
955 SDRAM_MCOPT1_PMU_MASK | SDRAM_MCOPT1_DMWD_MASK |
956 SDRAM_MCOPT1_UIOS_MASK | SDRAM_MCOPT1_BCNT_MASK |
957 SDRAM_MCOPT1_DDR_TYPE_MASK | SDRAM_MCOPT1_RWOO_MASK |
958 SDRAM_MCOPT1_WOOO_MASK | SDRAM_MCOPT1_DCOO_MASK |
959 SDRAM_MCOPT1_DREF_MASK);
960
961 mcopt1 |= SDRAM_MCOPT1_QDEP;
962 mcopt1 |= SDRAM_MCOPT1_PMU_OPEN;
963 mcopt1 |= SDRAM_MCOPT1_RWOO_DISABLED;
964 mcopt1 |= SDRAM_MCOPT1_WOOO_DISABLED;
965 mcopt1 |= SDRAM_MCOPT1_DCOO_DISABLED;
966 mcopt1 |= SDRAM_MCOPT1_DREF_NORMAL;
967
968 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
969 if (dimm_populated[dimm_num] != SDRAM_NONE) {
970 /* test ecc support */
971 ecc = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 11);
972 if (ecc != 0x02) /* ecc not supported */
973 ecc_enabled = FALSE;
974
975 /* test bank count */
976 bankcount = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 17);
977 if (bankcount == 0x04) /* bank count = 4 */
978 mcopt1 |= SDRAM_MCOPT1_4_BANKS;
979 else /* bank count = 8 */
980 mcopt1 |= SDRAM_MCOPT1_8_BANKS;
981
982 /* test DDR type */
983 ddrtype = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2);
984 /* test for buffered/unbuffered, registered, differential clocks */
985 registered = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 20);
986 attribute = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 21);
987
988 /* TODO: code to be changed for IOP1.6 to support 4 DIMMs */
989 if (dimm_num == 0) {
990 if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
991 mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
992 if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
993 mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
994 if (registered == 1) { /* DDR2 always buffered */
995 /* TODO: what about above comments ? */
996 mcopt1 |= SDRAM_MCOPT1_RDEN;
997 buf0 = TRUE;
998 } else {
999 /* TODO: the mask 0x02 doesn't match Samsung def for byte 21. */
1000 if ((attribute & 0x02) == 0x00) {
1001 /* buffered not supported */
1002 buf0 = FALSE;
1003 } else {
1004 mcopt1 |= SDRAM_MCOPT1_RDEN;
1005 buf0 = TRUE;
1006 }
1007 }
1008 }
1009 else if (dimm_num == 1) {
1010 if (dimm_populated[dimm_num] == SDRAM_DDR1) /* DDR1 type */
1011 mcopt1 |= SDRAM_MCOPT1_DDR1_TYPE;
1012 if (dimm_populated[dimm_num] == SDRAM_DDR2) /* DDR2 type */
1013 mcopt1 |= SDRAM_MCOPT1_DDR2_TYPE;
1014 if (registered == 1) {
1015 /* DDR2 always buffered */
1016 mcopt1 |= SDRAM_MCOPT1_RDEN;
1017 buf1 = TRUE;
1018 } else {
1019 if ((attribute & 0x02) == 0x00) {
1020 /* buffered not supported */
1021 buf1 = FALSE;
1022 } else {
1023 mcopt1 |= SDRAM_MCOPT1_RDEN;
1024 buf1 = TRUE;
1025 }
1026 }
1027 }
1028
1029 /* Note that for DDR2 the byte 7 is reserved, but OK to keep code as is. */
1030 data_width = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 6) +
1031 (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 7)) << 8);
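			/*
			 * Example: a 64-bit module with ECC reports a data
			 * width of 72 and is treated as 64-bit below; a
			 * 32-bit module with ECC reports 40 (32-bit).
			 */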
1032
1033 switch (data_width) {
1034 case 72:
1035 case 64:
1036 dimm_64bit = TRUE;
1037 break;
1038 case 40:
1039 case 32:
1040 dimm_32bit = TRUE;
1041 break;
1042 default:
1043 printf("WARNING: Detected a DIMM with a data width of %lu bits.\n",
1044 data_width);
1045 printf("Only DIMMs with 32 or 64 bit DDR-SDRAM widths are supported.\n");
1046 break;
1047 }
1048 }
1049 }
1050
1051 /* verify matching properties */
1052 if ((dimm_populated[0] != SDRAM_NONE) && (dimm_populated[1] != SDRAM_NONE)) {
1053 if (buf0 != buf1) {
1054 printf("ERROR: DIMM's buffered/unbuffered, registered, clocking don't match.\n");
1055 spd_ddr_init_hang ();
1056 }
1057 }
1058
1059 if ((dimm_64bit == TRUE) && (dimm_32bit == TRUE)) {
1060 printf("ERROR: Cannot mix 32 bit and 64 bit DDR-SDRAM DIMMs together.\n");
1061 spd_ddr_init_hang ();
1062 }
1063 else if ((dimm_64bit == TRUE) && (dimm_32bit == FALSE)) {
1064 mcopt1 |= SDRAM_MCOPT1_DMWD_64;
1065 } else if ((dimm_64bit == FALSE) && (dimm_32bit == TRUE)) {
1066 mcopt1 |= SDRAM_MCOPT1_DMWD_32;
1067 } else {
1068 printf("ERROR: Please install only 32 or 64 bit DDR-SDRAM DIMMs.\n\n");
1069 spd_ddr_init_hang ();
1070 }
1071
1072 if (ecc_enabled == TRUE)
1073 mcopt1 |= SDRAM_MCOPT1_MCHK_GEN;
1074 else
1075 mcopt1 |= SDRAM_MCOPT1_MCHK_NON;
1076
1077 mtsdram(SDRAM_MCOPT1, mcopt1);
1078 }
1079
1080 /*-----------------------------------------------------------------------------+
1081 * program_codt.
1082 *-----------------------------------------------------------------------------*/
1083 static void program_codt(unsigned long *dimm_populated,
1084 unsigned char *iic0_dimm_addr,
1085 unsigned long num_dimm_banks)
1086 {
1087 unsigned long codt;
1088 unsigned long modt0 = 0;
1089 unsigned long modt1 = 0;
1090 unsigned long modt2 = 0;
1091 unsigned long modt3 = 0;
1092 unsigned char dimm_num;
1093 unsigned char dimm_rank;
1094 unsigned char total_rank = 0;
1095 unsigned char total_dimm = 0;
1096 unsigned char dimm_type = 0;
1097 unsigned char firstSlot = 0;
1098
1099 /*------------------------------------------------------------------
1100 * Set the SDRAM Controller On Die Termination Register
1101 *-----------------------------------------------------------------*/
1102 mfsdram(SDRAM_CODT, codt);
1103 codt |= (SDRAM_CODT_IO_NMODE
1104 & (~SDRAM_CODT_DQS_SINGLE_END
1105 & ~SDRAM_CODT_CKSE_SINGLE_END
1106 & ~SDRAM_CODT_FEEBBACK_RCV_SINGLE_END
1107 & ~SDRAM_CODT_FEEBBACK_DRV_SINGLE_END));
1108
1109 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1110 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1111 dimm_rank = (unsigned long)spd_read(iic0_dimm_addr[dimm_num], 5);
1112 if (((unsigned long)spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08) {
1113 dimm_rank = (dimm_rank & 0x0F) + 1;
1114 dimm_type = SDRAM_DDR2;
1115 } else {
1116 dimm_rank = dimm_rank & 0x0F;
1117 dimm_type = SDRAM_DDR1;
1118 }
1119
1120 total_rank += dimm_rank;
1121 total_dimm++;
1122 if ((dimm_num == 0) && (total_dimm == 1))
1123 firstSlot = TRUE;
1124 else
1125 firstSlot = FALSE;
1126 }
1127 }
1128 if (dimm_type == SDRAM_DDR2) {
1129 codt |= SDRAM_CODT_DQS_1_8_V_DDR2;
1130 if ((total_dimm == 1) && (firstSlot == TRUE)) {
1131 if (total_rank == 1) { /* PUUU */
1132 codt |= CALC_ODT_R(0);
1133 modt0 = CALC_ODT_W(0);
1134 modt1 = 0x00000000;
1135 modt2 = 0x00000000;
1136 modt3 = 0x00000000;
1137 }
1138 if (total_rank == 2) { /* PPUU */
1139 codt |= CALC_ODT_R(0) | CALC_ODT_R(1);
1140 modt0 = CALC_ODT_W(0) | CALC_ODT_W(1);
1141 modt1 = 0x00000000;
1142 modt2 = 0x00000000;
1143 modt3 = 0x00000000;
1144 }
1145 } else if ((total_dimm == 1) && (firstSlot != TRUE)) {
1146 if (total_rank == 1) { /* UUPU */
1147 codt |= CALC_ODT_R(2);
1148 modt0 = 0x00000000;
1149 modt1 = 0x00000000;
1150 modt2 = CALC_ODT_W(2);
1151 modt3 = 0x00000000;
1152 }
1153 if (total_rank == 2) { /* UUPP */
1154 codt |= CALC_ODT_R(2) | CALC_ODT_R(3);
1155 modt0 = 0x00000000;
1156 modt1 = 0x00000000;
1157 modt2 = CALC_ODT_W(2) | CALC_ODT_W(3);
1158 modt3 = 0x00000000;
1159 }
1160 }
1161 if (total_dimm == 2) {
1162 if (total_rank == 2) { /* PUPU */
1163 codt |= CALC_ODT_R(0) | CALC_ODT_R(2);
1164 modt0 = CALC_ODT_RW(2);
1165 modt1 = 0x00000000;
1166 modt2 = CALC_ODT_RW(0);
1167 modt3 = 0x00000000;
1168 }
1169 if (total_rank == 4) { /* PPPP */
1170 codt |= CALC_ODT_R(0) | CALC_ODT_R(1) |
1171 CALC_ODT_R(2) | CALC_ODT_R(3);
1172 modt0 = CALC_ODT_RW(2) | CALC_ODT_RW(3);
1173 modt1 = 0x00000000;
1174 modt2 = CALC_ODT_RW(0) | CALC_ODT_RW(1);
1175 modt3 = 0x00000000;
1176 }
1177 }
1178 } else {
1179 codt |= SDRAM_CODT_DQS_2_5_V_DDR1;
1180 modt0 = 0x00000000;
1181 modt1 = 0x00000000;
1182 modt2 = 0x00000000;
1183 modt3 = 0x00000000;
1184
1185 if (total_dimm == 1) {
1186 if (total_rank == 1)
1187 codt |= 0x00800000;
1188 if (total_rank == 2)
1189 codt |= 0x02800000;
1190 }
1191 if (total_dimm == 2) {
1192 if (total_rank == 2)
1193 codt |= 0x08800000;
1194 if (total_rank == 4)
1195 codt |= 0x2a800000;
1196 }
1197 }
1198
1199 debug("nb of dimm %d\n", total_dimm);
1200 debug("nb of rank %d\n", total_rank);
1201 if (total_dimm == 1)
1202 debug("dimm in slot %d\n", firstSlot);
1203
1204 mtsdram(SDRAM_CODT, codt);
1205 mtsdram(SDRAM_MODT0, modt0);
1206 mtsdram(SDRAM_MODT1, modt1);
1207 mtsdram(SDRAM_MODT2, modt2);
1208 mtsdram(SDRAM_MODT3, modt3);
1209 }
1210
1211 /*-----------------------------------------------------------------------------+
1212 * program_initplr.
1213 *-----------------------------------------------------------------------------*/
1214 static void program_initplr(unsigned long *dimm_populated,
1215 unsigned char *iic0_dimm_addr,
1216 unsigned long num_dimm_banks,
1217 ddr_cas_id_t selected_cas,
1218 int write_recovery)
1219 {
1220 u32 cas = 0;
1221 u32 odt = 0;
1222 u32 ods = 0;
1223 u32 mr;
1224 u32 wr;
1225 u32 emr;
1226 u32 emr2;
1227 u32 emr3;
1228 int dimm_num;
1229 int total_dimm = 0;
1230
1231 /******************************************************
1232 ** Assumption: if more than one DIMM, all DIMMs are the same
1233 ** as already checked in check_mem_type()
1234 ******************************************************/
1235
1236 if ((dimm_populated[0] == SDRAM_DDR1) || (dimm_populated[1] == SDRAM_DDR1)) {
1237 mtsdram(SDRAM_INITPLR0, 0x81B80000);
1238 mtsdram(SDRAM_INITPLR1, 0x81900400);
1239 mtsdram(SDRAM_INITPLR2, 0x81810000);
1240 mtsdram(SDRAM_INITPLR3, 0xff800162);
1241 mtsdram(SDRAM_INITPLR4, 0x81900400);
1242 mtsdram(SDRAM_INITPLR5, 0x86080000);
1243 mtsdram(SDRAM_INITPLR6, 0x86080000);
1244 mtsdram(SDRAM_INITPLR7, 0x81000062);
1245 } else if ((dimm_populated[0] == SDRAM_DDR2) || (dimm_populated[1] == SDRAM_DDR2)) {
1246 switch (selected_cas) {
1247 case DDR_CAS_3:
1248 cas = 3 << 4;
1249 break;
1250 case DDR_CAS_4:
1251 cas = 4 << 4;
1252 break;
1253 case DDR_CAS_5:
1254 cas = 5 << 4;
1255 break;
1256 default:
1257 printf("ERROR: ucode error on selected_cas value %d", selected_cas);
1258 spd_ddr_init_hang ();
1259 break;
1260 }
1261
1262 #if 0
1263 /*
1264 * ToDo - Still a problem with the write recovery:
1265 * On the Corsair CM2X512-5400C4 module, setting write recovery
1266 * in the INITPLR reg to the value calculated in program_mode()
1267 * results in not correctly working DDR2 memory (crash after
1268 * relocation).
1269 *
1270 * So for now, set the write recovery to 3. This seems to work
1271 * on the Corsair module too.
1272 *
1273 * 2007-03-01, sr
1274 */
1275 switch (write_recovery) {
1276 case 3:
1277 wr = WRITE_RECOV_3;
1278 break;
1279 case 4:
1280 wr = WRITE_RECOV_4;
1281 break;
1282 case 5:
1283 wr = WRITE_RECOV_5;
1284 break;
1285 case 6:
1286 wr = WRITE_RECOV_6;
1287 break;
1288 default:
1289 printf("ERROR: write recovery not support (%d)", write_recovery);
1290 spd_ddr_init_hang ();
1291 break;
1292 }
1293 #else
1294 wr = WRITE_RECOV_3; /* test-only, see description above */
1295 #endif
1296
1297 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++)
1298 if (dimm_populated[dimm_num] != SDRAM_NONE)
1299 total_dimm++;
1300 if (total_dimm == 1) {
1301 odt = ODT_150_OHM;
1302 ods = ODS_FULL;
1303 } else if (total_dimm == 2) {
1304 odt = ODT_75_OHM;
1305 ods = ODS_REDUCED;
1306 } else {
1307 printf("ERROR: Unsupported number of DIMM's (%d)", total_dimm);
1308 spd_ddr_init_hang ();
1309 }
1310
1311 mr = CMD_EMR | SELECT_MR | BURST_LEN_4 | wr | cas;
1312 emr = CMD_EMR | SELECT_EMR | odt | ods;
1313 emr2 = CMD_EMR | SELECT_EMR2;
1314 emr3 = CMD_EMR | SELECT_EMR3;
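		/*
		 * Worked example: with CAS 4, write recovery 3 and a single
		 * DIMM this gives mr = 0x00000442 (0x00000542 once DLL_RESET
		 * is OR'ed in below) and emr = 0x00010040
		 * (SELECT_EMR | ODT_150_OHM | ODS_FULL).
		 */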
1315 /* NOP - Wait 106 MemClk cycles */
1316 mtsdram(SDRAM_INITPLR0, SDRAM_INITPLR_ENABLE | CMD_NOP |
1317 SDRAM_INITPLR_IMWT_ENCODE(106));
1318 udelay(1000);
1319 /* precharge 4 MemClk cycles */
1320 mtsdram(SDRAM_INITPLR1, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
1321 SDRAM_INITPLR_IMWT_ENCODE(4));
1322 /* EMR2 - Wait tMRD (2 MemClk cycles) */
1323 mtsdram(SDRAM_INITPLR2, SDRAM_INITPLR_ENABLE | emr2 |
1324 SDRAM_INITPLR_IMWT_ENCODE(2));
1325 /* EMR3 - Wait tMRD (2 MemClk cycles) */
1326 mtsdram(SDRAM_INITPLR3, SDRAM_INITPLR_ENABLE | emr3 |
1327 SDRAM_INITPLR_IMWT_ENCODE(2));
1328 /* EMR DLL ENABLE - Wait tMRD (2 MemClk cycles) */
1329 mtsdram(SDRAM_INITPLR4, SDRAM_INITPLR_ENABLE | emr |
1330 SDRAM_INITPLR_IMWT_ENCODE(2));
1331 /* MR w/ DLL reset - 200 cycle wait for DLL reset */
1332 mtsdram(SDRAM_INITPLR5, SDRAM_INITPLR_ENABLE | mr | DLL_RESET |
1333 SDRAM_INITPLR_IMWT_ENCODE(200));
1334 udelay(1000);
1335 /* precharge 4 MemClk cycles */
1336 mtsdram(SDRAM_INITPLR6, SDRAM_INITPLR_ENABLE | CMD_PRECHARGE |
1337 SDRAM_INITPLR_IMWT_ENCODE(4));
1338 /* Refresh 25 MemClk cycles */
1339 mtsdram(SDRAM_INITPLR7, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1340 SDRAM_INITPLR_IMWT_ENCODE(25));
1341 /* Refresh 25 MemClk cycles */
1342 mtsdram(SDRAM_INITPLR8, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1343 SDRAM_INITPLR_IMWT_ENCODE(25));
1344 /* Refresh 25 MemClk cycles */
1345 mtsdram(SDRAM_INITPLR9, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1346 SDRAM_INITPLR_IMWT_ENCODE(25));
1347 /* Refresh 25 MemClk cycles */
1348 mtsdram(SDRAM_INITPLR10, SDRAM_INITPLR_ENABLE | CMD_REFRESH |
1349 SDRAM_INITPLR_IMWT_ENCODE(25));
1350 /* MR w/o DLL reset - Wait tMRD (2 MemClk cycles) */
1351 mtsdram(SDRAM_INITPLR11, SDRAM_INITPLR_ENABLE | mr |
1352 SDRAM_INITPLR_IMWT_ENCODE(2));
1353 /* EMR OCD Default - Wait tMRD (2 MemClk cycles) */
1354 mtsdram(SDRAM_INITPLR12, SDRAM_INITPLR_ENABLE | OCD_CALIB_DEF |
1355 SDRAM_INITPLR_IMWT_ENCODE(2) | emr);
1356 /* EMR OCD Exit */
1357 mtsdram(SDRAM_INITPLR13, SDRAM_INITPLR_ENABLE | emr |
1358 SDRAM_INITPLR_IMWT_ENCODE(2));
1359 } else {
1360 printf("ERROR: ucode error as unknown DDR type in program_initplr");
1361 spd_ddr_init_hang ();
1362 }
1363 }
1364
1365 /*------------------------------------------------------------------
1366 * This routine programs the SDRAM_MMODE register.
1367 * The selected_cas is an output parameter that the caller passes
1368 * on to program_initplr() above.
1369 *-----------------------------------------------------------------*/
1370 static void program_mode(unsigned long *dimm_populated,
1371 unsigned char *iic0_dimm_addr,
1372 unsigned long num_dimm_banks,
1373 ddr_cas_id_t *selected_cas,
1374 int *write_recovery)
1375 {
1376 unsigned long dimm_num;
1377 unsigned long sdram_ddr1;
1378 unsigned long t_wr_ns;
1379 unsigned long t_wr_clk;
1380 unsigned long cas_bit;
1381 unsigned long cas_index;
1382 unsigned long sdram_freq;
1383 unsigned long ddr_check;
1384 unsigned long mmode;
1385 unsigned long tcyc_reg;
1386 unsigned long cycle_2_0_clk;
1387 unsigned long cycle_2_5_clk;
1388 unsigned long cycle_3_0_clk;
1389 unsigned long cycle_4_0_clk;
1390 unsigned long cycle_5_0_clk;
1391 unsigned long max_2_0_tcyc_ns_x_100;
1392 unsigned long max_2_5_tcyc_ns_x_100;
1393 unsigned long max_3_0_tcyc_ns_x_100;
1394 unsigned long max_4_0_tcyc_ns_x_100;
1395 unsigned long max_5_0_tcyc_ns_x_100;
1396 unsigned long cycle_time_ns_x_100[3];
1397 PPC4xx_SYS_INFO board_cfg;
1398 unsigned char cas_2_0_available;
1399 unsigned char cas_2_5_available;
1400 unsigned char cas_3_0_available;
1401 unsigned char cas_4_0_available;
1402 unsigned char cas_5_0_available;
1403 unsigned long sdr_ddrpll;
1404
1405 /*------------------------------------------------------------------
1406 * Get the board configuration info.
1407 *-----------------------------------------------------------------*/
1408 get_sys_info(&board_cfg);
1409
1410 mfsdr(SDR0_DDR0, sdr_ddrpll);
1411 sdram_freq = MULDIV64((board_cfg.freqPLB), SDR0_DDR0_DDRM_DECODE(sdr_ddrpll), 1);
1412 debug("sdram_freq=%d\n", sdram_freq);
1413
1414 /*------------------------------------------------------------------
1415 * Handle the timing. We need to find the worst case timing of all
1416 * the dimm modules installed.
1417 *-----------------------------------------------------------------*/
1418 t_wr_ns = 0;
1419 cas_2_0_available = TRUE;
1420 cas_2_5_available = TRUE;
1421 cas_3_0_available = TRUE;
1422 cas_4_0_available = TRUE;
1423 cas_5_0_available = TRUE;
1424 max_2_0_tcyc_ns_x_100 = 10;
1425 max_2_5_tcyc_ns_x_100 = 10;
1426 max_3_0_tcyc_ns_x_100 = 10;
1427 max_4_0_tcyc_ns_x_100 = 10;
1428 max_5_0_tcyc_ns_x_100 = 10;
1429 sdram_ddr1 = TRUE;
1430
1431 /* loop through all the DIMM slots on the board */
1432 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1433 /* If a dimm is installed in a particular slot ... */
1434 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1435 if (dimm_populated[dimm_num] == SDRAM_DDR1)
1436 sdram_ddr1 = TRUE;
1437 else
1438 sdram_ddr1 = FALSE;
1439
1440 /* t_wr_ns = max(t_wr_ns, (unsigned long)dimm_spd[dimm_num][36] >> 2); */ /* not used in this loop. */
1441 cas_bit = spd_read(iic0_dimm_addr[dimm_num], 18);
1442 debug("cas_bit[SPD byte 18]=%02x\n", cas_bit);
1443
1444 /* For a particular DIMM, grab the three CAS values it supports */
1445 for (cas_index = 0; cas_index < 3; cas_index++) {
1446 switch (cas_index) {
1447 case 0:
1448 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 9);
1449 break;
1450 case 1:
1451 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 23);
1452 break;
1453 default:
1454 tcyc_reg = spd_read(iic0_dimm_addr[dimm_num], 25);
1455 break;
1456 }
1457
1458 if ((tcyc_reg & 0x0F) >= 10) {
1459 if ((tcyc_reg & 0x0F) == 0x0D) {
1460 /* Convert from hex to decimal */
1461 cycle_time_ns_x_100[cas_index] =
1462 (((tcyc_reg & 0xF0) >> 4) * 100) + 75;
1463 } else {
1464 printf("ERROR: SPD reported Tcyc is incorrect for DIMM "
1465 "in slot %d\n", (unsigned int)dimm_num);
1466 spd_ddr_init_hang ();
1467 }
1468 } else {
1469 /* Convert from hex to decimal */
1470 cycle_time_ns_x_100[cas_index] =
1471 (((tcyc_reg & 0xF0) >> 4) * 100) +
1472 ((tcyc_reg & 0x0F)*10);
1473 }
1474 debug("cas_index=%d: cycle_time_ns_x_100=%d\n", cas_index,
1475 cycle_time_ns_x_100[cas_index]);
1476 }
1477
1478 /* The rest of this routine determines if CAS 2.0, 2.5, 3.0, 4.0 and 5.0 are */
1479 /* supported for a particular DIMM. */
1480 cas_index = 0;
1481
1482 if (sdram_ddr1) {
1483 /*
1484 * DDR devices use the following bitmask for CAS latency:
1485 * Bit 7 6 5 4 3 2 1 0
1486 * TBD 4.0 3.5 3.0 2.5 2.0 1.5 1.0
1487 */
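				/*
				 * Example: cas_bit = 0x1C means CAS 3.0, 2.5
				 * and 2.0 are supported; SPD bytes 9/23/25
				 * hold the cycle times for the highest,
				 * second and third supported latency.
				 */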
1488 if (((cas_bit & 0x40) == 0x40) && (cas_index < 3) &&
1489 (cycle_time_ns_x_100[cas_index] != 0)) {
1490 max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
1491 cycle_time_ns_x_100[cas_index]);
1492 cas_index++;
1493 } else {
1494 if (cas_index != 0)
1495 cas_index++;
1496 cas_4_0_available = FALSE;
1497 }
1498
1499 if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
1500 (cycle_time_ns_x_100[cas_index] != 0)) {
1501 max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
1502 cycle_time_ns_x_100[cas_index]);
1503 cas_index++;
1504 } else {
1505 if (cas_index != 0)
1506 cas_index++;
1507 cas_3_0_available = FALSE;
1508 }
1509
1510 if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
1511 (cycle_time_ns_x_100[cas_index] != 0)) {
1512 max_2_5_tcyc_ns_x_100 = max(max_2_5_tcyc_ns_x_100,
1513 cycle_time_ns_x_100[cas_index]);
1514 cas_index++;
1515 } else {
1516 if (cas_index != 0)
1517 cas_index++;
1518 cas_2_5_available = FALSE;
1519 }
1520
1521 if (((cas_bit & 0x04) == 0x04) && (cas_index < 3) &&
1522 (cycle_time_ns_x_100[cas_index] != 0)) {
1523 max_2_0_tcyc_ns_x_100 = max(max_2_0_tcyc_ns_x_100,
1524 cycle_time_ns_x_100[cas_index]);
1525 cas_index++;
1526 } else {
1527 if (cas_index != 0)
1528 cas_index++;
1529 cas_2_0_available = FALSE;
1530 }
1531 } else {
1532 /*
1533 * DDR2 devices use the following bitmask for CAS latency:
1534 * Bit 7 6 5 4 3 2 1 0
1535 * TBD 6.0 5.0 4.0 3.0 2.0 TBD TBD
1536 */
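				/*
				 * Example: cas_bit = 0x38 means CAS 5, 4 and
				 * 3 are supported, matching the three cycle
				 * times read above from SPD bytes 9, 23, 25.
				 */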
1537 if (((cas_bit & 0x20) == 0x20) && (cas_index < 3) &&
1538 (cycle_time_ns_x_100[cas_index] != 0)) {
1539 max_5_0_tcyc_ns_x_100 = max(max_5_0_tcyc_ns_x_100,
1540 cycle_time_ns_x_100[cas_index]);
1541 cas_index++;
1542 } else {
1543 if (cas_index != 0)
1544 cas_index++;
1545 cas_5_0_available = FALSE;
1546 }
1547
1548 if (((cas_bit & 0x10) == 0x10) && (cas_index < 3) &&
1549 (cycle_time_ns_x_100[cas_index] != 0)) {
1550 max_4_0_tcyc_ns_x_100 = max(max_4_0_tcyc_ns_x_100,
1551 cycle_time_ns_x_100[cas_index]);
1552 cas_index++;
1553 } else {
1554 if (cas_index != 0)
1555 cas_index++;
1556 cas_4_0_available = FALSE;
1557 }
1558
1559 if (((cas_bit & 0x08) == 0x08) && (cas_index < 3) &&
1560 (cycle_time_ns_x_100[cas_index] != 0)) {
1561 max_3_0_tcyc_ns_x_100 = max(max_3_0_tcyc_ns_x_100,
1562 cycle_time_ns_x_100[cas_index]);
1563 cas_index++;
1564 } else {
1565 if (cas_index != 0)
1566 cas_index++;
1567 cas_3_0_available = FALSE;
1568 }
1569 }
1570 }
1571 }
1572
1573 /*------------------------------------------------------------------
1574 * Set the SDRAM mode, SDRAM_MMODE
1575 *-----------------------------------------------------------------*/
1576 mfsdram(SDRAM_MMODE, mmode);
1577 mmode = mmode & ~(SDRAM_MMODE_WR_MASK | SDRAM_MMODE_DCL_MASK);
1578
1579 /* add 10 here because of rounding problems */
1580 cycle_2_0_clk = MULDIV64(ONE_BILLION, 100, max_2_0_tcyc_ns_x_100) + 10;
1581 cycle_2_5_clk = MULDIV64(ONE_BILLION, 100, max_2_5_tcyc_ns_x_100) + 10;
1582 cycle_3_0_clk = MULDIV64(ONE_BILLION, 100, max_3_0_tcyc_ns_x_100) + 10;
1583 cycle_4_0_clk = MULDIV64(ONE_BILLION, 100, max_4_0_tcyc_ns_x_100) + 10;
1584 cycle_5_0_clk = MULDIV64(ONE_BILLION, 100, max_5_0_tcyc_ns_x_100) + 10;
1585 debug("cycle_3_0_clk=%d\n", cycle_3_0_clk);
1586 debug("cycle_4_0_clk=%d\n", cycle_4_0_clk);
1587 debug("cycle_5_0_clk=%d\n", cycle_5_0_clk);
1588
1589 if (sdram_ddr1 == TRUE) { /* DDR1 */
1590 if ((cas_2_0_available == TRUE) && (sdram_freq <= cycle_2_0_clk)) {
1591 mmode |= SDRAM_MMODE_DCL_DDR1_2_0_CLK;
1592 *selected_cas = DDR_CAS_2;
1593 } else if ((cas_2_5_available == TRUE) && (sdram_freq <= cycle_2_5_clk)) {
1594 mmode |= SDRAM_MMODE_DCL_DDR1_2_5_CLK;
1595 *selected_cas = DDR_CAS_2_5;
1596 } else if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
1597 mmode |= SDRAM_MMODE_DCL_DDR1_3_0_CLK;
1598 *selected_cas = DDR_CAS_3;
1599 } else {
1600 printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1601 printf("Only DIMMs DDR1 with CAS latencies of 2.0, 2.5, and 3.0 are supported.\n");
1602 printf("Make sure the PLB speed is within the supported range of the DIMMs.\n\n");
1603 spd_ddr_init_hang ();
1604 }
1605 } else { /* DDR2 */
1606 debug("cas_3_0_available=%d\n", cas_3_0_available);
1607 debug("cas_4_0_available=%d\n", cas_4_0_available);
1608 debug("cas_5_0_available=%d\n", cas_5_0_available);
1609 if ((cas_3_0_available == TRUE) && (sdram_freq <= cycle_3_0_clk)) {
1610 mmode |= SDRAM_MMODE_DCL_DDR2_3_0_CLK;
1611 *selected_cas = DDR_CAS_3;
1612 } else if ((cas_4_0_available == TRUE) && (sdram_freq <= cycle_4_0_clk)) {
1613 mmode |= SDRAM_MMODE_DCL_DDR2_4_0_CLK;
1614 *selected_cas = DDR_CAS_4;
1615 } else if ((cas_5_0_available == TRUE) && (sdram_freq <= cycle_5_0_clk)) {
1616 mmode |= SDRAM_MMODE_DCL_DDR2_5_0_CLK;
1617 *selected_cas = DDR_CAS_5;
1618 } else {
1619 printf("ERROR: Cannot find a supported CAS latency with the installed DIMMs.\n");
1620 printf("Only DIMMs DDR2 with CAS latencies of 3.0, 4.0, and 5.0 are supported.\n");
1621 printf("Make sure the PLB speed is within the supported range of the DIMMs.\n");
1622 printf("cas3=%d cas4=%d cas5=%d\n",
1623 cas_3_0_available, cas_4_0_available, cas_5_0_available);
1624 printf("sdram_freq=%lu cycle3=%lu cycle4=%lu cycle5=%lu\n\n",
1625 sdram_freq, cycle_3_0_clk, cycle_4_0_clk, cycle_5_0_clk);
1626 spd_ddr_init_hang ();
1627 }
1628 }
1629
1630 if (sdram_ddr1 == TRUE)
1631 mmode |= SDRAM_MMODE_WR_DDR1;
1632 else {
1633
1634 /* loop through all the DIMM slots on the board */
1635 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1636 /* If a dimm is installed in a particular slot ... */
1637 if (dimm_populated[dimm_num] != SDRAM_NONE)
1638 t_wr_ns = max(t_wr_ns,
1639 spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
1640 }
1641
1642 /*
1643 * convert from nanoseconds to ddr clocks
1644 * round up if necessary
1645 */
1646 t_wr_clk = MULDIV64(sdram_freq, t_wr_ns, ONE_BILLION);
1647 ddr_check = MULDIV64(ONE_BILLION, t_wr_clk, t_wr_ns);
1648 if (sdram_freq != ddr_check)
1649 t_wr_clk++;
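/*
 * Worked example with assumed numbers: sdram_freq = 333333333 Hz
 * and t_wr_ns = 15 (SPD byte 36 = 60, i.e. 60 quarter-ns).
 * MULDIV64 truncates to t_wr_clk = 333333333 * 15 / 10^9 = 4, the
 * back-check 10^9 * 4 / 15 = 266666666 differs from sdram_freq, so
 * t_wr_clk is bumped to 5 - 15 ns rounded up to whole DDR clocks.
 */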
1650
1651 switch (t_wr_clk) {
1652 case 0:
1653 case 1:
1654 case 2:
1655 case 3:
1656 mmode |= SDRAM_MMODE_WR_DDR2_3_CYC;
1657 break;
1658 case 4:
1659 mmode |= SDRAM_MMODE_WR_DDR2_4_CYC;
1660 break;
1661 case 5:
1662 mmode |= SDRAM_MMODE_WR_DDR2_5_CYC;
1663 break;
1664 default:
1665 mmode |= SDRAM_MMODE_WR_DDR2_6_CYC;
1666 break;
1667 }
1668 *write_recovery = t_wr_clk;
1669 }
1670
1671 debug("CAS latency = %d\n", *selected_cas);
1672 debug("Write recovery = %d\n", *write_recovery);
1673
1674 mtsdram(SDRAM_MMODE, mmode);
1675 }
1676
1677 /*-----------------------------------------------------------------------------+
1678 * program_rtr.
1679 *-----------------------------------------------------------------------------*/
1680 static void program_rtr(unsigned long *dimm_populated,
1681 unsigned char *iic0_dimm_addr,
1682 unsigned long num_dimm_banks)
1683 {
1684 PPC4xx_SYS_INFO board_cfg;
1685 unsigned long max_refresh_rate;
1686 unsigned long dimm_num;
1687 unsigned long refresh_rate_type;
1688 unsigned long refresh_rate;
1689 unsigned long rint;
1690 unsigned long sdram_freq;
1691 unsigned long sdr_ddrpll;
1692 unsigned long val;
1693
1694 /*------------------------------------------------------------------
1695 * Get the board configuration info.
1696 *-----------------------------------------------------------------*/
1697 get_sys_info(&board_cfg);
1698
1699 /*------------------------------------------------------------------
1700 * Set the SDRAM Refresh Timing Register, SDRAM_RTR
1701 *-----------------------------------------------------------------*/
1702 mfsdr(SDR0_DDR0, sdr_ddrpll);
1703 sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
1704
1705 max_refresh_rate = 0;
1706 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1707 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1708
1709 refresh_rate_type = spd_read(iic0_dimm_addr[dimm_num], 12);
1710 refresh_rate_type &= 0x7F;
1711 switch (refresh_rate_type) {
1712 case 0:
1713 refresh_rate = 15625;
1714 break;
1715 case 1:
1716 refresh_rate = 3906;
1717 break;
1718 case 2:
1719 refresh_rate = 7812;
1720 break;
1721 case 3:
1722 refresh_rate = 31250;
1723 break;
1724 case 4:
1725 refresh_rate = 62500;
1726 break;
1727 case 5:
1728 refresh_rate = 125000;
1729 break;
1730 default:
1731 refresh_rate = 0;
1732 printf("ERROR: DIMM %d unsupported refresh rate/type.\n",
1733 (unsigned int)dimm_num);
1734 printf("Replace the DIMM module with a supported DIMM.\n\n");
1735 spd_ddr_init_hang ();
1736 break;
1737 }
1738
1739 max_refresh_rate = max(max_refresh_rate, refresh_rate);
1740 }
1741 }
1742
1743 rint = MULDIV64(sdram_freq, max_refresh_rate, ONE_BILLION);
1744 mfsdram(SDRAM_RTR, val);
1745 mtsdram(SDRAM_RTR, (val & ~SDRAM_RTR_RINT_MASK) |
1746 (SDRAM_RTR_RINT_ENCODE(rint)));
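/*
 * Worked example with assumed numbers: SPD byte 12 = 0x82 (self-
 * refresh capable, rate type 2) selects a 7812 ns refresh period.
 * At sdram_freq = 200 MHz this gives rint = 200000000 * 7812 / 10^9
 * = 1562 memory clocks between refresh commands, which is the value
 * encoded into SDRAM_RTR[RINT] above.
 */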
1747 }
1748
1749 /*------------------------------------------------------------------
1750 * This routine programs the SDRAM_TRx registers.
1751 *-----------------------------------------------------------------*/
1752 static void program_tr(unsigned long *dimm_populated,
1753 unsigned char *iic0_dimm_addr,
1754 unsigned long num_dimm_banks)
1755 {
1756 unsigned long dimm_num;
1757 unsigned long sdram_ddr1;
1758 unsigned long t_rp_ns;
1759 unsigned long t_rcd_ns;
1760 unsigned long t_rrd_ns;
1761 unsigned long t_ras_ns;
1762 unsigned long t_rc_ns;
1763 unsigned long t_rfc_ns;
1764 unsigned long t_wpc_ns;
1765 unsigned long t_wtr_ns;
1766 unsigned long t_rpc_ns;
1767 unsigned long t_rp_clk;
1768 unsigned long t_rcd_clk;
1769 unsigned long t_rrd_clk;
1770 unsigned long t_ras_clk;
1771 unsigned long t_rc_clk;
1772 unsigned long t_rfc_clk;
1773 unsigned long t_wpc_clk;
1774 unsigned long t_wtr_clk;
1775 unsigned long t_rpc_clk;
1776 unsigned long sdtr1, sdtr2, sdtr3;
1777 unsigned long ddr_check;
1778 unsigned long sdram_freq;
1779 unsigned long sdr_ddrpll;
1780
1781 PPC4xx_SYS_INFO board_cfg;
1782
1783 /*------------------------------------------------------------------
1784 * Get the board configuration info.
1785 *-----------------------------------------------------------------*/
1786 get_sys_info(&board_cfg);
1787
1788 mfsdr(SDR0_DDR0, sdr_ddrpll);
1789 sdram_freq = ((board_cfg.freqPLB) * SDR0_DDR0_DDRM_DECODE(sdr_ddrpll));
1790
1791 /*------------------------------------------------------------------
1792 * Handle the timing. We need to find the worst case timing of all
1793 * the dimm modules installed.
1794 *-----------------------------------------------------------------*/
1795 t_rp_ns = 0;
1796 t_rrd_ns = 0;
1797 t_rcd_ns = 0;
1798 t_ras_ns = 0;
1799 t_rc_ns = 0;
1800 t_rfc_ns = 0;
1801 t_wpc_ns = 0;
1802 t_wtr_ns = 0;
1803 t_rpc_ns = 0;
1804 sdram_ddr1 = TRUE;
1805
1806 /* loop through all the DIMM slots on the board */
1807 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1808 /* If a dimm is installed in a particular slot ... */
1809 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1810 if (dimm_populated[dimm_num] == SDRAM_DDR1)
1811 sdram_ddr1 = TRUE;
1812 else
1813 sdram_ddr1 = FALSE;
1814
1815 t_rcd_ns = max(t_rcd_ns, spd_read(iic0_dimm_addr[dimm_num], 29) >> 2);
1816 t_rrd_ns = max(t_rrd_ns, spd_read(iic0_dimm_addr[dimm_num], 28) >> 2);
1817 t_rp_ns = max(t_rp_ns, spd_read(iic0_dimm_addr[dimm_num], 27) >> 2);
1818 t_ras_ns = max(t_ras_ns, spd_read(iic0_dimm_addr[dimm_num], 30));
1819 t_rc_ns = max(t_rc_ns, spd_read(iic0_dimm_addr[dimm_num], 41));
1820 t_rfc_ns = max(t_rfc_ns, spd_read(iic0_dimm_addr[dimm_num], 42));
1821 }
1822 }
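/*
 * Note on the SPD units used above (JEDEC DDR/DDR2 SPD layout):
 * bytes 27 (tRP), 28 (tRRD) and 29 (tRCD) are stored in units of
 * 0.25 ns, hence the ">> 2", while bytes 30 (tRAS), 41 (tRC) and
 * 42 (tRFC) are in whole nanoseconds (the tRFC/tRC extension bits
 * in byte 40, which very large DDR2 devices may need, are not
 * evaluated here). Example: byte 29 = 0x3C (60) means tRCD = 15 ns.
 */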
1823
1824 /*------------------------------------------------------------------
1825 * Set the SDRAM Timing Reg 1, SDRAM_TR1
1826 *-----------------------------------------------------------------*/
1827 mfsdram(SDRAM_SDTR1, sdtr1);
1828 sdtr1 &= ~(SDRAM_SDTR1_LDOF_MASK | SDRAM_SDTR1_RTW_MASK |
1829 SDRAM_SDTR1_WTWO_MASK | SDRAM_SDTR1_RTRO_MASK);
1830
1831 /* default values */
1832 sdtr1 |= SDRAM_SDTR1_LDOF_2_CLK;
1833 sdtr1 |= SDRAM_SDTR1_RTW_2_CLK;
1834
1835 /* normal operations */
1836 sdtr1 |= SDRAM_SDTR1_WTWO_0_CLK;
1837 sdtr1 |= SDRAM_SDTR1_RTRO_1_CLK;
1838
1839 mtsdram(SDRAM_SDTR1, sdtr1);
1840
1841 /*------------------------------------------------------------------
1842 * Set the SDRAM Timing Reg 2, SDRAM_TR2
1843 *-----------------------------------------------------------------*/
1844 mfsdram(SDRAM_SDTR2, sdtr2);
1845 sdtr2 &= ~(SDRAM_SDTR2_RCD_MASK | SDRAM_SDTR2_WTR_MASK |
1846 SDRAM_SDTR2_XSNR_MASK | SDRAM_SDTR2_WPC_MASK |
1847 SDRAM_SDTR2_RPC_MASK | SDRAM_SDTR2_RP_MASK |
1848 SDRAM_SDTR2_RRD_MASK);
1849
1850 /*
1851 * convert t_rcd from nanoseconds to ddr clocks
1852 * round up if necessary
1853 */
1854 t_rcd_clk = MULDIV64(sdram_freq, t_rcd_ns, ONE_BILLION);
1855 ddr_check = MULDIV64(ONE_BILLION, t_rcd_clk, t_rcd_ns);
1856 if (sdram_freq != ddr_check)
1857 t_rcd_clk++;
1858
1859 switch (t_rcd_clk) {
1860 case 0:
1861 case 1:
1862 sdtr2 |= SDRAM_SDTR2_RCD_1_CLK;
1863 break;
1864 case 2:
1865 sdtr2 |= SDRAM_SDTR2_RCD_2_CLK;
1866 break;
1867 case 3:
1868 sdtr2 |= SDRAM_SDTR2_RCD_3_CLK;
1869 break;
1870 case 4:
1871 sdtr2 |= SDRAM_SDTR2_RCD_4_CLK;
1872 break;
1873 default:
1874 sdtr2 |= SDRAM_SDTR2_RCD_5_CLK;
1875 break;
1876 }
1877
1878 if (sdram_ddr1 == TRUE) { /* DDR1 */
1879 if (sdram_freq < 200000000) {
1880 sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
1881 sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
1882 sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
1883 } else {
1884 sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
1885 sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
1886 sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
1887 }
1888 } else { /* DDR2 */
1889 /* loop through all the DIMM slots on the board */
1890 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
1891 /* If a dimm is installed in a particular slot ... */
1892 if (dimm_populated[dimm_num] != SDRAM_NONE) {
1893 t_wpc_ns = max(t_wpc_ns, spd_read(iic0_dimm_addr[dimm_num], 36) >> 2);
1894 t_wtr_ns = max(t_wtr_ns, spd_read(iic0_dimm_addr[dimm_num], 37) >> 2);
1895 t_rpc_ns = max(t_rpc_ns, spd_read(iic0_dimm_addr[dimm_num], 38) >> 2);
1896 }
1897 }
1898
1899 /*
1900 * convert from nanoseconds to ddr clocks
1901 * round up if necessary
1902 */
1903 t_wpc_clk = MULDIV64(sdram_freq, t_wpc_ns, ONE_BILLION);
1904 ddr_check = MULDIV64(ONE_BILLION, t_wpc_clk, t_wpc_ns);
1905 if (sdram_freq != ddr_check)
1906 t_wpc_clk++;
1907
1908 switch (t_wpc_clk) {
1909 case 0:
1910 case 1:
1911 case 2:
1912 sdtr2 |= SDRAM_SDTR2_WPC_2_CLK;
1913 break;
1914 case 3:
1915 sdtr2 |= SDRAM_SDTR2_WPC_3_CLK;
1916 break;
1917 case 4:
1918 sdtr2 |= SDRAM_SDTR2_WPC_4_CLK;
1919 break;
1920 case 5:
1921 sdtr2 |= SDRAM_SDTR2_WPC_5_CLK;
1922 break;
1923 default:
1924 sdtr2 |= SDRAM_SDTR2_WPC_6_CLK;
1925 break;
1926 }
1927
1928 /*
1929 * convert from nanoseconds to ddr clocks
1930 * round up if necessary
1931 */
1932 t_wtr_clk = MULDIV64(sdram_freq, t_wtr_ns, ONE_BILLION);
1933 ddr_check = MULDIV64(ONE_BILLION, t_wtr_clk, t_wtr_ns);
1934 if (sdram_freq != ddr_check)
1935 t_wtr_clk++;
1936
1937 switch (t_wtr_clk) {
1938 case 0:
1939 case 1:
1940 sdtr2 |= SDRAM_SDTR2_WTR_1_CLK;
1941 break;
1942 case 2:
1943 sdtr2 |= SDRAM_SDTR2_WTR_2_CLK;
1944 break;
1945 case 3:
1946 sdtr2 |= SDRAM_SDTR2_WTR_3_CLK;
1947 break;
1948 default:
1949 sdtr2 |= SDRAM_SDTR2_WTR_4_CLK;
1950 break;
1951 }
1952
1953 /*
1954 * convert from nanoseconds to ddr clocks
1955 * round up if necessary
1956 */
1957 t_rpc_clk = MULDIV64(sdram_freq, t_rpc_ns, ONE_BILLION);
1958 ddr_check = MULDIV64(ONE_BILLION, t_rpc_clk, t_rpc_ns);
1959 if (sdram_freq != ddr_check)
1960 t_rpc_clk++;
1961
1962 switch (t_rpc_clk) {
1963 case 0:
1964 case 1:
1965 case 2:
1966 sdtr2 |= SDRAM_SDTR2_RPC_2_CLK;
1967 break;
1968 case 3:
1969 sdtr2 |= SDRAM_SDTR2_RPC_3_CLK;
1970 break;
1971 default:
1972 sdtr2 |= SDRAM_SDTR2_RPC_4_CLK;
1973 break;
1974 }
1975 }
1976
1977 /* default value */
1978 sdtr2 |= SDRAM_SDTR2_XSNR_16_CLK;
1979
1980 /*
1981 * convert t_rrd from nanoseconds to ddr clocks
1982 * round up if necessary
1983 */
1984 t_rrd_clk = MULDIV64(sdram_freq, t_rrd_ns, ONE_BILLION);
1985 ddr_check = MULDIV64(ONE_BILLION, t_rrd_clk, t_rrd_ns);
1986 if (sdram_freq != ddr_check)
1987 t_rrd_clk++;
1988
1989 if (t_rrd_clk == 3)
1990 sdtr2 |= SDRAM_SDTR2_RRD_3_CLK;
1991 else
1992 sdtr2 |= SDRAM_SDTR2_RRD_2_CLK;
1993
1994 /*
1995 * convert t_rp from nanoseconds to ddr clocks
1996 * round up if necessary
1997 */
1998 t_rp_clk = MULDIV64(sdram_freq, t_rp_ns, ONE_BILLION);
1999 ddr_check = MULDIV64(ONE_BILLION, t_rp_clk, t_rp_ns);
2000 if (sdram_freq != ddr_check)
2001 t_rp_clk++;
2002
2003 switch (t_rp_clk) {
2004 case 0:
2005 case 1:
2006 case 2:
2007 case 3:
2008 sdtr2 |= SDRAM_SDTR2_RP_3_CLK;
2009 break;
2010 case 4:
2011 sdtr2 |= SDRAM_SDTR2_RP_4_CLK;
2012 break;
2013 case 5:
2014 sdtr2 |= SDRAM_SDTR2_RP_5_CLK;
2015 break;
2016 case 6:
2017 sdtr2 |= SDRAM_SDTR2_RP_6_CLK;
2018 break;
2019 default:
2020 sdtr2 |= SDRAM_SDTR2_RP_7_CLK;
2021 break;
2022 }
2023
2024 mtsdram(SDRAM_SDTR2, sdtr2);
2025
2026 /*------------------------------------------------------------------
2027 * Set the SDRAM Timing Reg 3, SDRAM_TR3
2028 *-----------------------------------------------------------------*/
2029 mfsdram(SDRAM_SDTR3, sdtr3);
2030 sdtr3 &= ~(SDRAM_SDTR3_RAS_MASK | SDRAM_SDTR3_RC_MASK |
2031 SDRAM_SDTR3_XCS_MASK | SDRAM_SDTR3_RFC_MASK);
2032
2033 /*
2034 * convert t_ras from nanoseconds to ddr clocks
2035 * round up if necessary
2036 */
2037 t_ras_clk = MULDIV64(sdram_freq, t_ras_ns, ONE_BILLION);
2038 ddr_check = MULDIV64(ONE_BILLION, t_ras_clk, t_ras_ns);
2039 if (sdram_freq != ddr_check)
2040 t_ras_clk++;
2041
2042 sdtr3 |= SDRAM_SDTR3_RAS_ENCODE(t_ras_clk);
2043
2044 /*
2045 * convert t_rc from nanoseconds to ddr clocks
2046 * round up if necessary
2047 */
2048 t_rc_clk = MULDIV64(sdram_freq, t_rc_ns, ONE_BILLION);
2049 ddr_check = MULDIV64(ONE_BILLION, t_rc_clk, t_rc_ns);
2050 if (sdram_freq != ddr_check)
2051 t_rc_clk++;
2052
2053 sdtr3 |= SDRAM_SDTR3_RC_ENCODE(t_rc_clk);
2054
2055 /* default xcs value */
2056 sdtr3 |= SDRAM_SDTR3_XCS;
2057
2058 /*
2059 * convert t_rfc from nanoseconds to ddr clocks
2060 * round up if necessary
2061 */
2062 t_rfc_clk = MULDIV64(sdram_freq, t_rfc_ns, ONE_BILLION);
2063 ddr_check = MULDIV64(ONE_BILLION, t_rfc_clk, t_rfc_ns);
2064 if (sdram_freq != ddr_check)
2065 t_rfc_clk++;
2066
2067 sdtr3 |= SDRAM_SDTR3_RFC_ENCODE(t_rfc_clk);
2068
2069 mtsdram(SDRAM_SDTR3, sdtr3);
2070 }
2071
2072 /*-----------------------------------------------------------------------------+
2073 * program_bxcf.
2074 *-----------------------------------------------------------------------------*/
2075 static void program_bxcf(unsigned long *dimm_populated,
2076 unsigned char *iic0_dimm_addr,
2077 unsigned long num_dimm_banks)
2078 {
2079 unsigned long dimm_num;
2080 unsigned long num_col_addr;
2081 unsigned long num_ranks;
2082 unsigned long num_banks;
2083 unsigned long mode;
2084 unsigned long ind_rank;
2085 unsigned long ind;
2086 unsigned long ind_bank;
2087 unsigned long bank_0_populated;
2088
2089 /*------------------------------------------------------------------
2090 * Set the BxCF regs. First, wipe out the bank config registers.
2091 *-----------------------------------------------------------------*/
2092 mtsdram(SDRAM_MB0CF, 0x00000000);
2093 mtsdram(SDRAM_MB1CF, 0x00000000);
2094 mtsdram(SDRAM_MB2CF, 0x00000000);
2095 mtsdram(SDRAM_MB3CF, 0x00000000);
2096
2097 mode = SDRAM_BXCF_M_BE_ENABLE;
2098
2099 bank_0_populated = 0;
2100
2101 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
2102 if (dimm_populated[dimm_num] != SDRAM_NONE) {
2103 num_col_addr = spd_read(iic0_dimm_addr[dimm_num], 4);
2104 num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
2105 if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
2106 num_ranks = (num_ranks & 0x0F) + 1;
2107 else
2108 num_ranks = num_ranks & 0x0F;
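/*
 * Example (assumed SPD contents): byte 2 = 0x08 identifies DDR2,
 * where byte 5 stores "ranks - 1" in its low nibble, so a two-rank
 * DIMM reports 0x01 and decodes to 2. On DDR1 (byte 2 = 0x07) the
 * low nibble is the rank count itself.
 */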
2109
2110 num_banks = spd_read(iic0_dimm_addr[dimm_num], 17);
2111
2112 for (ind_bank = 0; ind_bank < 2; ind_bank++) {
2113 if (num_banks == 4)
2114 ind = 0;
2115 else
2116 ind = 5 << 8;
2117 switch (num_col_addr) {
2118 case 0x08:
2119 mode |= (SDRAM_BXCF_M_AM_0 + ind);
2120 break;
2121 case 0x09:
2122 mode |= (SDRAM_BXCF_M_AM_1 + ind);
2123 break;
2124 case 0x0A:
2125 mode |= (SDRAM_BXCF_M_AM_2 + ind);
2126 break;
2127 case 0x0B:
2128 mode |= (SDRAM_BXCF_M_AM_3 + ind);
2129 break;
2130 case 0x0C:
2131 mode |= (SDRAM_BXCF_M_AM_4 + ind);
2132 break;
2133 default:
2134 printf("DDR-SDRAM: DIMM %d BxCF configuration.\n",
2135 (unsigned int)dimm_num);
2136 printf("ERROR: Unsupported value for number of "
2137 "column addresses: %d.\n", (unsigned int)num_col_addr);
2138 printf("Replace the DIMM module with a supported DIMM.\n\n");
2139 spd_ddr_init_hang ();
2140 }
2141 }
2142
2143 if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
2144 bank_0_populated = 1;
2145
2146 for (ind_rank = 0; ind_rank < num_ranks; ind_rank++) {
2147 mtsdram(SDRAM_MB0CF +
2148 ((dimm_num + bank_0_populated + ind_rank) << 2),
2149 mode);
2150 }
2151 }
2152 }
2153 }
2154
2155 /*------------------------------------------------------------------
2156 * program memory queue.
2157 *-----------------------------------------------------------------*/
2158 static void program_memory_queue(unsigned long *dimm_populated,
2159 unsigned char *iic0_dimm_addr,
2160 unsigned long num_dimm_banks)
2161 {
2162 unsigned long dimm_num;
2163 phys_size_t rank_base_addr;
2164 unsigned long rank_reg;
2165 phys_size_t rank_size_bytes;
2166 unsigned long rank_size_id;
2167 unsigned long num_ranks;
2168 unsigned long baseadd_size;
2169 unsigned long i;
2170 unsigned long bank_0_populated = 0;
2171 phys_size_t total_size = 0;
2172
2173 /*------------------------------------------------------------------
2174 * Reset the rank_base_address.
2175 *-----------------------------------------------------------------*/
2176 rank_reg = SDRAM_R0BAS;
2177
2178 rank_base_addr = 0x00000000;
2179
2180 for (dimm_num = 0; dimm_num < num_dimm_banks; dimm_num++) {
2181 if (dimm_populated[dimm_num] != SDRAM_NONE) {
2182 num_ranks = spd_read(iic0_dimm_addr[dimm_num], 5);
2183 if ((spd_read(iic0_dimm_addr[dimm_num], 2)) == 0x08)
2184 num_ranks = (num_ranks & 0x0F) + 1;
2185 else
2186 num_ranks = num_ranks & 0x0F;
2187
2188 rank_size_id = spd_read(iic0_dimm_addr[dimm_num], 31);
2189
2190 /*------------------------------------------------------------------
2191 * Set the sizes
2192 *-----------------------------------------------------------------*/
2193 baseadd_size = 0;
2194 switch (rank_size_id) {
2195 case 0x01:
2196 baseadd_size |= SDRAM_RXBAS_SDSZ_1024;
2197 total_size = 1024;
2198 break;
2199 case 0x02:
2200 baseadd_size |= SDRAM_RXBAS_SDSZ_2048;
2201 total_size = 2048;
2202 break;
2203 case 0x04:
2204 baseadd_size |= SDRAM_RXBAS_SDSZ_4096;
2205 total_size = 4096;
2206 break;
2207 case 0x08:
2208 baseadd_size |= SDRAM_RXBAS_SDSZ_32;
2209 total_size = 32;
2210 break;
2211 case 0x10:
2212 baseadd_size |= SDRAM_RXBAS_SDSZ_64;
2213 total_size = 64;
2214 break;
2215 case 0x20:
2216 baseadd_size |= SDRAM_RXBAS_SDSZ_128;
2217 total_size = 128;
2218 break;
2219 case 0x40:
2220 baseadd_size |= SDRAM_RXBAS_SDSZ_256;
2221 total_size = 256;
2222 break;
2223 case 0x80:
2224 baseadd_size |= SDRAM_RXBAS_SDSZ_512;
2225 total_size = 512;
2226 break;
2227 default:
2228 printf("DDR-SDRAM: DIMM %d memory queue configuration.\n",
2229 (unsigned int)dimm_num);
2230 printf("ERROR: Unsupported value for the banksize: %d.\n",
2231 (unsigned int)rank_size_id);
2232 printf("Replace the DIMM module with a supported DIMM.\n\n");
2233 spd_ddr_init_hang ();
2234 }
2235 rank_size_bytes = total_size << 20;
2236
2237 if ((dimm_populated[dimm_num] != SDRAM_NONE) && (dimm_num == 1))
2238 bank_0_populated = 1;
2239
2240 for (i = 0; i < num_ranks; i++) {
2241 mtdcr_any(rank_reg+i+dimm_num+bank_0_populated,
2242 (SDRAM_RXBAS_SDBA_ENCODE(rank_base_addr) |
2243 baseadd_size));
2244 rank_base_addr += rank_size_bytes;
2245 }
2246 }
2247 }
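/*
 * Example (assumed configuration): a single DIMM in slot 0 with two
 * 512 MB ranks yields rank_size_bytes = 0x20000000 and programs
 * SDRAM_R0BAS with base address 0x00000000 and SDRAM_R1BAS with
 * base address 0x20000000, both carrying the SDSZ_512 size field;
 * a DIMM in slot 1 would be mapped to R2BAS/R3BAS instead.
 */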
2248
2249 #if defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
2250 defined(CONFIG_460EX) || defined(CONFIG_460GT) || \
2251 defined(CONFIG_460SX)
2252 /*
2253 * Enable high bandwidth access
2254 * This is currently not used, but with this setup it can
2255 * be used later on, e.g. by the Linux EMAC driver, for a
2256 * performance gain.
2257 */
2258 mtdcr(SDRAM_PLBADDULL, 0x00000000); /* MQ0_BAUL */
2259 mtdcr(SDRAM_PLBADDUHB, 0x00000008); /* MQ0_BAUH */
2260
2261 /*
2262 * Set optimal value for Memory Queue HB/LL Configuration registers
2263 */
2264 mtdcr(SDRAM_CONF1HB, (mfdcr(SDRAM_CONF1HB) & ~SDRAM_CONF1HB_MASK) |
2265 SDRAM_CONF1HB_AAFR | SDRAM_CONF1HB_RPEN | SDRAM_CONF1HB_RFTE |
2266 SDRAM_CONF1HB_RPLM | SDRAM_CONF1HB_WRCL);
2267 mtdcr(SDRAM_CONF1LL, (mfdcr(SDRAM_CONF1LL) & ~SDRAM_CONF1LL_MASK) |
2268 SDRAM_CONF1LL_AAFR | SDRAM_CONF1LL_RPEN | SDRAM_CONF1LL_RFTE |
2269 SDRAM_CONF1LL_RPLM);
2270 mtdcr(SDRAM_CONFPATHB, mfdcr(SDRAM_CONFPATHB) | SDRAM_CONFPATHB_TPEN);
2271 #endif
2272 }
2273
2274 /*-----------------------------------------------------------------------------+
2275 * is_ecc_enabled.
2276 *-----------------------------------------------------------------------------*/
2277 static unsigned long is_ecc_enabled(void)
2278 {
2279 unsigned long dimm_num;
2280 unsigned long ecc;
2281 unsigned long val;
2282
2283 ecc = 0;
2284 /* loop through all the DIMM slots on the board */
2285 for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
2286 mfsdram(SDRAM_MCOPT1, val);
2287 ecc = max(ecc, SDRAM_MCOPT1_MCHK_CHK_DECODE(val));
2288 }
2289
2290 return ecc;
2291 }
2292
2293 #ifdef CONFIG_DDR_ECC
2294 /*-----------------------------------------------------------------------------+
2295 * program_ecc.
2296 *-----------------------------------------------------------------------------*/
2297 static void program_ecc(unsigned long *dimm_populated,
2298 unsigned char *iic0_dimm_addr,
2299 unsigned long num_dimm_banks,
2300 unsigned long tlb_word2_i_value)
2301 {
2302 unsigned long mcopt1;
2303 unsigned long mcopt2;
2304 unsigned long mcstat;
2305 unsigned long dimm_num;
2306 unsigned long ecc;
2307
2308 ecc = 0;
2309 /* loop through all the DIMM slots on the board */
2310 for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
2311 /* If a dimm is installed in a particular slot ... */
2312 if (dimm_populated[dimm_num] != SDRAM_NONE)
2313 ecc = max(ecc, spd_read(iic0_dimm_addr[dimm_num], 11));
2314 }
2315 if (ecc == 0)
2316 return;
2317
2318 if (sdram_memsize() > CONFIG_MAX_MEM_MAPPED) {
2319 printf("\nWarning: Can't enable ECC on systems with more than 2GB of SDRAM!\n");
2320 return;
2321 }
2322
2323 mfsdram(SDRAM_MCOPT1, mcopt1);
2324 mfsdram(SDRAM_MCOPT2, mcopt2);
2325
2326 if ((mcopt1 & SDRAM_MCOPT1_MCHK_MASK) != SDRAM_MCOPT1_MCHK_NON) {
2327 /* DDR controller must be enabled and not in self-refresh. */
2328 mfsdram(SDRAM_MCSTAT, mcstat);
2329 if (((mcopt2 & SDRAM_MCOPT2_DCEN_MASK) == SDRAM_MCOPT2_DCEN_ENABLE)
2330 && ((mcopt2 & SDRAM_MCOPT2_SREN_MASK) == SDRAM_MCOPT2_SREN_EXIT)
2331 && ((mcstat & (SDRAM_MCSTAT_MIC_MASK | SDRAM_MCSTAT_SRMS_MASK))
2332 == (SDRAM_MCSTAT_MIC_COMP | SDRAM_MCSTAT_SRMS_NOT_SF))) {
2333
2334 program_ecc_addr(0, sdram_memsize(), tlb_word2_i_value);
2335 }
2336 }
2337
2338 return;
2339 }
2340
2341 static void wait_ddr_idle(void)
2342 {
2343 u32 val;
2344
2345 do {
2346 mfsdram(SDRAM_MCSTAT, val);
2347 } while ((val & SDRAM_MCSTAT_IDLE_MASK) == SDRAM_MCSTAT_IDLE_NOT);
2348 }
2349
2350 /*-----------------------------------------------------------------------------+
2351 * program_ecc_addr.
2352 *-----------------------------------------------------------------------------*/
2353 static void program_ecc_addr(unsigned long start_address,
2354 unsigned long num_bytes,
2355 unsigned long tlb_word2_i_value)
2356 {
2357 unsigned long current_address;
2358 unsigned long end_address;
2359 unsigned long address_increment;
2360 unsigned long mcopt1;
2361 char str[] = "ECC generation -";
2362 char slash[] = "\\|/-\\|/-";
2363 int loop = 0;
2364 int loopi = 0;
2365
2366 current_address = start_address;
2367 mfsdram(SDRAM_MCOPT1, mcopt1);
2368 if ((mcopt1 & SDRAM_MCOPT1_MCHK_MASK) != SDRAM_MCOPT1_MCHK_NON) {
2369 mtsdram(SDRAM_MCOPT1,
2370 (mcopt1 & ~SDRAM_MCOPT1_MCHK_MASK) | SDRAM_MCOPT1_MCHK_GEN);
2371 sync();
2372 eieio();
2373 wait_ddr_idle();
2374
2375 puts(str);
2376 if (tlb_word2_i_value == TLB_WORD2_I_ENABLE) {
2377 /* ECC bit set method for non-cached memory */
2378 if ((mcopt1 & SDRAM_MCOPT1_DMWD_MASK) == SDRAM_MCOPT1_DMWD_32)
2379 address_increment = 4;
2380 else
2381 address_increment = 8;
2382 end_address = current_address + num_bytes;
2383
2384 while (current_address < end_address) {
2385 *((unsigned long *)current_address) = 0x00000000;
2386 current_address += address_increment;
2387
2388 if ((loop++ % (2 << 20)) == 0) {
2389 putc('\b');
2390 putc(slash[loopi++ % 8]);
2391 }
2392 }
2393
2394 } else {
2395 /* ECC bit set method for cached memory */
2396 dcbz_area(start_address, num_bytes);
2397 /* Write modified dcache lines back to memory */
2398 clean_dcache_range(start_address, start_address + num_bytes);
2399 }
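/*
 * The dcbz/clean sequence works because dcbz establishes zeroed
 * cache lines without first reading the (uninitialized) memory,
 * and the following flush writes every line back through the
 * controller while it is still in ECC generation mode, so each
 * location ends up with a consistent data/checkword pair.
 */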
2400
2401 blank_string(strlen(str));
2402
2403 sync();
2404 eieio();
2405 wait_ddr_idle();
2406
2407 /* clear ECC error reporting registers */
2408 mtsdram(SDRAM_ECCCR, 0xffffffff);
2409 mtdcr(0x4c, 0xffffffff);
2410
2411 mtsdram(SDRAM_MCOPT1,
2412 (mcopt1 & ~SDRAM_MCOPT1_MCHK_MASK) | SDRAM_MCOPT1_MCHK_CHK_REP);
2413 sync();
2414 eieio();
2415 wait_ddr_idle();
2416 }
2417 }
2418 #endif
2419
2420 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
2421 /*-----------------------------------------------------------------------------+
2422 * program_DQS_calibration.
2423 *-----------------------------------------------------------------------------*/
2424 static void program_DQS_calibration(unsigned long *dimm_populated,
2425 unsigned char *iic0_dimm_addr,
2426 unsigned long num_dimm_banks)
2427 {
2428 unsigned long val;
2429
2430 #ifdef HARD_CODED_DQS /* calibration test with hardvalues */
2431 mtsdram(SDRAM_RQDC, 0x80000037);
2432 mtsdram(SDRAM_RDCC, 0x40000000);
2433 mtsdram(SDRAM_RFDC, 0x000001DF);
2434
2435 test();
2436 #else
2437 /*------------------------------------------------------------------
2438 * Program RDCC register
2439 * Read sample cycle auto-update enable
2440 *-----------------------------------------------------------------*/
2441
2442 mfsdram(SDRAM_RDCC, val);
2443 mtsdram(SDRAM_RDCC,
2444 (val & ~(SDRAM_RDCC_RDSS_MASK | SDRAM_RDCC_RSAE_MASK))
2445 | SDRAM_RDCC_RSAE_ENABLE);
2446
2447 /*------------------------------------------------------------------
2448 * Program RQDC register
2449 * Internal DQS delay mechanism enable
2450 *-----------------------------------------------------------------*/
2451 mtsdram(SDRAM_RQDC, (SDRAM_RQDC_RQDE_ENABLE|SDRAM_RQDC_RQFD_ENCODE(0x38)));
2452
2453 /*------------------------------------------------------------------
2454 * Program RFDC register
2455 * Set Feedback Fractional Oversample
2456 * Auto-detect read sample cycle enable
2457 * Set RFOS to 1/4 of memclk cycle (0x3f)
2458 *-----------------------------------------------------------------*/
2459 mfsdram(SDRAM_RFDC, val);
2460 mtsdram(SDRAM_RFDC,
2461 (val & ~(SDRAM_RFDC_ARSE_MASK | SDRAM_RFDC_RFOS_MASK |
2462 SDRAM_RFDC_RFFD_MASK))
2463 | (SDRAM_RFDC_ARSE_ENABLE | SDRAM_RFDC_RFOS_ENCODE(0x3f) |
2464 SDRAM_RFDC_RFFD_ENCODE(0)));
2465
2466 DQS_calibration_process();
2467 #endif
2468 }
2469
2470 static int short_mem_test(void)
2471 {
2472 u32 *membase;
2473 u32 bxcr_num;
2474 u32 bxcf;
2475 int i;
2476 int j;
2477 phys_size_t base_addr;
2478 u32 test[NUMMEMTESTS][NUMMEMWORDS] = {
2479 {0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
2480 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
2481 {0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
2482 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
2483 {0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
2484 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
2485 {0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
2486 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
2487 {0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
2488 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
2489 {0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
2490 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
2491 {0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
2492 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
2493 {0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
2494 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
2495 int l;
2496
2497 for (bxcr_num = 0; bxcr_num < MAXBXCF; bxcr_num++) {
2498 mfsdram(SDRAM_MB0CF + (bxcr_num << 2), bxcf);
2499
2500 /* Banks enabled */
2501 if ((bxcf & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
2502 /* Bank is enabled */
2503
2504 /*
2505 * Only run the test on accessible memory (below 2GB)
2506 */
2507 base_addr = SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+bxcr_num));
2508 if (base_addr >= CONFIG_MAX_MEM_MAPPED)
2509 continue;
2510
2511 /*------------------------------------------------------------------
2512 * Run the short memory test.
2513 *-----------------------------------------------------------------*/
2514 membase = (u32 *)(u32)base_addr;
2515
2516 for (i = 0; i < NUMMEMTESTS; i++) {
2517 for (j = 0; j < NUMMEMWORDS; j++) {
2518 membase[j] = test[i][j];
2519 ppcDcbf((u32)&(membase[j]));
2520 }
2521 sync();
2522 for (l=0; l<NUMLOOPS; l++) {
2523 for (j = 0; j < NUMMEMWORDS; j++) {
2524 if (membase[j] != test[i][j]) {
2525 ppcDcbf((u32)&(membase[j]));
2526 return 0;
2527 }
2528 ppcDcbf((u32)&(membase[j]));
2529 }
2530 sync();
2531 }
2532 }
2533 } /* if bank enabled */
2534 } /* for bxcf_num */
2535
2536 return 1;
2537 }
2538
2539 #ifndef HARD_CODED_DQS
2540 /*-----------------------------------------------------------------------------+
2541 * DQS_calibration_process.
2542 *-----------------------------------------------------------------------------*/
2543 static void DQS_calibration_process(void)
2544 {
2545 unsigned long rfdc_reg;
2546 unsigned long rffd;
2547 unsigned long val;
2548 long rffd_average;
2549 long max_start;
2550 long min_end;
2551 unsigned long begin_rqfd[MAXRANKS];
2552 unsigned long begin_rffd[MAXRANKS];
2553 unsigned long end_rqfd[MAXRANKS];
2554 unsigned long end_rffd[MAXRANKS];
2555 char window_found;
2556 unsigned long dlycal;
2557 unsigned long dly_val;
2558 unsigned long max_pass_length;
2559 unsigned long current_pass_length;
2560 unsigned long current_fail_length;
2561 unsigned long current_start;
2562 long max_end;
2563 unsigned char fail_found;
2564 unsigned char pass_found;
2565 #if !defined(CONFIG_DDR_RQDC_FIXED)
2566 u32 rqdc_reg;
2567 u32 rqfd;
2568 u32 rqfd_start;
2569 u32 rqfd_average;
2570 int loopi = 0;
2571 char str[] = "Auto calibration -";
2572 char slash[] = "\\|/-\\|/-";
2573
2574 /*------------------------------------------------------------------
2575 * Test to determine the best read clock delay tuning bits.
2576 *
2577 * Before the DDR controller can be used, the read clock delay needs to be
2578 * set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
2579 * This value cannot be hardcoded into the program because it changes
2580 * depending on the board's setup and environment.
2581 * To do this, all delay values are tested to see if they
2582 * work or not. By doing this, you get groups of fails with groups of
2583 * passing values. The idea is to find the start and end of a passing
2584 * window and take the center of it to use as the read clock delay.
2585 *
2586 * A failure has to be seen first so that when we hit a pass, we know
2587 * that it is truly the start of the window. If we get passing values
2588 * to start off with, we don't know if we are at the start of the window.
2589 *
2590 * The code assumes that a failure will always be found.
2591 * If a failure is not found, there is no easy way to get the middle
2592 * of the passing window. I guess we can pretty much pick any value
2593 * but some values will be better than others. Since the lowest speed
2594 * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
2595 * from experimentation it is safe to say you will always have a failure.
2596 *-----------------------------------------------------------------*/
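/*
 * Minimal sketch of the window search used below (illustrative
 * only; passes[] is a hypothetical pass/fail map indexed by the
 * delay value):
 *
 *	for (d = 0; d <= max_delay; d++) {
 *		if (passes[d]) {
 *			if (run == 0)
 *				start = d;
 *			if (++run > best_run) {
 *				best_run = run;
 *				best_start = start;
 *				best_end = d;
 *			}
 *		} else {
 *			run = 0;
 *		}
 *	}
 *	chosen_delay = (best_start + best_end) / 2;
 *
 * The real loops additionally require a failure to be seen before
 * the first counted pass and use the delay-line calibration value
 * to decide how long a failing stretch has to be.
 */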
2597
2598 /* first fix RQDC[RQFD] to an average 80 degree phase shift to find RFDC[RFFD] */
2599 rqfd_start = 64; /* test-only: don't know if this is the _best_ start value */
2600
2601 puts(str);
2602
2603 calibration_loop:
2604 mfsdram(SDRAM_RQDC, rqdc_reg);
2605 mtsdram(SDRAM_RQDC, (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
2606 SDRAM_RQDC_RQFD_ENCODE(rqfd_start));
2607 #else /* CONFIG_DDR_RQDC_FIXED */
2608 /*
2609 * On Katmai the complete auto-calibration somehow doesn't seem to
2610 * produce the best results, meaning optimal values for RQFD/RFFD.
2611 * This was discovered by GDA using a high bandwidth scope,
2612 * analyzing the DDR2 signals. GDA provided a fixed value for RQFD,
2613 * so now on Katmai "only" RFFD is auto-calibrated.
2614 */
2615 mtsdram(SDRAM_RQDC, CONFIG_DDR_RQDC_FIXED);
2616 #endif /* CONFIG_DDR_RQDC_FIXED */
2617
2618 max_start = 0;
2619 min_end = 0;
2620 begin_rqfd[0] = 0;
2621 begin_rffd[0] = 0;
2622 begin_rqfd[1] = 0;
2623 begin_rffd[1] = 0;
2624 end_rqfd[0] = 0;
2625 end_rffd[0] = 0;
2626 end_rqfd[1] = 0;
2627 end_rffd[1] = 0;
2628 window_found = FALSE;
2629
2630 max_pass_length = 0;
2631 max_start = 0;
2632 max_end = 0;
2633 current_pass_length = 0;
2634 current_fail_length = 0;
2635 current_start = 0;
2636 window_found = FALSE;
2637 fail_found = FALSE;
2638 pass_found = FALSE;
2639
2640 /*
2641 * get the delay line calibration register value
2642 */
2643 mfsdram(SDRAM_DLCR, dlycal);
2644 dly_val = SDRAM_DLYCAL_DLCV_DECODE(dlycal) << 2;
2645
2646 for (rffd = 0; rffd <= SDRAM_RFDC_RFFD_MAX; rffd++) {
2647 mfsdram(SDRAM_RFDC, rfdc_reg);
2648 rfdc_reg &= ~(SDRAM_RFDC_RFFD_MASK);
2649
2650 /*------------------------------------------------------------------
2651 * Set the timing reg for the test.
2652 *-----------------------------------------------------------------*/
2653 mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd));
2654
2655 /*------------------------------------------------------------------
2656 * See if the rffd value passed.
2657 *-----------------------------------------------------------------*/
2658 if (short_mem_test()) {
2659 if (fail_found == TRUE) {
2660 pass_found = TRUE;
2661 if (current_pass_length == 0)
2662 current_start = rffd;
2663
2664 current_fail_length = 0;
2665 current_pass_length++;
2666
2667 if (current_pass_length > max_pass_length) {
2668 max_pass_length = current_pass_length;
2669 max_start = current_start;
2670 max_end = rffd;
2671 }
2672 }
2673 } else {
2674 current_pass_length = 0;
2675 current_fail_length++;
2676
2677 if (current_fail_length >= (dly_val >> 2)) {
2678 if (fail_found == FALSE) {
2679 fail_found = TRUE;
2680 } else if (pass_found == TRUE) {
2681 window_found = TRUE;
2682 break;
2683 }
2684 }
2685 }
2686 } /* for rffd */
2687
2688 /*------------------------------------------------------------------
2689 * Set the average RFFD value
2690 *-----------------------------------------------------------------*/
2691 rffd_average = ((max_start + max_end) >> 1);
2692
2693 if (rffd_average < 0)
2694 rffd_average = 0;
2695
2696 if (rffd_average > SDRAM_RFDC_RFFD_MAX)
2697 rffd_average = SDRAM_RFDC_RFFD_MAX;
2698 /* now fix RFDC[RFFD] found and find RQDC[RQFD] */
2699 mtsdram(SDRAM_RFDC, rfdc_reg | SDRAM_RFDC_RFFD_ENCODE(rffd_average));
2700
2701 #if !defined(CONFIG_DDR_RQDC_FIXED)
2702 max_pass_length = 0;
2703 max_start = 0;
2704 max_end = 0;
2705 current_pass_length = 0;
2706 current_fail_length = 0;
2707 current_start = 0;
2708 window_found = FALSE;
2709 fail_found = FALSE;
2710 pass_found = FALSE;
2711
2712 for (rqfd = 0; rqfd <= SDRAM_RQDC_RQFD_MAX; rqfd++) {
2713 mfsdram(SDRAM_RQDC, rqdc_reg);
2714 rqdc_reg &= ~(SDRAM_RQDC_RQFD_MASK);
2715
2716 /*------------------------------------------------------------------
2717 * Set the timing reg for the test.
2718 *-----------------------------------------------------------------*/
2719 mtsdram(SDRAM_RQDC, rqdc_reg | SDRAM_RQDC_RQFD_ENCODE(rqfd));
2720
2721 /*------------------------------------------------------------------
2722 * See if the rffd value passed.
2723 *-----------------------------------------------------------------*/
2724 if (short_mem_test()) {
2725 if (fail_found == TRUE) {
2726 pass_found = TRUE;
2727 if (current_pass_length == 0)
2728 current_start = rqfd;
2729
2730 current_fail_length = 0;
2731 current_pass_length++;
2732
2733 if (current_pass_length > max_pass_length) {
2734 max_pass_length = current_pass_length;
2735 max_start = current_start;
2736 max_end = rqfd;
2737 }
2738 }
2739 } else {
2740 current_pass_length = 0;
2741 current_fail_length++;
2742
2743 if (fail_found == FALSE) {
2744 fail_found = TRUE;
2745 } else if (pass_found == TRUE) {
2746 window_found = TRUE;
2747 break;
2748 }
2749 }
2750 }
2751
2752 rqfd_average = ((max_start + max_end) >> 1);
2753
2754 /*------------------------------------------------------------------
2755 * Make sure we found the valid read passing window. Halt if not
2756 *-----------------------------------------------------------------*/
2757 if (window_found == FALSE) {
2758 if (rqfd_start < SDRAM_RQDC_RQFD_MAX) {
2759 putc('\b');
2760 putc(slash[loopi++ % 8]);
2761
2762 /* try again with a different RQFD start value */
2763 rqfd_start++;
2764 goto calibration_loop;
2765 }
2766
2767 printf("\nERROR: Cannot determine a common read delay for the "
2768 "DIMM(s) installed.\n");
2769 debug("%s[%d] ERROR : \n", __FUNCTION__,__LINE__);
2770 ppc4xx_ibm_ddr2_register_dump();
2771 spd_ddr_init_hang ();
2772 }
2773
2774 if (rqfd_average < 0)
2775 rqfd_average = 0;
2776
2777 if (rqfd_average > SDRAM_RQDC_RQFD_MAX)
2778 rqfd_average = SDRAM_RQDC_RQFD_MAX;
2779
2780 mtsdram(SDRAM_RQDC,
2781 (rqdc_reg & ~SDRAM_RQDC_RQFD_MASK) |
2782 SDRAM_RQDC_RQFD_ENCODE(rqfd_average));
2783
2784 blank_string(strlen(str));
2785 #endif /* CONFIG_DDR_RQDC_FIXED */
2786
2787 /*
2788 * Now complete RDSS configuration as mentioned on page 7 of the AMCC
2789 * PowerPC440SP/SPe DDR2 application note:
2790 * "DDR1/DDR2 Initialization Sequence and Dynamic Tuning"
2791 */
2792 mfsdram(SDRAM_RTSR, val);
2793 if ((val & SDRAM_RTSR_TRK1SM_MASK) == SDRAM_RTSR_TRK1SM_ATPLS1) {
2794 mfsdram(SDRAM_RDCC, val);
2795 if ((val & SDRAM_RDCC_RDSS_MASK) != SDRAM_RDCC_RDSS_T4) {
2796 val += 0x40000000;
2797 mtsdram(SDRAM_RDCC, val);
2798 }
2799 }
2800
2801 mfsdram(SDRAM_DLCR, val);
2802 debug("%s[%d] DLCR: 0x%08X\n", __FUNCTION__, __LINE__, val);
2803 mfsdram(SDRAM_RQDC, val);
2804 debug("%s[%d] RQDC: 0x%08X\n", __FUNCTION__, __LINE__, val);
2805 mfsdram(SDRAM_RFDC, val);
2806 debug("%s[%d] RFDC: 0x%08X\n", __FUNCTION__, __LINE__, val);
2807 mfsdram(SDRAM_RDCC, val);
2808 debug("%s[%d] RDCC: 0x%08X\n", __FUNCTION__, __LINE__, val);
2809 }
2810 #else /* calibration test with hardvalues */
2811 /*-----------------------------------------------------------------------------+
2812 * DQS_calibration_process.
2813 *-----------------------------------------------------------------------------*/
2814 static void test(void)
2815 {
2816 unsigned long dimm_num;
2817 unsigned long ecc_temp;
2818 unsigned long i, j;
2819 unsigned long *membase;
2820 unsigned long bxcf[MAXRANKS];
2821 unsigned long val;
2822 char window_found;
2823 char begin_found[MAXDIMMS];
2824 char end_found[MAXDIMMS];
2825 char search_end[MAXDIMMS];
2826 unsigned long test[NUMMEMTESTS][NUMMEMWORDS] = {
2827 {0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF,
2828 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFF},
2829 {0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000,
2830 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000, 0x00000000},
2831 {0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555,
2832 0xAAAAAAAA, 0xAAAAAAAA, 0x55555555, 0x55555555},
2833 {0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA,
2834 0x55555555, 0x55555555, 0xAAAAAAAA, 0xAAAAAAAA},
2835 {0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A,
2836 0xA5A5A5A5, 0xA5A5A5A5, 0x5A5A5A5A, 0x5A5A5A5A},
2837 {0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5,
2838 0x5A5A5A5A, 0x5A5A5A5A, 0xA5A5A5A5, 0xA5A5A5A5},
2839 {0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA,
2840 0xAA55AA55, 0xAA55AA55, 0x55AA55AA, 0x55AA55AA},
2841 {0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55,
2842 0x55AA55AA, 0x55AA55AA, 0xAA55AA55, 0xAA55AA55} };
2843
2844 /*------------------------------------------------------------------
2845 * Test to determine the best read clock delay tuning bits.
2846 *
2847 * Before the DDR controller can be used, the read clock delay needs to be
2848 * set. This is SDRAM_RQDC[RQFD] and SDRAM_RFDC[RFFD].
2849 * This value cannot be hardcoded into the program because it changes
2850 * depending on the board's setup and environment.
2851 * To do this, all delay values are tested to see if they
2852 * work or not. By doing this, you get groups of fails with groups of
2853 * passing values. The idea is to find the start and end of a passing
2854 * window and take the center of it to use as the read clock delay.
2855 *
2856 * A failure has to be seen first so that when we hit a pass, we know
2857 * that it is truly the start of the window. If we get passing values
2858 * to start off with, we don't know if we are at the start of the window.
2859 *
2860 * The code assumes that a failure will always be found.
2861 * If a failure is not found, there is no easy way to get the middle
2862 * of the passing window. I guess we can pretty much pick any value
2863 * but some values will be better than others. Since the lowest speed
2864 * we can clock the DDR interface at is 200 MHz (2x 100 MHz PLB speed),
2865 * from experimentation it is safe to say you will always have a failure.
2866 *-----------------------------------------------------------------*/
2867 mfsdram(SDRAM_MCOPT1, ecc_temp);
2868 ecc_temp &= SDRAM_MCOPT1_MCHK_MASK;
2869 mfsdram(SDRAM_MCOPT1, val);
2870 mtsdram(SDRAM_MCOPT1, (val & ~SDRAM_MCOPT1_MCHK_MASK) |
2871 SDRAM_MCOPT1_MCHK_NON);
2872
2873 window_found = FALSE;
2874 begin_found[0] = FALSE;
2875 end_found[0] = FALSE;
2876 search_end[0] = FALSE;
2877 begin_found[1] = FALSE;
2878 end_found[1] = FALSE;
2879 search_end[1] = FALSE;
2880
2881 for (dimm_num = 0; dimm_num < MAXDIMMS; dimm_num++) {
2882 mfsdram(SDRAM_MB0CF + (dimm_num << 2), bxcf[dimm_num]);
2883
2884 /* Banks enabled */
2885 if ((bxcf[dimm_num] & SDRAM_BXCF_M_BE_MASK) == SDRAM_BXCF_M_BE_ENABLE) {
2886
2887 /* Bank is enabled */
2888 membase =
2889 (unsigned long*)(SDRAM_RXBAS_SDBA_DECODE(mfdcr_any(SDRAM_R0BAS+dimm_num)));
2890
2891 /*------------------------------------------------------------------
2892 * Run the short memory test.
2893 *-----------------------------------------------------------------*/
2894 for (i = 0; i < NUMMEMTESTS; i++) {
2895 for (j = 0; j < NUMMEMWORDS; j++) {
2896 membase[j] = test[i][j];
2897 ppcDcbf((u32)&(membase[j]));
2898 }
2899 sync();
2900 for (j = 0; j < NUMMEMWORDS; j++) {
2901 if (membase[j] != test[i][j]) {
2902 ppcDcbf((u32)&(membase[j]));
2903 break;
2904 }
2905 ppcDcbf((u32)&(membase[j]));
2906 }
2907 sync();
2908 if (j < NUMMEMWORDS)
2909 break;
2910 }
2911
2912 /*------------------------------------------------------------------
2913 * See if the rffd value passed.
2914 *-----------------------------------------------------------------*/
2915 if (i < NUMMEMTESTS) {
2916 if ((end_found[dimm_num] == FALSE) &&
2917 (search_end[dimm_num] == TRUE)) {
2918 end_found[dimm_num] = TRUE;
2919 }
2920 if ((end_found[0] == TRUE) &&
2921 (end_found[1] == TRUE))
2922 break;
2923 } else {
2924 if (begin_found[dimm_num] == FALSE) {
2925 begin_found[dimm_num] = TRUE;
2926 search_end[dimm_num] = TRUE;
2927 }
2928 }
2929 } else {
2930 begin_found[dimm_num] = TRUE;
2931 end_found[dimm_num] = TRUE;
2932 }
2933 }
2934
2935 if ((begin_found[0] == TRUE) && (begin_found[1] == TRUE))
2936 window_found = TRUE;
2937
2938 /*------------------------------------------------------------------
2939 * Make sure we found the valid read passing window. Halt if not
2940 *-----------------------------------------------------------------*/
2941 if (window_found == FALSE) {
2942 printf("ERROR: Cannot determine a common read delay for the "
2943 "DIMM(s) installed.\n");
2944 spd_ddr_init_hang ();
2945 }
2946
2947 /*------------------------------------------------------------------
2948 * Restore the ECC variable to what it originally was
2949 *-----------------------------------------------------------------*/
2950 mtsdram(SDRAM_MCOPT1,
2951 (ppcMfdcr_sdram(SDRAM_MCOPT1) & ~SDRAM_MCOPT1_MCHK_MASK)
2952 | ecc_temp);
2953 }
2954 #endif /* !HARD_CODED_DQS */
2955 #endif /* !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION) */
2956
2957 #else /* CONFIG_SPD_EEPROM */
2958
2959 /*-----------------------------------------------------------------------------
2960 * Function: initdram
2961 * Description: Configures the PPC405EX(r) DDR1/DDR2 SDRAM memory
2962 * banks. The configuration is performed using static, compile-
2963 * time parameters.
2964 *---------------------------------------------------------------------------*/
2965 phys_size_t initdram(int board_type)
2966 {
2967 /*
2968 * Only run this SDRAM init code once. For NAND booting
2969 * targets like Kilauea, we call initdram() early from the
2970 * 4k NAND booting image (CONFIG_NAND_SPL) from nand_boot().
2971 * Later on the NAND U-Boot image runs (CONFIG_NAND_U_BOOT)
2972 * which calls initdram() again. This time the controller
2973 * mustn't be reconfigured again since we're already running
2974 * from SDRAM.
2975 */
2976 #if !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL)
2977 unsigned long val;
2978
2979 /* Set Memory Bank Configuration Registers */
2980
2981 mtsdram(SDRAM_MB0CF, CONFIG_SYS_SDRAM0_MB0CF);
2982 mtsdram(SDRAM_MB1CF, CONFIG_SYS_SDRAM0_MB1CF);
2983 mtsdram(SDRAM_MB2CF, CONFIG_SYS_SDRAM0_MB2CF);
2984 mtsdram(SDRAM_MB3CF, CONFIG_SYS_SDRAM0_MB3CF);
2985
2986 /* Set Memory Clock Timing Register */
2987
2988 mtsdram(SDRAM_CLKTR, CONFIG_SYS_SDRAM0_CLKTR);
2989
2990 /* Set Refresh Time Register */
2991
2992 mtsdram(SDRAM_RTR, CONFIG_SYS_SDRAM0_RTR);
2993
2994 /* Set SDRAM Timing Registers */
2995
2996 mtsdram(SDRAM_SDTR1, CONFIG_SYS_SDRAM0_SDTR1);
2997 mtsdram(SDRAM_SDTR2, CONFIG_SYS_SDRAM0_SDTR2);
2998 mtsdram(SDRAM_SDTR3, CONFIG_SYS_SDRAM0_SDTR3);
2999
3000 /* Set Mode and Extended Mode Registers */
3001
3002 mtsdram(SDRAM_MMODE, CONFIG_SYS_SDRAM0_MMODE);
3003 mtsdram(SDRAM_MEMODE, CONFIG_SYS_SDRAM0_MEMODE);
3004
3005 /* Set Memory Controller Options 1 Register */
3006
3007 mtsdram(SDRAM_MCOPT1, CONFIG_SYS_SDRAM0_MCOPT1);
3008
3009 /* Set Manual Initialization Control Registers */
3010
3011 mtsdram(SDRAM_INITPLR0, CONFIG_SYS_SDRAM0_INITPLR0);
3012 mtsdram(SDRAM_INITPLR1, CONFIG_SYS_SDRAM0_INITPLR1);
3013 mtsdram(SDRAM_INITPLR2, CONFIG_SYS_SDRAM0_INITPLR2);
3014 mtsdram(SDRAM_INITPLR3, CONFIG_SYS_SDRAM0_INITPLR3);
3015 mtsdram(SDRAM_INITPLR4, CONFIG_SYS_SDRAM0_INITPLR4);
3016 mtsdram(SDRAM_INITPLR5, CONFIG_SYS_SDRAM0_INITPLR5);
3017 mtsdram(SDRAM_INITPLR6, CONFIG_SYS_SDRAM0_INITPLR6);
3018 mtsdram(SDRAM_INITPLR7, CONFIG_SYS_SDRAM0_INITPLR7);
3019 mtsdram(SDRAM_INITPLR8, CONFIG_SYS_SDRAM0_INITPLR8);
3020 mtsdram(SDRAM_INITPLR9, CONFIG_SYS_SDRAM0_INITPLR9);
3021 mtsdram(SDRAM_INITPLR10, CONFIG_SYS_SDRAM0_INITPLR10);
3022 mtsdram(SDRAM_INITPLR11, CONFIG_SYS_SDRAM0_INITPLR11);
3023 mtsdram(SDRAM_INITPLR12, CONFIG_SYS_SDRAM0_INITPLR12);
3024 mtsdram(SDRAM_INITPLR13, CONFIG_SYS_SDRAM0_INITPLR13);
3025 mtsdram(SDRAM_INITPLR14, CONFIG_SYS_SDRAM0_INITPLR14);
3026 mtsdram(SDRAM_INITPLR15, CONFIG_SYS_SDRAM0_INITPLR15);
3027
3028 /* Set On-Die Termination Registers */
3029
3030 mtsdram(SDRAM_CODT, CONFIG_SYS_SDRAM0_CODT);
3031 mtsdram(SDRAM_MODT0, CONFIG_SYS_SDRAM0_MODT0);
3032 mtsdram(SDRAM_MODT1, CONFIG_SYS_SDRAM0_MODT1);
3033
3034 /* Set Write Timing Register */
3035
3036 mtsdram(SDRAM_WRDTR, CONFIG_SYS_SDRAM0_WRDTR);
3037
3038 /*
3039 * Start Initialization by SDRAM0_MCOPT2[SREN] = 0 and
3040 * SDRAM0_MCOPT2[IPTR] = 1
3041 */
3042
3043 mtsdram(SDRAM_MCOPT2, (SDRAM_MCOPT2_SREN_EXIT |
3044 SDRAM_MCOPT2_IPTR_EXECUTE));
3045
3046 /*
3047 * Poll SDRAM0_MCSTAT[MIC] for assertion to indicate the
3048 * completion of initialization.
3049 */
3050
3051 do {
3052 mfsdram(SDRAM_MCSTAT, val);
3053 } while ((val & SDRAM_MCSTAT_MIC_MASK) != SDRAM_MCSTAT_MIC_COMP);
3054
3055 /* Set Delay Control Registers */
3056
3057 mtsdram(SDRAM_DLCR, CONFIG_SYS_SDRAM0_DLCR);
3058
3059 #if !defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
3060 mtsdram(SDRAM_RDCC, CONFIG_SYS_SDRAM0_RDCC);
3061 mtsdram(SDRAM_RQDC, CONFIG_SYS_SDRAM0_RQDC);
3062 mtsdram(SDRAM_RFDC, CONFIG_SYS_SDRAM0_RFDC);
3063 #endif /* !CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
3064
3065 /*
3066 * Enable Controller by SDRAM0_MCOPT2[DCEN] = 1:
3067 */
3068
3069 mfsdram(SDRAM_MCOPT2, val);
3070 mtsdram(SDRAM_MCOPT2, val | SDRAM_MCOPT2_DCEN_ENABLE);
3071
3072 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
3073 #if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
3074 /*------------------------------------------------------------------
3075 | DQS calibration.
3076 +-----------------------------------------------------------------*/
3077 DQS_autocalibration();
3078 #endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */
3079 #endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
3080
3081 #if defined(CONFIG_DDR_ECC)
3082 ecc_init(CONFIG_SYS_SDRAM_BASE, CONFIG_SYS_MBYTES_SDRAM << 20);
3083 #endif /* defined(CONFIG_DDR_ECC) */
3084
3085 ppc4xx_ibm_ddr2_register_dump();
3086
3087 #if defined(CONFIG_PPC4xx_DDR_AUTOCALIBRATION)
3088 /*
3089 * Clear potential errors resulting from auto-calibration.
3090 * If not done, then we could get an interrupt later on when
3091 * exceptions are enabled.
3092 */
3093 set_mcsr(get_mcsr());
3094 #endif /* CONFIG_PPC4xx_DDR_AUTOCALIBRATION */
3095
3096 #endif /* !defined(CONFIG_NAND_U_BOOT) || defined(CONFIG_NAND_SPL) */
3097
3098 return (CONFIG_SYS_MBYTES_SDRAM << 20);
3099 }
3100 #endif /* CONFIG_SPD_EEPROM */
3101
3102 #if !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL)
3103 #if defined(CONFIG_440)
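/*
 * mfdcr()/mtdcr() expand to PowerPC instructions that encode the
 * DCR number as an immediate operand, so the register number must
 * be a compile-time constant. These helpers translate a run-time
 * rank register index (SDRAM_R0BAS + n) into the matching
 * constant-operand access.
 */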
3104 u32 mfdcr_any(u32 dcr)
3105 {
3106 u32 val;
3107
3108 switch (dcr) {
3109 case SDRAM_R0BAS + 0:
3110 val = mfdcr(SDRAM_R0BAS + 0);
3111 break;
3112 case SDRAM_R0BAS + 1:
3113 val = mfdcr(SDRAM_R0BAS + 1);
3114 break;
3115 case SDRAM_R0BAS + 2:
3116 val = mfdcr(SDRAM_R0BAS + 2);
3117 break;
3118 case SDRAM_R0BAS + 3:
3119 val = mfdcr(SDRAM_R0BAS + 3);
3120 break;
3121 default:
3122 printf("DCR %d not defined in case statement!!!\n", dcr);
3123 val = 0; /* just to satisfy the compiler */
3124 }
3125
3126 return val;
3127 }
3128
3129 void mtdcr_any(u32 dcr, u32 val)
3130 {
3131 switch (dcr) {
3132 case SDRAM_R0BAS + 0:
3133 mtdcr(SDRAM_R0BAS + 0, val);
3134 break;
3135 case SDRAM_R0BAS + 1:
3136 mtdcr(SDRAM_R0BAS + 1, val);
3137 break;
3138 case SDRAM_R0BAS + 2:
3139 mtdcr(SDRAM_R0BAS + 2, val);
3140 break;
3141 case SDRAM_R0BAS + 3:
3142 mtdcr(SDRAM_R0BAS + 3, val);
3143 break;
3144 default:
3145 printf("DCR %d not defined in case statement!!!\n", dcr);
3146 }
3147 }
3148 #endif /* defined(CONFIG_440) */
3149
3150 void blank_string(int size)
3151 {
3152 int i;
3153
3154 for (i = 0; i < size; i++)
3155 putc('\b');
3156 for (i = 0; i < size; i++)
3157 putc(' ');
3158 for (i = 0; i < size; i++)
3159 putc('\b');
3160 }
3161 #endif /* !defined(CONFIG_NAND_U_BOOT) && !defined(CONFIG_NAND_SPL) */
3162
3163 inline void ppc4xx_ibm_ddr2_register_dump(void)
3164 {
3165 #if defined(DEBUG)
3166 printf("\nPPC4xx IBM DDR2 Register Dump:\n");
3167
3168 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3169 defined(CONFIG_460EX) || defined(CONFIG_460GT))
3170 PPC4xx_IBM_DDR2_DUMP_REGISTER(R0BAS);
3171 PPC4xx_IBM_DDR2_DUMP_REGISTER(R1BAS);
3172 PPC4xx_IBM_DDR2_DUMP_REGISTER(R2BAS);
3173 PPC4xx_IBM_DDR2_DUMP_REGISTER(R3BAS);
3174 #endif /* (defined(CONFIG_440SP) || ... */
3175 #if defined(CONFIG_405EX)
3176 PPC4xx_IBM_DDR2_DUMP_REGISTER(BESR);
3177 PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARL);
3178 PPC4xx_IBM_DDR2_DUMP_REGISTER(BEARH);
3179 PPC4xx_IBM_DDR2_DUMP_REGISTER(WMIRQ);
3180 PPC4xx_IBM_DDR2_DUMP_REGISTER(PLBOPT);
3181 PPC4xx_IBM_DDR2_DUMP_REGISTER(PUABA);
3182 #endif /* defined(CONFIG_405EX) */
3183 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB0CF);
3184 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB1CF);
3185 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB2CF);
3186 PPC4xx_IBM_DDR2_DUMP_REGISTER(MB3CF);
3187 PPC4xx_IBM_DDR2_DUMP_REGISTER(MCSTAT);
3188 PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT1);
3189 PPC4xx_IBM_DDR2_DUMP_REGISTER(MCOPT2);
3190 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT0);
3191 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT1);
3192 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT2);
3193 PPC4xx_IBM_DDR2_DUMP_REGISTER(MODT3);
3194 PPC4xx_IBM_DDR2_DUMP_REGISTER(CODT);
3195 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3196 defined(CONFIG_460EX) || defined(CONFIG_460GT))
3197 PPC4xx_IBM_DDR2_DUMP_REGISTER(VVPR);
3198 PPC4xx_IBM_DDR2_DUMP_REGISTER(OPARS);
3199 /*
3200 * OPART is only used as a trigger register.
3201 *
3202 * No data is contained in this register, and reading or writing
3203 * to it can cause bad things to happen (hangs). Just skip it and
3204 * report "N/A".
3205 */
3206 printf("%20s = N/A\n", "SDRAM_OPART");
3207 #endif /* defined(CONFIG_440SP) || ... */
3208 PPC4xx_IBM_DDR2_DUMP_REGISTER(RTR);
3209 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR0);
3210 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR1);
3211 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR2);
3212 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR3);
3213 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR4);
3214 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR5);
3215 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR6);
3216 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR7);
3217 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR8);
3218 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR9);
3219 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR10);
3220 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR11);
3221 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR12);
3222 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR13);
3223 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR14);
3224 PPC4xx_IBM_DDR2_DUMP_REGISTER(INITPLR15);
3225 PPC4xx_IBM_DDR2_DUMP_REGISTER(RQDC);
3226 PPC4xx_IBM_DDR2_DUMP_REGISTER(RFDC);
3227 PPC4xx_IBM_DDR2_DUMP_REGISTER(RDCC);
3228 PPC4xx_IBM_DDR2_DUMP_REGISTER(DLCR);
3229 PPC4xx_IBM_DDR2_DUMP_REGISTER(CLKTR);
3230 PPC4xx_IBM_DDR2_DUMP_REGISTER(WRDTR);
3231 PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR1);
3232 PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR2);
3233 PPC4xx_IBM_DDR2_DUMP_REGISTER(SDTR3);
3234 PPC4xx_IBM_DDR2_DUMP_REGISTER(MMODE);
3235 PPC4xx_IBM_DDR2_DUMP_REGISTER(MEMODE);
3236 PPC4xx_IBM_DDR2_DUMP_REGISTER(ECCCR);
3237 #if (defined(CONFIG_440SP) || defined(CONFIG_440SPE) || \
3238 defined(CONFIG_460EX) || defined(CONFIG_460GT))
3239 PPC4xx_IBM_DDR2_DUMP_REGISTER(CID);
3240 #endif /* defined(CONFIG_440SP) || ... */
3241 PPC4xx_IBM_DDR2_DUMP_REGISTER(RID);
3242 PPC4xx_IBM_DDR2_DUMP_REGISTER(FCSR);
3243 PPC4xx_IBM_DDR2_DUMP_REGISTER(RTSR);
3244 #endif /* defined(DEBUG) */
3245 }
3246
3247 #endif /* CONFIG_SDRAM_PPC4xx_IBM_DDR2 */