/*
 * (C) Copyright 2006 Freescale Semiconductor, Inc.
 *
 * (C) Copyright 2006
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 *
 * Copyright (C) 2004-2006 Freescale Semiconductor, Inc.
 * (C) Copyright 2003 Motorola Inc.
 * Xianghua Xiao (X.Xiao@motorola.com)
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */

#include <common.h>
#include <asm/processor.h>
#include <i2c.h>
#include <spd.h>
#include <asm/mmu.h>
#include <spd_sdram.h>

void board_add_ram_info(int use_default)
{
        volatile immap_t *immap = (immap_t *) CFG_IMMR;
        volatile ddr83xx_t *ddr = &immap->ddr;

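        /*
         * The SDRAM_TYPE field of sdram_cfg reads 2 for DDR1 and 3 for
         * DDR2 (see the sdram_cfg layout notes in spd_sdram() below),
         * hence the "- 1" to print the DDR generation.
         */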
        printf(" (DDR%d", ((ddr->sdram_cfg & SDRAM_CFG_SDRAM_TYPE_MASK)
               >> SDRAM_CFG_SDRAM_TYPE_SHIFT) - 1);

        if (ddr->sdram_cfg & SDRAM_CFG_32_BE)
                puts(", 32-bit");
        else
                puts(", 64-bit");

        if (ddr->sdram_cfg & SDRAM_CFG_ECC_EN)
                puts(", ECC on)");
        else
                puts(", ECC off)");

#if defined(CFG_LB_SDRAM) && defined(CFG_LBC_SDRAM_SIZE)
        puts("\nSDRAM: ");
        print_size(CFG_LBC_SDRAM_SIZE * 1024 * 1024, " (local bus)");
#endif
}

#ifdef CONFIG_SPD_EEPROM

DECLARE_GLOBAL_DATA_PTR;

#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRC)
extern void dma_init(void);
extern uint dma_check(void);
extern int dma_xfer(void *dest, uint count, void *src);
#endif

#ifndef CFG_READ_SPD
#define CFG_READ_SPD    i2c_read
#endif

/*
 * Convert picoseconds into clock cycles (rounding up if needed).
 */
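/*
 * For example, with gd->ddr_clk at 266 MHz the DDR bus clock is 133 MHz
 * (roughly 7.5 ns per cycle), so picos_to_clk(15000) rounds 15 ns up to
 * 2 clocks.
 */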
int
picos_to_clk(int picos)
{
        unsigned int ddr_bus_clk;
        int clks;

        ddr_bus_clk = gd->ddr_clk >> 1;
        clks = picos / (1000000000 / (ddr_bus_clk / 1000));
        if (picos % (1000000000 / (ddr_bus_clk / 1000)) != 0)
                clks++;

        return clks;
}

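/*
 * Decode SPD byte 31 (rank density): bits 2..7 select 16 MB up to 512 MB
 * and bits 0..1 select 1 GB and 2 GB, so rotating the byte right by two
 * and scaling by 16 MB (<< 24) yields the rank size in bytes,
 * e.g. 0x40 -> 256 MB.
 */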
unsigned int banksize(unsigned char row_dens)
{
        return ((row_dens >> 2) | ((row_dens & 3) << 6)) << 24;
}

int read_spd(uint addr)
{
        return ((int)addr);
}

#undef SPD_DEBUG
#ifdef SPD_DEBUG
static void spd_debug(spd_eeprom_t *spd)
{
        printf("\nDIMM type:       %-18.18s\n", spd->mpart);
        printf("SPD size:        %d\n", spd->info_size);
        printf("EEPROM size:     %d\n", 1 << spd->chip_size);
        printf("Memory type:     %d\n", spd->mem_type);
        printf("Row addr:        %d\n", spd->nrow_addr);
        printf("Column addr:     %d\n", spd->ncol_addr);
        printf("# of rows:       %d\n", spd->nrows);
        printf("Row density:     %d\n", spd->row_dens);
        printf("# of banks:      %d\n", spd->nbanks);
        printf("Data width:      %d\n",
               256 * spd->dataw_msb + spd->dataw_lsb);
        printf("Chip width:      %d\n", spd->primw);
        printf("Refresh rate:    %02X\n", spd->refresh);
        printf("CAS latencies:   %02X\n", spd->cas_lat);
        printf("Write latencies: %02X\n", spd->write_lat);
        printf("tRP:             %d\n", spd->trp);
        printf("tRCD:            %d\n", spd->trcd);
        printf("\n");
}
#endif /* SPD_DEBUG */

long int spd_sdram()
{
        volatile immap_t *immap = (immap_t *)CFG_IMMR;
        volatile ddr83xx_t *ddr = &immap->ddr;
        volatile law83xx_t *ecm = &immap->sysconf.ddrlaw[0];
        spd_eeprom_t spd;
        unsigned int n_ranks;
        unsigned int odt_rd_cfg, odt_wr_cfg;
        unsigned char twr_clk, twtr_clk;
        unsigned int sdram_type;
        unsigned int memsize;
        unsigned int law_size;
        unsigned char caslat, caslat_ctrl;
        unsigned int trfc, trfc_clk, trfc_low, trfc_high;
        unsigned int trcd_clk, trtp_clk;
        unsigned char cke_min_clk;
        unsigned char add_lat, wr_lat;
        unsigned char wr_data_delay;
        unsigned char four_act;
        unsigned char cpo;
        unsigned char burstlen;
        unsigned char odt_cfg, mode_odt_enable;
        unsigned int max_bus_clk;
        unsigned int max_data_rate, effective_data_rate;
        unsigned int ddrc_clk;
        unsigned int refresh_clk;
        unsigned int sdram_cfg;
        unsigned int ddrc_ecc_enable;
        unsigned int pvr = get_pvr();

        /* Read SPD parameters with I2C */
        CFG_READ_SPD(SPD_EEPROM_ADDRESS, 0, 1, (uchar *)&spd, sizeof(spd));
#ifdef SPD_DEBUG
        spd_debug(&spd);
#endif
        /* Check the memory type */
        if (spd.mem_type != SPD_MEMTYPE_DDR && spd.mem_type != SPD_MEMTYPE_DDR2) {
                debug("DDR: Module mem type is %02X\n", spd.mem_type);
                return 0;
        }

        /* Check the number of physical banks */
        if (spd.mem_type == SPD_MEMTYPE_DDR) {
                n_ranks = spd.nrows;
        } else {
                n_ranks = (spd.nrows & 0x7) + 1;
        }

        if (n_ranks > 2) {
                printf("DDR: The number of physical banks is %02X\n", n_ranks);
                return 0;
        }

        /* Check that the module's row count is within range of the DDRC */
        if (spd.nrow_addr < 12 || spd.nrow_addr > 15) {
                printf("DDR: Row number is out of range of DDRC, row=%02X\n",
                       spd.nrow_addr);
                return 0;
        }

        /* Check that the module's column count is within range of the DDRC */
        if (spd.ncol_addr < 8 || spd.ncol_addr > 11) {
                printf("DDR: Col number is out of range of DDRC, col=%02X\n",
                       spd.ncol_addr);
                return 0;
        }

#ifdef CFG_DDRCDR_VALUE
        /*
         * Adjust DDR II IO voltage biasing. It just makes it work.
         */
        if (spd.mem_type == SPD_MEMTYPE_DDR2) {
                immap->sysconf.ddrcdr = CFG_DDRCDR_VALUE;
        }
#endif

        /*
         * ODT configuration recommendation from DDR Controller Chapter.
         */
        odt_rd_cfg = 0;                 /* Never assert ODT */
        odt_wr_cfg = 0;                 /* Never assert ODT */
        if (spd.mem_type == SPD_MEMTYPE_DDR2) {
                odt_wr_cfg = 1;         /* Assert ODT on writes to CSn */
        }

        /* Setup DDR chip select register */
#ifdef CFG_83XX_DDR_USES_CS0
        ddr->csbnds[0].csbnds = (banksize(spd.row_dens) >> 24) - 1;
        ddr->cs_config[0] = ( 1 << 31
                            | (odt_rd_cfg << 20)
                            | (odt_wr_cfg << 16)
                            | (spd.nrow_addr - 12) << 8
                            | (spd.ncol_addr - 8) );
        debug("\n");
        debug("cs0_bnds = 0x%08x\n", ddr->csbnds[0].csbnds);
        debug("cs0_config = 0x%08x\n", ddr->cs_config[0]);

        if (n_ranks == 2) {
                ddr->csbnds[1].csbnds = ( (banksize(spd.row_dens) >> 8)
                                  | ((banksize(spd.row_dens) >> 23) - 1) );
                ddr->cs_config[1] = ( 1 << 31
                                    | (odt_rd_cfg << 20)
                                    | (odt_wr_cfg << 16)
                                    | (spd.nrow_addr - 12) << 8
                                    | (spd.ncol_addr - 8) );
                debug("cs1_bnds = 0x%08x\n", ddr->csbnds[1].csbnds);
                debug("cs1_config = 0x%08x\n", ddr->cs_config[1]);
        }

#else
        ddr->csbnds[2].csbnds = (banksize(spd.row_dens) >> 24) - 1;
        ddr->cs_config[2] = ( 1 << 31
                            | (odt_rd_cfg << 20)
                            | (odt_wr_cfg << 16)
                            | (spd.nrow_addr - 12) << 8
                            | (spd.ncol_addr - 8) );
        debug("\n");
        debug("cs2_bnds = 0x%08x\n", ddr->csbnds[2].csbnds);
        debug("cs2_config = 0x%08x\n", ddr->cs_config[2]);

        if (n_ranks == 2) {
                ddr->csbnds[3].csbnds = ( (banksize(spd.row_dens) >> 8)
                                  | ((banksize(spd.row_dens) >> 23) - 1) );
                ddr->cs_config[3] = ( 1 << 31
                                    | (odt_rd_cfg << 20)
                                    | (odt_wr_cfg << 16)
                                    | (spd.nrow_addr - 12) << 8
                                    | (spd.ncol_addr - 8) );
                debug("cs3_bnds = 0x%08x\n", ddr->csbnds[3].csbnds);
                debug("cs3_config = 0x%08x\n", ddr->cs_config[3]);
        }
#endif

        /*
         * Figure out memory size in Megabytes.
         */
        memsize = n_ranks * banksize(spd.row_dens) / 0x100000;

        /*
         * First supported LAW size is 16M, at LAWAR_SIZE_16M == 23.
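         * The LAWAR SIZE field selects a window of 2**(SIZE+1) bytes, so
         * with memsize in megabytes the field works out to
         * 19 + __ilog2(memsize) (e.g. 256 MB -> 27).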
         */
        law_size = 19 + __ilog2(memsize);

        /*
         * Set up LAWBAR for all of DDR.
         */
        ecm->bar = ((CFG_DDR_SDRAM_BASE >> 12) & 0xfffff);
        ecm->ar = (LAWAR_EN | LAWAR_TRGT_IF_DDR | (LAWAR_SIZE & law_size));
        debug("DDR:bar=0x%08x\n", ecm->bar);
        debug("DDR:ar=0x%08x\n", ecm->ar);

        /*
         * Find the largest CAS by locating the highest 1 bit
         * in the spd.cas_lat field. Translate it to a DDR
         * controller field value:
         *
         * CAS Lat  DDR I    DDR II   Ctrl
         * Clocks   SPD Bit  SPD Bit  Value
         * -------  -------  -------  -----
         * 1.0      0                 0001
         * 1.5      1                 0010
         * 2.0      2        2        0011
         * 2.5      3                 0100
         * 3.0      4        3        0101
         * 3.5      5                 0110
         * 4.0      6        4        0111
         * 4.5                        1000
         * 5.0               5        1001
         */
        caslat = __ilog2(spd.cas_lat);
        if ((spd.mem_type == SPD_MEMTYPE_DDR)
            && (caslat > 6)) {
                printf("DDR I: Invalid SPD CAS Latency: 0x%x.\n", spd.cas_lat);
                return 0;
        } else if (spd.mem_type == SPD_MEMTYPE_DDR2
                   && (caslat < 2 || caslat > 5)) {
                printf("DDR II: Invalid SPD CAS Latency: 0x%x.\n",
                       spd.cas_lat);
                return 0;
        }
        debug("DDR: caslat SPD bit is %d\n", caslat);

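        /*
         * SPD byte 9 (clk_cycle) encodes the minimum cycle time as
         * whole/tenth nanosecond nibbles; e.g. 0x50 means 5.0 ns, giving
         * max_bus_clk = 200 MHz, i.e. a DDR-400 part.
         */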
        max_bus_clk = 1000 * 10 / (((spd.clk_cycle & 0xF0) >> 4) * 10
                                   + (spd.clk_cycle & 0x0f));
        max_data_rate = max_bus_clk * 2;

        debug("DDR: Module maximum data rate is: %d MHz\n", max_data_rate);

        ddrc_clk = gd->ddr_clk / 1000000;
        effective_data_rate = 0;

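        /*
         * Derate the CAS latency if the controller runs slower than the
         * module's rated speed: SPD bytes 23/25 (clk_cycle2/clk_cycle3)
         * give the cycle times at CL-1 and CL-2, so when they match the
         * selected clock period the latency is reduced by one or two.
         */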
        if (max_data_rate >= 390 && max_data_rate < 460) { /* it is DDR 400 */
                if (ddrc_clk <= 460 && ddrc_clk > 350) {
                        /* DDR controller clk at 350~460 */
                        effective_data_rate = 400; /* 5ns */
                        caslat = caslat;
                } else if (ddrc_clk <= 350 && ddrc_clk > 280) {
                        /* DDR controller clk at 280~350 */
                        effective_data_rate = 333; /* 6ns */
                        if (spd.clk_cycle2 == 0x60)
                                caslat = caslat - 1;
                        else
                                caslat = caslat;
                } else if (ddrc_clk <= 280 && ddrc_clk > 230) {
                        /* DDR controller clk at 230~280 */
                        effective_data_rate = 266; /* 7.5ns */
                        if (spd.clk_cycle3 == 0x75)
                                caslat = caslat - 2;
                        else if (spd.clk_cycle2 == 0x75)
                                caslat = caslat - 1;
                        else
                                caslat = caslat;
                } else if (ddrc_clk <= 230 && ddrc_clk > 90) {
                        /* DDR controller clk at 90~230 */
                        effective_data_rate = 200; /* 10ns */
                        if (spd.clk_cycle3 == 0xa0)
                                caslat = caslat - 2;
                        else if (spd.clk_cycle2 == 0xa0)
                                caslat = caslat - 1;
                        else
                                caslat = caslat;
                }
        } else if (max_data_rate >= 323) { /* it is DDR 333 */
                if (ddrc_clk <= 350 && ddrc_clk > 280) {
                        /* DDR controller clk at 280~350 */
                        effective_data_rate = 333; /* 6ns */
                        caslat = caslat;
                } else if (ddrc_clk <= 280 && ddrc_clk > 230) {
                        /* DDR controller clk at 230~280 */
                        effective_data_rate = 266; /* 7.5ns */
                        if (spd.clk_cycle2 == 0x75)
                                caslat = caslat - 1;
                        else
                                caslat = caslat;
                } else if (ddrc_clk <= 230 && ddrc_clk > 90) {
                        /* DDR controller clk at 90~230 */
                        effective_data_rate = 200; /* 10ns */
                        if (spd.clk_cycle3 == 0xa0)
                                caslat = caslat - 2;
                        else if (spd.clk_cycle2 == 0xa0)
                                caslat = caslat - 1;
                        else
                                caslat = caslat;
                }
        } else if (max_data_rate >= 256) { /* it is DDR 266 */
                if (ddrc_clk <= 350 && ddrc_clk > 280) {
                        /* DDR controller clk at 280~350 */
                        printf("DDR: DDR controller freq is more than "
                               "max data rate of the module\n");
                        return 0;
                } else if (ddrc_clk <= 280 && ddrc_clk > 230) {
                        /* DDR controller clk at 230~280 */
                        effective_data_rate = 266; /* 7.5ns */
                        caslat = caslat;
                } else if (ddrc_clk <= 230 && ddrc_clk > 90) {
                        /* DDR controller clk at 90~230 */
                        effective_data_rate = 200; /* 10ns */
                        if (spd.clk_cycle2 == 0xa0)
                                caslat = caslat - 1;
                }
        } else if (max_data_rate >= 190) { /* it is DDR 200 */
                if (ddrc_clk <= 350 && ddrc_clk > 230) {
                        /* DDR controller clk at 230~350 */
                        printf("DDR: DDR controller freq is more than "
                               "max data rate of the module\n");
                        return 0;
                } else if (ddrc_clk <= 230 && ddrc_clk > 90) {
                        /* DDR controller clk at 90~230 */
                        effective_data_rate = 200; /* 10ns */
                        caslat = caslat;
                }
        }

        debug("DDR: Effective data rate is: %d MHz\n", effective_data_rate);
        debug("DDR: CAS latency SPD bit after derating is: %d\n", caslat);

        /*
         * Errata DDR6 workaround: enable the input 2 cycles earlier.
         * Applies to MPC834x Rev 1.0/1.1 and MPC8360 Rev 1.1/1.2.
         */
        if (PVR_MAJ(pvr) <= 1 && spd.mem_type == SPD_MEMTYPE_DDR) {
                if (caslat == 2)
                        ddr->debug_reg = 0x201c0000; /* CL=2 */
                else if (caslat == 3)
                        ddr->debug_reg = 0x202c0000; /* CL=2.5 */
                else if (caslat == 4)
                        ddr->debug_reg = 0x202c0000; /* CL=3.0 */

                __asm__ __volatile__ ("sync");

                debug("Errata DDR6 (debug_reg=0x%08x)\n", ddr->debug_reg);
        }

        /*
         * Convert caslat clocks to DDR controller value.
         * Force caslat_ctrl to be DDR Controller field-sized.
         */
        if (spd.mem_type == SPD_MEMTYPE_DDR) {
                caslat_ctrl = (caslat + 1) & 0x07;
        } else {
                caslat_ctrl = (2 * caslat - 1) & 0x0f;
        }

        debug("DDR: effective data rate is %d MHz\n", effective_data_rate);
        debug("DDR: caslat SPD bit is %d, controller field is 0x%x\n",
              caslat, caslat_ctrl);

        /*
         * Timing Config 0.
         * Avoid writing for DDR I.
         */
        if (spd.mem_type == SPD_MEMTYPE_DDR2) {
                unsigned char taxpd_clk = 8;            /* By the book. */
                unsigned char tmrd_clk = 2;             /* By the book. */
                unsigned char act_pd_exit = 2;          /* Empirical? */
                unsigned char pre_pd_exit = 6;          /* Empirical? */

                ddr->timing_cfg_0 = (0
                        | ((act_pd_exit & 0x7) << 20)   /* ACT_PD_EXIT */
                        | ((pre_pd_exit & 0x7) << 16)   /* PRE_PD_EXIT */
                        | ((taxpd_clk & 0xf) << 8)      /* ODT_PD_EXIT */
                        | ((tmrd_clk & 0xf) << 0)       /* MRS_CYC */
                        );
                debug("DDR: timing_cfg_0 = 0x%08x\n", ddr->timing_cfg_0);
        }

        /*
         * For DDR I, WRREC(Twr) and WRTORD(Twtr) are not in SPD,
         * use conservative values.
         * For DDR II, they are bytes 36 and 37, in quarter nanos.
         */

        if (spd.mem_type == SPD_MEMTYPE_DDR) {
                twr_clk = 3;    /* Clocks */
                twtr_clk = 1;   /* Clocks */
        } else {
                twr_clk = picos_to_clk(spd.twr * 250);
                twtr_clk = picos_to_clk(spd.twtr * 250);
        }

        /*
         * Calculate Trfc, in picos.
         * DDR I: Byte 42 straight up in ns.
         * DDR II: Bytes 40 and 42 swizzled some, in ns.
         */
        if (spd.mem_type == SPD_MEMTYPE_DDR) {
                trfc = spd.trfc * 1000;         /* up to ps */
        } else {
                unsigned int byte40_table_ps[8] = {
                        0,
                        250,
                        330,
                        500,
                        660,
                        750,
                        0,
                        0
                };

                trfc = (((spd.trctrfc_ext & 0x1) * 256) + spd.trfc) * 1000
                       + byte40_table_ps[(spd.trctrfc_ext >> 1) & 0x7];
        }
        trfc_clk = picos_to_clk(trfc);

        /*
         * Trcd, Byte 29, from quarter nanos to ps and clocks.
         */
        trcd_clk = picos_to_clk(spd.trcd * 250) & 0x7;

        /*
         * Convert trfc_clk to DDR controller fields. DDR I should
         * fit in the REFREC field (16-19) of TIMING_CFG_1, but the
         * 83xx controller has an extended REFREC field of three bits.
         * The controller automatically adds 8 clocks to this value,
         * so preadjust it down 8 first before splitting it up.
         */
        trfc_low = (trfc_clk - 8) & 0xf;
        trfc_high = ((trfc_clk - 8) >> 4) & 0x3;

        ddr->timing_cfg_1 =
            (((picos_to_clk(spd.trp * 250) & 0x07) << 28) |     /* PRETOACT */
             ((picos_to_clk(spd.tras * 1000) & 0x0f) << 24) |   /* ACTTOPRE */
             (trcd_clk << 20) |                                 /* ACTTORW */
             (caslat_ctrl << 16) |                              /* CASLAT */
             (trfc_low << 12) |                                 /* REFREC */
             ((twr_clk & 0x07) << 8) |                          /* WRREC */
             ((picos_to_clk(spd.trrd * 250) & 0x07) << 4) |     /* ACTTOACT */
             ((twtr_clk & 0x07) << 0)                           /* WRTORD */
            );

        /*
         * Additive Latency
         * For DDR I, 0.
         * For DDR II, with ODT enabled, use "a value" less than ACTTORW,
         * which comes from Trcd, and also note that:
         *      add_lat + caslat must be >= 4
         */
        add_lat = 0;
        if (spd.mem_type == SPD_MEMTYPE_DDR2
            && (odt_wr_cfg || odt_rd_cfg)
            && (caslat < 4)) {
                add_lat = trcd_clk - 1;
                if ((add_lat + caslat) < 4) {
                        add_lat = 0;
                }
        }

        /*
         * Write Data Delay
         * Historically 0x2 == 4/8 clock delay.
         * Empirically, 0x3 == 6/8 clock delay is suggested for DDR I 266.
         */
        wr_data_delay = 2;

        /*
         * Write Latency
         * Read to Precharge
         * Minimum CKE Pulse Width.
         * Four Activate Window
         */
        if (spd.mem_type == SPD_MEMTYPE_DDR) {
                /*
                 * This is a lie. It should really be 1, but if it is
                 * set to 1, bits overlap into the old controller's
                 * otherwise unused ACSM field. If we leave it 0, then
                 * the HW will magically treat it as 1 for DDR 1. Oh Yea.
                 */
                wr_lat = 0;

                trtp_clk = 2;           /* By the book. */
                cke_min_clk = 1;        /* By the book. */
                four_act = 1;           /* By the book. */

        } else {
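                /*
                 * DDR2 write latency is one clock less than the CAS
                 * latency; additive latency is handled separately via
                 * the ADD_LAT field.
                 */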
                wr_lat = caslat - 1;

                /* Convert SPD value from quarter nanos to picos. */
                trtp_clk = picos_to_clk(spd.trtp * 250);

                cke_min_clk = 3;                /* By the book. */
                four_act = picos_to_clk(37500); /* By the book. 1k pages? */
        }

        /*
         * Empirically set ~MCAS-to-preamble override for DDR 2.
         * Your mileage will vary.
         */
        cpo = 0;
        if (spd.mem_type == SPD_MEMTYPE_DDR2) {
                if (effective_data_rate == 266 || effective_data_rate == 333) {
                        cpo = 0x7;      /* READ_LAT + 5/4 */
                } else if (effective_data_rate == 400) {
                        cpo = 0x9;      /* READ_LAT + 7/4 */
                } else {
                        /* Automatic calibration */
                        cpo = 0x1f;
                }
        }

        ddr->timing_cfg_2 = (0
                | ((add_lat & 0x7) << 28)               /* ADD_LAT */
                | ((cpo & 0x1f) << 23)                  /* CPO */
                | ((wr_lat & 0x7) << 19)                /* WR_LAT */
                | ((trtp_clk & 0x7) << 13)              /* RD_TO_PRE */
                | ((wr_data_delay & 0x7) << 10)         /* WR_DATA_DELAY */
                | ((cke_min_clk & 0x7) << 6)            /* CKE_PLS */
                | ((four_act & 0x1f) << 0)              /* FOUR_ACT */
                );

        debug("DDR:timing_cfg_1=0x%08x\n", ddr->timing_cfg_1);
        debug("DDR:timing_cfg_2=0x%08x\n", ddr->timing_cfg_2);

        /* Check DIMM data bus width */
        if (spd.dataw_lsb == 0x20) {
                if (spd.mem_type == SPD_MEMTYPE_DDR)
                        burstlen = 0x03; /* 32-bit data bus, burst length 8 */
                else
                        burstlen = 0x02; /* 32-bit data bus, burst length 4 */
                debug("\n DDR DIMM: data bus width is 32 bit");
        } else {
                burstlen = 0x02; /* Others act as a 64-bit bus, burst length 4 */
                debug("\n DDR DIMM: data bus width is 64 bit");
        }

        /* Is this an ECC DDR chip? */
        if (spd.config == 0x02)
                debug(" with ECC\n");
        else
                debug(" without ECC\n");

        /*
         * Burst length is always 4 for a 64-bit data bus and 8 for a
         * 32-bit data bus; burst type is sequential.
         */
        if (spd.mem_type == SPD_MEMTYPE_DDR) {
                switch (caslat) {
                case 1:
                        ddr->sdram_mode = 0x50 | burstlen;      /* CL=1.5 */
                        break;
                case 2:
                        ddr->sdram_mode = 0x20 | burstlen;      /* CL=2.0 */
                        break;
                case 3:
                        ddr->sdram_mode = 0x60 | burstlen;      /* CL=2.5 */
                        break;
                case 4:
                        ddr->sdram_mode = 0x30 | burstlen;      /* CL=3.0 */
                        break;
                default:
                        printf("DDR: only CL 1.5, 2.0, 2.5 and 3.0 are supported\n");
                        return 0;
                }
        } else {
                mode_odt_enable = 0x0;  /* Default disabled */
                if (odt_wr_cfg || odt_rd_cfg) {
                        /*
                         * Bits 6 and 2 in Extended MRS(1)
                         * Bit 2 == 0x04 == 75 Ohm, with 2 DIMM modules.
                         * Bit 6 == 0x40 == 150 Ohm, with 1 DIMM module.
                         */
                        mode_odt_enable = 0x40; /* 150 Ohm */
                }

                ddr->sdram_mode =
                    (0
                     | (1 << (16 + 10))         /* DQS Differential disable */
                     | (add_lat << (16 + 3))    /* Additive Latency in EMRS1 */
                     | (mode_odt_enable << 16)  /* ODT Enable in EMRS1 */
                     | ((twr_clk - 1) << 9)     /* Write Recovery Autopre */
                     | (caslat << 4)            /* caslat */
                     | (burstlen << 0)          /* Burst length */
                    );
        }
        debug("DDR:sdram_mode=0x%08x\n", ddr->sdram_mode);

        /*
         * Clear EMRS2 and EMRS3.
         */
        ddr->sdram_mode2 = 0;
        debug("DDR: sdram_mode2 = 0x%08x\n", ddr->sdram_mode2);

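        /*
         * SPD byte 12: the low bits select the refresh period and bit 7
         * flags self-refresh capability, hence the paired 0x0X/0x8X cases.
         */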
        switch (spd.refresh) {
        case 0x00:
        case 0x80:
                refresh_clk = picos_to_clk(15625000);
                break;
        case 0x01:
        case 0x81:
                refresh_clk = picos_to_clk(3900000);
                break;
        case 0x02:
        case 0x82:
                refresh_clk = picos_to_clk(7800000);
                break;
        case 0x03:
        case 0x83:
                refresh_clk = picos_to_clk(31300000);
                break;
        case 0x04:
        case 0x84:
                refresh_clk = picos_to_clk(62500000);
                break;
        case 0x05:
        case 0x85:
                refresh_clk = picos_to_clk(125000000);
                break;
        default:
                refresh_clk = 0x512;
                break;
        }

        /*
         * Set BSTOPRE to 0x100 for page mode.
         * If auto-precharge is used, set BSTOPRE = 0.
         */
        ddr->sdram_interval = ((refresh_clk & 0x3fff) << 16) | 0x100;
        debug("DDR:sdram_interval=0x%08x\n", ddr->sdram_interval);

        /*
         * SDRAM Cfg 2
         */
        odt_cfg = 0;
        if (odt_rd_cfg | odt_wr_cfg) {
                odt_cfg = 0x2;          /* ODT to IOs during reads */
        }
        if (spd.mem_type == SPD_MEMTYPE_DDR2) {
                ddr->sdram_cfg2 = (0
                        | (0 << 26)             /* True DQS */
                        | (odt_cfg << 21)       /* ODT only read */
                        | (1 << 12)             /* 1 refresh at a time */
                        );

                debug("DDR: sdram_cfg2 = 0x%08x\n", ddr->sdram_cfg2);
        }

#ifdef CFG_DDR_SDRAM_CLK_CNTL   /* Optional platform specific value */
        ddr->sdram_clk_cntl = CFG_DDR_SDRAM_CLK_CNTL;
#endif
        debug("DDR:sdram_clk_cntl=0x%08x\n", ddr->sdram_clk_cntl);

        asm("sync;isync");

        udelay(600);

        /*
         * Figure out the settings for the sdram_cfg register. Build up
         * the value in 'sdram_cfg' before writing since the write into
         * the register will actually enable the memory controller, and all
         * settings must be done before enabling.
         *
         * sdram_cfg[0]   = 1 (ddr sdram logic enable)
         * sdram_cfg[1]   = 1 (self-refresh-enable)
         * sdram_cfg[5:7] = (SDRAM type = DDR SDRAM)
         *                      010 DDR 1 SDRAM
         *                      011 DDR 2 SDRAM
         * sdram_cfg[12]  = 0 (32_BE = 0, 64-bit bus mode)
         * sdram_cfg[13]  = 0 (8_BE = 0, 4-beat bursts)
         */
        if (spd.mem_type == SPD_MEMTYPE_DDR)
                sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR1;
        else
                sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR2;

        sdram_cfg = (0
                     | SDRAM_CFG_MEM_EN         /* DDR enable */
                     | SDRAM_CFG_SREN           /* Self refresh */
                     | sdram_type               /* SDRAM type */
                     );

        /* sdram_cfg[3] = RD_EN - registered DIMM enable */
        if (spd.mod_attr & 0x02)
                sdram_cfg |= SDRAM_CFG_RD_EN;

        /* The DIMM is 32 bits wide */
        if (spd.dataw_lsb == 0x20) {
                if (spd.mem_type == SPD_MEMTYPE_DDR)
                        sdram_cfg |= SDRAM_CFG_32_BE | SDRAM_CFG_8_BE;
                if (spd.mem_type == SPD_MEMTYPE_DDR2)
                        sdram_cfg |= SDRAM_CFG_32_BE;
        }

        ddrc_ecc_enable = 0;

#if defined(CONFIG_DDR_ECC)
        /* Enable ECC with sdram_cfg[2] */
        if (spd.config == 0x02) {
                sdram_cfg |= 0x20000000;
                ddrc_ecc_enable = 1;
                /* disable error detection */
                ddr->err_disable = ~ECC_ERROR_ENABLE;
                /* set single bit error threshold to maximum value,
                 * reset counter to zero */
                ddr->err_sbe = (255 << ECC_ERROR_MAN_SBET_SHIFT) |
                               (0 << ECC_ERROR_MAN_SBEC_SHIFT);
        }

        debug("DDR:err_disable=0x%08x\n", ddr->err_disable);
        debug("DDR:err_sbe=0x%08x\n", ddr->err_sbe);
#endif
        debug(" DDRC ECC mode: %s\n", ddrc_ecc_enable ? "ON" : "OFF");

#if defined(CONFIG_DDR_2T_TIMING)
        /*
         * Enable 2T timing by setting sdram_cfg[16].
         */
        sdram_cfg |= SDRAM_CFG_2T_EN;
#endif
        /* Enable controller, and GO! */
        ddr->sdram_cfg = sdram_cfg;
        asm("sync;isync");
        udelay(500);

        debug("DDR:sdram_cfg=0x%08x\n", ddr->sdram_cfg);
        return memsize;         /* in MBytes */
}
#endif /* CONFIG_SPD_EEPROM */

#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRC)
/*
 * Use the timebase counter; get_timer() is not available
 * at this point of initialization yet.
 */
static __inline__ unsigned long get_tbms(void)
{
        unsigned long tbl;
        unsigned long tbu1, tbu2;
        unsigned long ms;
        unsigned long long tmp;

        ulong tbclk = get_tbclk();

        /* get the timebase ticks */
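        /*
         * Re-read TBU until it is stable so that a TBL carry between the
         * individual reads cannot produce a torn 64-bit value.
         */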
        do {
                asm volatile ("mftbu %0":"=r" (tbu1):);
                asm volatile ("mftb %0":"=r" (tbl):);
                asm volatile ("mftbu %0":"=r" (tbu2):);
        } while (tbu1 != tbu2);

        /* convert ticks to ms */
        tmp = (unsigned long long)(tbu1);
        tmp = (tmp << 32);
        tmp += (unsigned long long)(tbl);
        ms = tmp / (tbclk / 1000);

        return ms;
}

/*
 * Initialize all of memory for ECC, then enable errors.
 */
/* #define CONFIG_DDR_ECC_INIT_VIA_DMA */
void ddr_enable_ecc(unsigned int dram_size)
{
        volatile immap_t *immap = (immap_t *)CFG_IMMR;
        volatile ddr83xx_t *ddr = &immap->ddr;
        unsigned long t_start, t_end;
        register u64 *p;
        register uint size;
        unsigned int pattern[2];
#if defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
        uint i;
#endif
        icache_enable();
        t_start = get_tbms();
        pattern[0] = 0xdeadbeef;
        pattern[1] = 0xdeadbeef;

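        /*
         * Every DDR location has to be written once so that valid ECC
         * check bits exist in memory before error reporting is enabled
         * at the end of this function.
         */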
#if !defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
        debug("ddr init: CPU FP write method\n");
        size = dram_size;
        for (p = 0; p < (u64 *)(size); p++) {
                ppcDWstore((u32 *)p, pattern);
        }
        __asm__ __volatile__ ("sync");
#else
        debug("ddr init: DMA method\n");
        size = 0x2000;
        for (p = 0; p < (u64 *)(size); p++) {
                ppcDWstore((u32 *)p, pattern);
        }
        __asm__ __volatile__ ("sync");

        /* Initialise DMA for direct transfer */
        dma_init();
        /* Start DMA to transfer */
        dma_xfer((uint *)0x2000, 0x2000, (uint *)0);            /* 8K */
        dma_xfer((uint *)0x4000, 0x4000, (uint *)0);            /* 16K */
        dma_xfer((uint *)0x8000, 0x8000, (uint *)0);            /* 32K */
        dma_xfer((uint *)0x10000, 0x10000, (uint *)0);          /* 64K */
        dma_xfer((uint *)0x20000, 0x20000, (uint *)0);          /* 128K */
        dma_xfer((uint *)0x40000, 0x40000, (uint *)0);          /* 256K */
        dma_xfer((uint *)0x80000, 0x80000, (uint *)0);          /* 512K */
        dma_xfer((uint *)0x100000, 0x100000, (uint *)0);        /* 1M */
        dma_xfer((uint *)0x200000, 0x200000, (uint *)0);        /* 2M */
        dma_xfer((uint *)0x400000, 0x400000, (uint *)0);        /* 4M */

        for (i = 1; i < dram_size / 0x800000; i++) {
                dma_xfer((uint *)(0x800000 * i), 0x800000, (uint *)0);
        }
#endif

        t_end = get_tbms();
        icache_disable();

        debug("\nREADY!!\n");
        debug("ddr init duration: %ld ms\n", t_end - t_start);

        /* Clear All ECC Errors */
        if ((ddr->err_detect & ECC_ERROR_DETECT_MME) == ECC_ERROR_DETECT_MME)
                ddr->err_detect |= ECC_ERROR_DETECT_MME;
        if ((ddr->err_detect & ECC_ERROR_DETECT_MBE) == ECC_ERROR_DETECT_MBE)
                ddr->err_detect |= ECC_ERROR_DETECT_MBE;
        if ((ddr->err_detect & ECC_ERROR_DETECT_SBE) == ECC_ERROR_DETECT_SBE)
                ddr->err_detect |= ECC_ERROR_DETECT_SBE;
        if ((ddr->err_detect & ECC_ERROR_DETECT_MSE) == ECC_ERROR_DETECT_MSE)
                ddr->err_detect |= ECC_ERROR_DETECT_MSE;

        /* Disable ECC-Interrupts */
        ddr->err_int_en &= ECC_ERR_INT_DISABLE;

        /* Enable errors for ECC */
        ddr->err_disable &= ECC_ERROR_ENABLE;

        __asm__ __volatile__ ("sync");
        __asm__ __volatile__ ("isync");
}
#endif /* CONFIG_DDR_ECC */