cpu/mpc83xx/spd_sdram.c
/*
 * (C) Copyright 2006 Freescale Semiconductor, Inc.
 *
 * (C) Copyright 2006
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 *
 * Copyright (C) 2004-2006 Freescale Semiconductor, Inc.
 * (C) Copyright 2003 Motorola Inc.
 * Xianghua Xiao (X.Xiao@motorola.com)
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */

#include <common.h>
#include <asm/processor.h>
#include <i2c.h>
#include <spd.h>
#include <asm/mmu.h>
#include <spd_sdram.h>

#ifdef CONFIG_SPD_EEPROM

DECLARE_GLOBAL_DATA_PTR;

#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRC)
extern void dma_init(void);
extern uint dma_check(void);
extern int dma_xfer(void *dest, uint count, void *src);
#endif

#ifndef CFG_READ_SPD
#define CFG_READ_SPD	i2c_read
#endif

/*
 * Convert picoseconds into clock cycles (rounding up if needed).
 */
int
picos_to_clk(int picos)
{
	unsigned int ddr_bus_clk;
	int clks;

	ddr_bus_clk = gd->ddr_clk >> 1;
	clks = picos / ((1000000000 / ddr_bus_clk) * 1000);
	if (picos % ((1000000000 / ddr_bus_clk) * 1000) != 0)
		clks++;

	return clks;
}
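
/*
 * Illustrative example (hypothetical values): with gd->ddr_clk at 266 MHz
 * the DDR bus clock is 133 MHz, so the integer math above yields
 * (1000000000 / 133000000) * 1000 = 7000 ps per clock.  For picos = 15000
 * this gives 15000 / 7000 = 2 with a remainder, so the result rounds up
 * to 3 clocks.
 */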

unsigned int banksize(unsigned char row_dens)
{
	return ((row_dens >> 2) | ((row_dens & 3) << 6)) << 24;
}
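
/*
 * Note on the bit twiddling above: the SPD rank density byte (byte 31) is a
 * bit mask, e.g. 0x40 = 256 MB, 0x80 = 512 MB, 0x01 = 1 GB, 0x02 = 2 GB.
 * Rotating the byte right by two places each density at bit (log2(MB) - 4),
 * so shifting the result left by 24 (i.e. multiplying by 16 MB) yields the
 * rank size in bytes.  For example, 0x40 -> 0x10 -> 0x10000000 (256 MB).
 */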

int read_spd(uint addr)
{
	return ((int) addr);
}

#undef SPD_DEBUG
#ifdef SPD_DEBUG
static void spd_debug(spd_eeprom_t *spd)
{
	printf ("\nDIMM type:       %-18.18s\n", spd->mpart);
	printf ("SPD size:        %d\n", spd->info_size);
	printf ("EEPROM size:     %d\n", 1 << spd->chip_size);
	printf ("Memory type:     %d\n", spd->mem_type);
	printf ("Row addr:        %d\n", spd->nrow_addr);
	printf ("Column addr:     %d\n", spd->ncol_addr);
	printf ("# of rows:       %d\n", spd->nrows);
	printf ("Row density:     %d\n", spd->row_dens);
	printf ("# of banks:      %d\n", spd->nbanks);
	printf ("Data width:      %d\n",
		256 * spd->dataw_msb + spd->dataw_lsb);
	printf ("Chip width:      %d\n", spd->primw);
	printf ("Refresh rate:    %02X\n", spd->refresh);
	printf ("CAS latencies:   %02X\n", spd->cas_lat);
	printf ("Write latencies: %02X\n", spd->write_lat);
	printf ("tRP:             %d\n", spd->trp);
	printf ("tRCD:            %d\n", spd->trcd);
	printf ("\n");
}
#endif /* SPD_DEBUG */

long int spd_sdram()
{
	volatile immap_t *immap = (immap_t *)CFG_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	volatile law83xx_t *ecm = &immap->sysconf.ddrlaw[0];
	spd_eeprom_t spd;
	unsigned int n_ranks;
	unsigned int odt_rd_cfg, odt_wr_cfg;
	unsigned char twr_clk, twtr_clk;
	unsigned char sdram_type;
	unsigned int memsize;
	unsigned int law_size;
	unsigned char caslat, caslat_ctrl;
	unsigned int trfc, trfc_clk, trfc_low, trfc_high;
	unsigned int trcd_clk, trtp_clk;
	unsigned char cke_min_clk;
	unsigned char add_lat, wr_lat;
	unsigned char wr_data_delay;
	unsigned char four_act;
	unsigned char cpo;
	unsigned char burstlen;
	unsigned char odt_cfg, mode_odt_enable;
	unsigned int max_bus_clk;
	unsigned int max_data_rate, effective_data_rate;
	unsigned int ddrc_clk;
	unsigned int refresh_clk;
	unsigned int sdram_cfg;
	unsigned int ddrc_ecc_enable;
	unsigned int pvr = get_pvr();

	/* Read SPD parameters with I2C */
	CFG_READ_SPD(SPD_EEPROM_ADDRESS, 0, 1, (uchar *) & spd, sizeof (spd));
#ifdef SPD_DEBUG
	spd_debug(&spd);
#endif
	/* Check the memory type */
	if (spd.mem_type != SPD_MEMTYPE_DDR && spd.mem_type != SPD_MEMTYPE_DDR2) {
		printf("DDR: Module mem type is %02X\n", spd.mem_type);
		return 0;
	}

	/* Check the number of physical banks (ranks) */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		n_ranks = spd.nrows;
	} else {
		n_ranks = (spd.nrows & 0x7) + 1;
	}

	if (n_ranks > 2) {
		printf("DDR: The number of physical banks is %02X\n", n_ranks);
		return 0;
	}

	/* Check that the module's row address count is within the DDRC's range */
	if (spd.nrow_addr < 12 || spd.nrow_addr > 15) {
		printf("DDR: Row number is out of range of DDRC, row=%02X\n",
		       spd.nrow_addr);
		return 0;
	}

	/* Check that the module's column address count is within the DDRC's range */
	if (spd.ncol_addr < 8 || spd.ncol_addr > 11) {
		printf("DDR: Col number is out of range of DDRC, col=%02X\n",
		       spd.ncol_addr);
		return 0;
	}

#ifdef CFG_DDRCDR_VALUE
	/*
	 * Adjust DDR II IO voltage biasing.  It just makes it work.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		immap->sysconf.ddrcdr = CFG_DDRCDR_VALUE;
	}
#endif

	/*
	 * ODT configuration recommendation from DDR Controller Chapter.
	 */
	odt_rd_cfg = 0;			/* Never assert ODT */
	odt_wr_cfg = 0;			/* Never assert ODT */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		odt_wr_cfg = 1;		/* Assert ODT on writes to CSn */
	}

	/* Setup DDR chip select register */
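	/*
	 * Layout note (derived from the code below): each CSn_BNDS register
	 * holds the rank's starting address in its upper half and its ending
	 * address in its lower half, both in 16 MB units (address >> 24).
	 * CSn_CONFIG enables the chip select (the 1 << 31 term), programs the
	 * ODT read/write behaviour, and encodes the row and column address
	 * counts relative to the 12-row/8-column minimum.
	 */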
#ifdef CFG_83XX_DDR_USES_CS0
	ddr->csbnds[0].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[0] = ( 1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | (spd.nrow_addr - 12) << 8
			    | (spd.ncol_addr - 8) );
	debug("\n");
	debug("cs0_bnds = 0x%08x\n", ddr->csbnds[0].csbnds);
	debug("cs0_config = 0x%08x\n", ddr->cs_config[0]);

	if (n_ranks == 2) {
		ddr->csbnds[1].csbnds = ( (banksize(spd.row_dens) >> 8)
				| ((banksize(spd.row_dens) >> 23) - 1) );
		ddr->cs_config[1] = ( 1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | (spd.nrow_addr - 12) << 8
				    | (spd.ncol_addr - 8) );
		debug("cs1_bnds = 0x%08x\n", ddr->csbnds[1].csbnds);
		debug("cs1_config = 0x%08x\n", ddr->cs_config[1]);
	}

#else
	ddr->csbnds[2].csbnds = (banksize(spd.row_dens) >> 24) - 1;
	ddr->cs_config[2] = ( 1 << 31
			    | (odt_rd_cfg << 20)
			    | (odt_wr_cfg << 16)
			    | (spd.nrow_addr - 12) << 8
			    | (spd.ncol_addr - 8) );
	debug("\n");
	debug("cs2_bnds = 0x%08x\n", ddr->csbnds[2].csbnds);
	debug("cs2_config = 0x%08x\n", ddr->cs_config[2]);

	if (n_ranks == 2) {
		ddr->csbnds[3].csbnds = ( (banksize(spd.row_dens) >> 8)
				| ((banksize(spd.row_dens) >> 23) - 1) );
		ddr->cs_config[3] = ( 1 << 31
				    | (odt_rd_cfg << 20)
				    | (odt_wr_cfg << 16)
				    | (spd.nrow_addr - 12) << 8
				    | (spd.ncol_addr - 8) );
		debug("cs3_bnds = 0x%08x\n", ddr->csbnds[3].csbnds);
		debug("cs3_config = 0x%08x\n", ddr->cs_config[3]);
	}
#endif

	/*
	 * Figure out memory size in Megabytes.
	 */
	memsize = n_ranks * banksize(spd.row_dens) / 0x100000;

	/*
	 * First supported LAW size is 16M, at LAWAR_SIZE_16M == 23.
	 */
	law_size = 19 + __ilog2(memsize);

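	/*
	 * Illustrative example: for a single-rank 256 MB module, memsize is
	 * 256, __ilog2(256) is 8 and law_size becomes 27, matching the LAWAR
	 * encoding of size = log2(bytes) - 1 (2^28 bytes = 256 MB).  The
	 * 16 MB case gives 19 + 4 = 23 = LAWAR_SIZE_16M, as noted above.
	 */
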
	/*
	 * Set up LAWBAR for all of DDR.
	 */
	ecm->bar = ((CFG_DDR_SDRAM_BASE >> 12) & 0xfffff);
	ecm->ar = (LAWAR_EN | LAWAR_TRGT_IF_DDR | (LAWAR_SIZE & law_size));
	debug("DDR:bar=0x%08x\n", ecm->bar);
	debug("DDR:ar=0x%08x\n", ecm->ar);

	/*
	 * Find the largest CAS by locating the highest 1 bit
	 * in the spd.cas_lat field.  Translate it to a DDR
	 * controller field value:
	 *
	 * CAS Lat DDR I   DDR II  Ctrl
	 * Clocks  SPD Bit SPD Bit Value
	 * ------- ------- ------- -----
	 * 1.0     0               0001
	 * 1.5     1               0010
	 * 2.0     2       2       0011
	 * 2.5     3               0100
	 * 3.0     4       3       0101
	 * 3.5     5               0110
	 * 4.0     6       4       0111
	 * 4.5                     1000
	 * 5.0             5       1001
	 */
	caslat = __ilog2(spd.cas_lat);
	if ((spd.mem_type == SPD_MEMTYPE_DDR)
	    && (caslat > 6)) {
		printf("DDR I: Invalid SPD CAS Latency: 0x%x.\n", spd.cas_lat);
		return 0;
	} else if (spd.mem_type == SPD_MEMTYPE_DDR2
		   && (caslat < 2 || caslat > 5)) {
		printf("DDR II: Invalid SPD CAS Latency: 0x%x.\n",
		       spd.cas_lat);
		return 0;
	}
	debug("DDR: caslat SPD bit is %d\n", caslat);
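
	/*
	 * Illustrative example: a DDR2 part advertising CL 3, 4 and 5 has
	 * spd.cas_lat = 0x38, so __ilog2() picks bit 5, i.e. the highest
	 * supported latency, CL = 5.  The derating code below may then step
	 * this down if the controller runs slower than the module's maximum.
	 */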

	max_bus_clk = 1000 * 10 / (((spd.clk_cycle & 0xF0) >> 4) * 10
				   + (spd.clk_cycle & 0x0f));
	max_data_rate = max_bus_clk * 2;

	debug("DDR:Module maximum data rate is: %dMhz\n", max_data_rate);
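
	/*
	 * Illustrative example: spd.clk_cycle is the minimum cycle time in
	 * BCD-style tenths of a nanosecond, e.g. 0x50 = 5.0 ns, so
	 * max_bus_clk = 10000 / 50 = 200 MHz and max_data_rate = 400
	 * (DDR400).  Likewise 0x75 = 7.5 ns describes a DDR266 module.
	 */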

	ddrc_clk = gd->ddr_clk / 1000000;
	effective_data_rate = 0;

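	/*
	 * Derating note (based on the SPD fields used below): clk_cycle2 and
	 * clk_cycle3 hold the module's minimum cycle time at CAS latencies
	 * one and two steps below the maximum, again in tenths of a ns
	 * (0x60 = 6.0 ns, 0x75 = 7.5 ns, 0xa0 = 10.0 ns).  When the DDR
	 * controller runs slower than the module's rated speed, caslat is
	 * reduced accordingly so the DIMM is not run at a higher latency
	 * than it needs.
	 */
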
	if (max_data_rate >= 390 && max_data_rate < 460) { /* it is DDR 400 */
		if (ddrc_clk <= 460 && ddrc_clk > 350) {
			/* DDR controller clk at 350~460 */
			effective_data_rate = 400; /* 5ns */
			caslat = caslat;
		} else if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333; /* 6ns */
			if (spd.clk_cycle2 == 0x60)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			if (spd.clk_cycle3 == 0x75)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
			else
				caslat = caslat;
		}
	} else if (max_data_rate >= 323) { /* it is DDR 333 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			effective_data_rate = 333; /* 6ns */
			caslat = caslat;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			if (spd.clk_cycle2 == 0x75)
				caslat = caslat - 1;
			else
				caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle3 == 0xa0)
				caslat = caslat - 2;
			else if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
			else
				caslat = caslat;
		}
	} else if (max_data_rate >= 256) { /* it is DDR 266 */
		if (ddrc_clk <= 350 && ddrc_clk > 280) {
			/* DDR controller clk at 280~350 */
			printf("DDR: DDR controller freq is more than "
				"max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
			/* DDR controller clk at 230~280 */
			effective_data_rate = 266; /* 7.5ns */
			caslat = caslat;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			if (spd.clk_cycle2 == 0xa0)
				caslat = caslat - 1;
		}
	} else if (max_data_rate >= 190) { /* it is DDR 200 */
		if (ddrc_clk <= 350 && ddrc_clk > 230) {
			/* DDR controller clk at 230~350 */
			printf("DDR: DDR controller freq is more than "
				"max data rate of the module\n");
			return 0;
		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
			/* DDR controller clk at 90~230 */
			effective_data_rate = 200; /* 10ns */
			caslat = caslat;
		}
	}

	debug("DDR:Effective data rate is: %dMhz\n", effective_data_rate);
	debug("DDR:The MSB 1 of CAS Latency is: %d\n", caslat);

	/*
	 * Errata DDR6 workaround: enable the input two cycles earlier.
	 * This applies to MPC834x Rev 1.0/1.1 and MPC8360 Rev 1.1/1.2.
	 */
	if (PVR_MAJ(pvr) <= 1 && spd.mem_type == SPD_MEMTYPE_DDR) {
		if (caslat == 2)
			ddr->debug_reg = 0x201c0000; /* CL=2 */
		else if (caslat == 3)
			ddr->debug_reg = 0x202c0000; /* CL=2.5 */
		else if (caslat == 4)
			ddr->debug_reg = 0x202c0000; /* CL=3.0 */

		__asm__ __volatile__ ("sync");

		debug("Errata DDR6 (debug_reg=0x%08x)\n", ddr->debug_reg);
	}

	/*
	 * Convert caslat clocks to DDR controller value.
	 * Force caslat_ctrl to be DDR Controller field-sized.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		caslat_ctrl = (caslat + 1) & 0x07;
	} else {
		caslat_ctrl = (2 * caslat - 1) & 0x0f;
	}
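
	/*
	 * Cross-check against the table above: for DDR2 with caslat = 4
	 * (CL 4), 2 * 4 - 1 = 7 = 0b0111; for DDR1 with SPD bit 3 (CL 2.5),
	 * 3 + 1 = 4 = 0b0100.  Both match the "Ctrl Value" column.
	 */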

	debug("DDR: effective data rate is %d MHz\n", effective_data_rate);
	debug("DDR: caslat SPD bit is %d, controller field is 0x%x\n",
	      caslat, caslat_ctrl);

	/*
	 * Timing Config 0.
	 * Avoid writing for DDR I.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		unsigned char taxpd_clk = 8;		/* By the book. */
		unsigned char tmrd_clk = 2;		/* By the book. */
		unsigned char act_pd_exit = 2;		/* Empirical? */
		unsigned char pre_pd_exit = 6;		/* Empirical? */

		ddr->timing_cfg_0 = (0
			| ((act_pd_exit & 0x7) << 20)	/* ACT_PD_EXIT */
			| ((pre_pd_exit & 0x7) << 16)	/* PRE_PD_EXIT */
			| ((taxpd_clk & 0xf) << 8)	/* ODT_PD_EXIT */
			| ((tmrd_clk & 0xf) << 0)	/* MRS_CYC */
			);
		debug("DDR: timing_cfg_0 = 0x%08x\n", ddr->timing_cfg_0);
	}

	/*
	 * For DDR I, WRREC(Twr) and WRTORD(Twtr) are not in SPD,
	 * use conservative values.
	 * For DDR II, they are bytes 36 and 37, in quarter nanoseconds.
	 */

	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		twr_clk = 3;	/* Clocks */
		twtr_clk = 1;	/* Clocks */
	} else {
		twr_clk = picos_to_clk(spd.twr * 250);
		twtr_clk = picos_to_clk(spd.twtr * 250);
	}

	/*
	 * Calculate Trfc, in picos.
	 * DDR I: Byte 42 straight up in ns.
	 * DDR II: Bytes 40 and 42 swizzled some, in ns.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		trfc = spd.trfc * 1000;		/* up to ps */
	} else {
		unsigned int byte40_table_ps[8] = {
			0,
			250,
			330,
			500,
			660,
			750,
			0,
			0
		};

		trfc = (((spd.trctrfc_ext & 0x1) * 256) + spd.trfc) * 1000
		       + byte40_table_ps[(spd.trctrfc_ext >> 1) & 0x7];
	}
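
	/*
	 * Illustrative example of the DDR2 swizzle: for tRFC = 127.5 ns the
	 * SPD holds spd.trfc = 127 (whole ns) while the byte-40 extension
	 * bits select the 0.5 ns fraction (table index 3); bit 0 would add
	 * a further 256 ns for very large parts.  Here trfc = 127500 ps.
	 */
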
	trfc_clk = picos_to_clk(trfc);

	/*
	 * Trcd, Byte 29, from quarter nanos to ps and clocks.
	 */
	trcd_clk = picos_to_clk(spd.trcd * 250) & 0x7;

	/*
	 * Convert trfc_clk to DDR controller fields.  DDR I should
	 * fit in the REFREC field (16-19) of TIMING_CFG_1, but the
	 * 83xx controller has an extended REFREC field of three bits.
	 * The controller automatically adds 8 clocks to this value,
	 * so preadjust it down 8 first before splitting it up.
	 */
	trfc_low = (trfc_clk - 8) & 0xf;
	trfc_high = ((trfc_clk - 8) >> 4) & 0x3;
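
	/*
	 * Illustrative example: trfc_clk = 27 clocks becomes 19 after the
	 * -8 adjustment, so trfc_low = 0x3 and trfc_high = 0x1; the
	 * controller adds the 8 clocks back when timing refreshes.
	 */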

	ddr->timing_cfg_1 =
	    (((picos_to_clk(spd.trp * 250) & 0x07) << 28 ) |	/* PRETOACT */
	     ((picos_to_clk(spd.tras * 1000) & 0x0f ) << 24 ) |	/* ACTTOPRE */
	     (trcd_clk << 20 ) |				/* ACTTORW */
	     (caslat_ctrl << 16 ) |				/* CASLAT */
	     (trfc_low << 12 ) |				/* REFREC */
	     ((twr_clk & 0x07) << 8) |				/* WRREC */
	     ((picos_to_clk(spd.trrd * 250) & 0x07) << 4) |	/* ACTTOACT */
	     ((twtr_clk & 0x07) << 0)				/* WRTORD */
	    );

	/*
	 * Additive Latency
	 * For DDR I, 0.
	 * For DDR II, with ODT enabled, use "a value" less than ACTTORW,
	 * which comes from Trcd, and also note that:
	 *	add_lat + caslat must be >= 4
	 */
	add_lat = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2
	    && (odt_wr_cfg || odt_rd_cfg)
	    && (caslat < 4)) {
		add_lat = trcd_clk - 1;
		if ((add_lat + caslat) < 4) {
			add_lat = 0;
		}
	}
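
	/*
	 * Illustrative example: with ODT enabled, caslat = 3 and
	 * trcd_clk = 4, the code picks add_lat = 3; since 3 + 3 >= 4 the
	 * value is kept, otherwise additive latency falls back to 0.
	 */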

	/*
	 * Write Data Delay
	 * Historically 0x2 == 4/8 clock delay.
	 * Empirically, 0x3 == 6/8 clock delay is suggested for DDR I 266.
	 */
	wr_data_delay = 2;

	/*
	 * Write Latency
	 * Read to Precharge
	 * Minimum CKE Pulse Width.
	 * Four Activate Window
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		/*
		 * This is a lie.  It should really be 1, but if it is
		 * set to 1, bits overlap into the old controller's
		 * otherwise unused ACSM field.  If we leave it 0, then
		 * the HW will magically treat it as 1 for DDR 1.  Oh Yea.
		 */
		wr_lat = 0;

		trtp_clk = 2;		/* By the book. */
		cke_min_clk = 1;	/* By the book. */
		four_act = 1;		/* By the book. */

	} else {
		wr_lat = caslat - 1;

		/* Convert SPD value from quarter nanos to picos. */
		trtp_clk = picos_to_clk(spd.trtp * 250);

		cke_min_clk = 3;	/* By the book. */
		four_act = picos_to_clk(37500);	/* By the book. 1k pages? */
	}

	/*
	 * Empirically set ~MCAS-to-preamble override for DDR 2.
	 * Your mileage will vary.
	 */
	cpo = 0;
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		if (effective_data_rate == 266 || effective_data_rate == 333) {
			cpo = 0x7;	/* READ_LAT + 5/4 */
		} else if (effective_data_rate == 400) {
			cpo = 0x9;	/* READ_LAT + 7/4 */
		} else {
			/* Automatic calibration */
			cpo = 0x1f;
		}
	}

	ddr->timing_cfg_2 = (0
		| ((add_lat & 0x7) << 28)		/* ADD_LAT */
		| ((cpo & 0x1f) << 23)			/* CPO */
		| ((wr_lat & 0x7) << 19)		/* WR_LAT */
		| ((trtp_clk & 0x7) << 13)		/* RD_TO_PRE */
		| ((wr_data_delay & 0x7) << 10)		/* WR_DATA_DELAY */
		| ((cke_min_clk & 0x7) << 6)		/* CKE_PLS */
		| ((four_act & 0x1f) << 0)		/* FOUR_ACT */
		);

	debug("DDR:timing_cfg_1=0x%08x\n", ddr->timing_cfg_1);
	debug("DDR:timing_cfg_2=0x%08x\n", ddr->timing_cfg_2);

	/* Check DIMM data bus width */
	if (spd.dataw_lsb == 0x20) {
		burstlen = 0x03;	/* 32 bit data bus, burst len is 8 */
		printf("\n DDR DIMM: data bus width is 32 bit");
	} else {
		burstlen = 0x02;	/* Others act as 64 bit bus, burst len is 4 */
		printf("\n DDR DIMM: data bus width is 64 bit");
	}

	/* Is this an ECC DDR chip? */
	if (spd.config == 0x02)
		printf(" with ECC\n");
	else
		printf(" without ECC\n");

	/*
	 * Burst length is always 4 for a 64 bit data bus and 8 for a 32 bit
	 * data bus; burst type is sequential.
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR) {
		switch (caslat) {
		case 1:
			ddr->sdram_mode = 0x50 | burstlen;	/* CL=1.5 */
			break;
		case 2:
			ddr->sdram_mode = 0x20 | burstlen;	/* CL=2.0 */
			break;
		case 3:
			ddr->sdram_mode = 0x60 | burstlen;	/* CL=2.5 */
			break;
		case 4:
			ddr->sdram_mode = 0x30 | burstlen;	/* CL=3.0 */
			break;
		default:
			printf("DDR:only CL 1.5, 2.0, 2.5, 3.0 is supported\n");
			return 0;
		}
	} else {
		mode_odt_enable = 0x0;	/* Default disabled */
		if (odt_wr_cfg || odt_rd_cfg) {
			/*
			 * Bits 6 and 2 in Extended MRS(1)
			 * Bit 2 == 0x04 == 75 Ohm, with 2 DIMM modules.
			 * Bit 6 == 0x40 == 150 Ohm, with 1 DIMM module.
			 */
			mode_odt_enable = 0x40;	/* 150 Ohm */
		}

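		/*
		 * Register layout note (as used below): the upper 16 bits of
		 * DDR_SDRAM_MODE program the extended mode register (EMRS1:
		 * DQS disable, additive latency, ODT strength) and the lower
		 * 16 bits program the base mode register (write recovery,
		 * CAS latency, burst length).
		 */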
		ddr->sdram_mode =
		    (0
		     | (1 << (16 + 10))		/* DQS Differential disable */
		     | (add_lat << (16 + 3))	/* Additive Latency in EMRS1 */
		     | (mode_odt_enable << 16)	/* ODT Enable in EMRS1 */
		     | ((twr_clk >> 1) << 9)	/* Write Recovery Autopre */
		     | (caslat << 4)		/* caslat */
		     | (burstlen << 0)		/* Burst length */
		    );
	}
	debug("DDR:sdram_mode=0x%08x\n", ddr->sdram_mode);

	/*
	 * Clear EMRS2 and EMRS3.
	 */
	ddr->sdram_mode2 = 0;
	debug("DDR: sdram_mode2 = 0x%08x\n", ddr->sdram_mode2);

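	/*
	 * SPD refresh byte note: bit 7 flags self-refresh support, which is
	 * why each rate appears twice below (e.g. 0x02 and 0x82 both mean
	 * 7.8 us); the low bits select the refresh period converted to
	 * controller clocks here.
	 */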
	switch (spd.refresh) {
	case 0x00:
	case 0x80:
		refresh_clk = picos_to_clk(15625000);
		break;
	case 0x01:
	case 0x81:
		refresh_clk = picos_to_clk(3900000);
		break;
	case 0x02:
	case 0x82:
		refresh_clk = picos_to_clk(7800000);
		break;
	case 0x03:
	case 0x83:
		refresh_clk = picos_to_clk(31300000);
		break;
	case 0x04:
	case 0x84:
		refresh_clk = picos_to_clk(62500000);
		break;
	case 0x05:
	case 0x85:
		refresh_clk = picos_to_clk(125000000);
		break;
	default:
		refresh_clk = 0x512;
		break;
	}

	/*
	 * Set BSTOPRE to 0x100 for page mode.
	 * If auto-precharge is used, set BSTOPRE = 0.
	 */
	ddr->sdram_interval = ((refresh_clk & 0x3fff) << 16) | 0x100;
	debug("DDR:sdram_interval=0x%08x\n", ddr->sdram_interval);

	/*
	 * SDRAM Cfg 2
	 */
	odt_cfg = 0;
	if (odt_rd_cfg | odt_wr_cfg) {
		odt_cfg = 0x2;		/* ODT to IOs during reads */
	}
	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
		ddr->sdram_cfg2 = (0
			| (0 << 26)		/* True DQS */
			| (odt_cfg << 21)	/* ODT only read */
			| (1 << 12)		/* 1 refresh at a time */
			);

		debug("DDR: sdram_cfg2 = 0x%08x\n", ddr->sdram_cfg2);
	}

#ifdef CFG_DDR_SDRAM_CLK_CNTL	/* Optional platform specific value */
	ddr->sdram_clk_cntl = CFG_DDR_SDRAM_CLK_CNTL;
#else
	/* SS_EN = 0, source synchronous disable
	 * CLK_ADJST = 0, MCK/MCK# is launched aligned with addr/cmd
	 */
	ddr->sdram_clk_cntl = 0x00000000;
#endif
	debug("DDR:sdram_clk_cntl=0x%08x\n", ddr->sdram_clk_cntl);

	asm("sync;isync");

	udelay(600);


	/*
	 * Figure out the settings for the sdram_cfg register.  Build up
	 * the value in 'sdram_cfg' before writing since the write into
	 * the register will actually enable the memory controller, and all
	 * settings must be done before enabling.
	 *
	 * sdram_cfg[0]   = 1 (ddr sdram logic enable)
	 * sdram_cfg[1]   = 1 (self-refresh-enable)
	 * sdram_cfg[5:7] = (SDRAM type = DDR SDRAM)
	 *			010 DDR 1 SDRAM
	 *			011 DDR 2 SDRAM
	 * sdram_cfg[12]  = 0 (32_BE = 0, 64 bit bus mode)
	 * sdram_cfg[13]  = 0 (8_BE = 0, 4-beat bursts)
	 */
	if (spd.mem_type == SPD_MEMTYPE_DDR)
		sdram_type = 2;
	else
		sdram_type = 3;

	sdram_cfg = (0
		| (1 << 31)			/* DDR enable */
		| (1 << 30)			/* Self refresh */
		| (sdram_type << 24)		/* SDRAM type */
		);

	/* sdram_cfg[3] = RD_EN - registered DIMM enable */
	if (spd.mod_attr & 0x02)
		sdram_cfg |= 0x10000000;

	/* The DIMM is 32bit width */
	if (spd.dataw_lsb == 0x20)
		sdram_cfg |= 0x000C0000;

	ddrc_ecc_enable = 0;
#if defined(CONFIG_DDR_ECC)
	/* Enable ECC with sdram_cfg[2] */
	if (spd.config == 0x02) {
		sdram_cfg |= 0x20000000;
		ddrc_ecc_enable = 1;
		/* disable error detection */
		ddr->err_disable = ~ECC_ERROR_ENABLE;
		/* set single bit error threshold to maximum value,
		 * reset counter to zero */
		ddr->err_sbe = (255 << ECC_ERROR_MAN_SBET_SHIFT) |
				(0 << ECC_ERROR_MAN_SBEC_SHIFT);
	}

	debug("DDR:err_disable=0x%08x\n", ddr->err_disable);
	debug("DDR:err_sbe=0x%08x\n", ddr->err_sbe);
#endif
	printf(" DDRC ECC mode: %s\n", ddrc_ecc_enable ? "ON" : "OFF");

#if defined(CONFIG_DDR_2T_TIMING)
	/*
	 * Enable 2T timing by setting sdram_cfg[16].
	 */
	sdram_cfg |= SDRAM_CFG_2T_EN;
#endif
	/* Enable controller, and GO! */
	ddr->sdram_cfg = sdram_cfg;
	asm("sync;isync");
	udelay(500);

	debug("DDR:sdram_cfg=0x%08x\n", ddr->sdram_cfg);
	return memsize;	/* in MBytes */
}
#endif /* CONFIG_SPD_EEPROM */

#if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRC)
/*
 * Use the timebase counter; get_timer() is not available
 * at this point of initialization yet.
 */
static __inline__ unsigned long get_tbms (void)
{
	unsigned long tbl;
	unsigned long tbu1, tbu2;
	unsigned long ms;
	unsigned long long tmp;

	ulong tbclk = get_tbclk();

	/* get the timebase ticks */
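	/*
	 * Read TBU, TBL, then TBU again and retry if TBU changed, so a
	 * carry from TBL into TBU between the reads cannot produce a torn
	 * 64-bit timebase value.
	 */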
	do {
		asm volatile ("mftbu %0":"=r" (tbu1):);
		asm volatile ("mftb %0":"=r" (tbl):);
		asm volatile ("mftbu %0":"=r" (tbu2):);
	} while (tbu1 != tbu2);

	/* convert ticks to ms */
	tmp = (unsigned long long)(tbu1);
	tmp = (tmp << 32);
	tmp += (unsigned long long)(tbl);
	ms = tmp / (tbclk / 1000);

	return ms;
}

/*
 * Initialize all of memory for ECC, then enable errors.
 */
/* #define CONFIG_DDR_ECC_INIT_VIA_DMA */
void ddr_enable_ecc(unsigned int dram_size)
{
	volatile immap_t *immap = (immap_t *)CFG_IMMR;
	volatile ddr83xx_t *ddr = &immap->ddr;
	unsigned long t_start, t_end;
	register u64 *p;
	register uint size;
	unsigned int pattern[2];
#if defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
	uint i;
#endif
	icache_enable();
	t_start = get_tbms();
	pattern[0] = 0xdeadbeef;
	pattern[1] = 0xdeadbeef;

#if !defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
	debug("ddr init: CPU FP write method\n");
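	/*
	 * ppcDWstore() issues a single 64-bit (floating-point) store, so
	 * every ECC word is written in one full-width access and the
	 * controller never has to read-modify-write uninitialized memory.
	 */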
	size = dram_size;
	for (p = 0; p < (u64 *)(size); p++) {
		ppcDWstore((u32 *)p, pattern);
	}
	__asm__ __volatile__ ("sync");
#else
	debug("ddr init: DMA method\n");
	size = 0x2000;
	for (p = 0; p < (u64 *)(size); p++) {
		ppcDWstore((u32 *)p, pattern);
	}
	__asm__ __volatile__ ("sync");
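
	/*
	 * Doubling strategy (as coded below): the first 8 KB initialized by
	 * the CPU above is copied to the next 8 KB, the resulting 16 KB to
	 * the next 16 KB, and so on, so each DMA transfer doubles the
	 * ECC-initialized region up to 8 MB; the loop then fills the rest of
	 * DRAM in 8 MB chunks.
	 */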

	/* Initialise DMA for direct transfer */
	dma_init();
	/* Start DMA to transfer */
	dma_xfer((uint *)0x2000, 0x2000, (uint *)0);		/* 8K */
	dma_xfer((uint *)0x4000, 0x4000, (uint *)0);		/* 16K */
	dma_xfer((uint *)0x8000, 0x8000, (uint *)0);		/* 32K */
	dma_xfer((uint *)0x10000, 0x10000, (uint *)0);		/* 64K */
	dma_xfer((uint *)0x20000, 0x20000, (uint *)0);		/* 128K */
	dma_xfer((uint *)0x40000, 0x40000, (uint *)0);		/* 256K */
	dma_xfer((uint *)0x80000, 0x80000, (uint *)0);		/* 512K */
	dma_xfer((uint *)0x100000, 0x100000, (uint *)0);	/* 1M */
	dma_xfer((uint *)0x200000, 0x200000, (uint *)0);	/* 2M */
	dma_xfer((uint *)0x400000, 0x400000, (uint *)0);	/* 4M */

	for (i = 1; i < dram_size / 0x800000; i++) {
		dma_xfer((uint *)(0x800000 * i), 0x800000, (uint *)0);
	}
#endif

	t_end = get_tbms();
	icache_disable();

	debug("\nREADY!!\n");
	debug("ddr init duration: %ld ms\n", t_end - t_start);

	/* Clear All ECC Errors */
	if ((ddr->err_detect & ECC_ERROR_DETECT_MME) == ECC_ERROR_DETECT_MME)
		ddr->err_detect |= ECC_ERROR_DETECT_MME;
	if ((ddr->err_detect & ECC_ERROR_DETECT_MBE) == ECC_ERROR_DETECT_MBE)
		ddr->err_detect |= ECC_ERROR_DETECT_MBE;
	if ((ddr->err_detect & ECC_ERROR_DETECT_SBE) == ECC_ERROR_DETECT_SBE)
		ddr->err_detect |= ECC_ERROR_DETECT_SBE;
	if ((ddr->err_detect & ECC_ERROR_DETECT_MSE) == ECC_ERROR_DETECT_MSE)
		ddr->err_detect |= ECC_ERROR_DETECT_MSE;

	/* Disable ECC-Interrupts */
	ddr->err_int_en &= ECC_ERR_INT_DISABLE;

	/* Enable errors for ECC */
	ddr->err_disable &= ECC_ERROR_ENABLE;

	__asm__ __volatile__ ("sync");
	__asm__ __volatile__ ("isync");
}
#endif /* CONFIG_DDR_ECC */