]>
Commit | Line | Data |
---|---|---|
83d290c5 | 1 | // SPDX-License-Identifier: Intel |
b829f12a BM |
2 | /* |
3 | * Copyright (C) 2013, Intel Corporation | |
4 | * Copyright (C) 2015, Bin Meng <bmeng.cn@gmail.com> | |
5 | * | |
6 | * Ported from Intel released Quark UEFI BIOS | |
7 | * QuarkSocPkg/QuarkNorthCluster/MemoryInit/Pei | |
b829f12a BM |
8 | */ |
9 | ||
10 | #include <common.h> | |
11 | #include <pci.h> | |
12 | #include <asm/arch/device.h> | |
13 | #include <asm/arch/mrc.h> | |
14 | #include <asm/arch/msg_port.h> | |
15 | #include "mrc_util.h" | |
16 | #include "hte.h" | |
17 | #include "smc.h" | |
18 | ||
b829f12a BM |
/* t_ck clock period in picoseconds per speed index 800, 1066, 1333 */
static const uint32_t t_ck[3] = {
	2500,	/* DDR3-800: 1/400MHz */
	1875,	/* DDR3-1066: 1/533MHz */
	1500	/* DDR3-1333: 1/667MHz */
};

/*
 * Global variables
 *
 * Per-platform static delay tables, indexed by PLATFORM_ID
 * (see the set_wclk()/set_wcmd()/set_wctl() calls in ddrphy_init()).
 * The BACKUP_* variants provide fixed fallback values used instead of
 * trained results when the corresponding BACKUP_* option is compiled in.
 */
static const uint16_t ddr_wclk[] = {193, 158};
#ifdef BACKUP_WCTL
/* Fixed write-control delay override */
static const uint16_t ddr_wctl[] = {1, 217};
#endif
#ifdef BACKUP_WCMD
/* Fixed write-command delay override */
static const uint16_t ddr_wcmd[] = {1, 220};
#endif

#ifdef BACKUP_RCVN
/* Fixed receive-enable delay override */
static const uint16_t ddr_rcvn[] = {129, 498};
#endif

#ifdef BACKUP_WDQS
/* Fixed write-DQS delay override */
static const uint16_t ddr_wdqs[] = {65, 289};
#endif

#ifdef BACKUP_RDQS
/* Fixed read-DQS delay override */
static const uint8_t ddr_rdqs[] = {32, 24};
#endif

#ifdef BACKUP_WDQ
/* Fixed write-DQ delay override */
static const uint16_t ddr_wdq[] = {32, 257};
#endif
50 | ||
/*
 * Stop self refresh driven by MCU
 *
 * Writes the PMSTS_DISR bit(s) of the power management status register
 * (PMSTS) in the memory controller via the message port, which clears
 * the per-channel self-refresh state.
 */
void clear_self_refresh(struct mrc_params *mrc_params)
{
	ENTERFN();

	/* clear the PMSTS Channel Self Refresh bits */
	mrc_write_mask(MEM_CTLR, PMSTS, PMSTS_DISR, PMSTS_DISR);

	LEAVEFN();
}
61 | ||
/*
 * It will initialize timing registers in the MCU (DTR0..DTR4)
 *
 * All DRAM timings are derived from the selected speed grade
 * (mrc_params->ddr_speed, an index into t_ck[]) and the SPD-style
 * parameters in mrc_params->params. Register fields are programmed
 * as "value minus hardware base offset" (e.g. tCL is stored as
 * tcl - 5), per the Quark MCU register layout.
 */
void prog_ddr_timing_control(struct mrc_params *mrc_params)
{
	uint8_t tcl, wl;
	uint8_t trp, trcd, tras, twr, twtr, trrd, trtp, tfaw;
	uint32_t tck;
	u32 dtr0, dtr1, dtr2, dtr3, dtr4;
	u32 tmp1, tmp2;

	ENTERFN();

	/* mcu_init starts */
	mrc_post_code(0x02, 0x00);

	/* Read-modify-write: preserve reserved bits of DTR0..DTR4 */
	dtr0 = msg_port_read(MEM_CTLR, DTR0);
	dtr1 = msg_port_read(MEM_CTLR, DTR1);
	dtr2 = msg_port_read(MEM_CTLR, DTR2);
	dtr3 = msg_port_read(MEM_CTLR, DTR3);
	dtr4 = msg_port_read(MEM_CTLR, DTR4);

	tck = t_ck[mrc_params->ddr_speed];	/* Clock in picoseconds */
	tcl = mrc_params->params.cl;		/* CAS latency in clocks */
	trp = tcl;	/* Per CAT MRC */
	trcd = tcl;	/* Per CAT MRC */
	tras = MCEIL(mrc_params->params.ras, tck);

	/* Per JEDEC: tWR=15000ps DDR2/3 from 800-1600 */
	twr = MCEIL(15000, tck);

	/* Convert picosecond parameters to DRAM clocks (rounded up) */
	twtr = MCEIL(mrc_params->params.wtr, tck);
	trrd = MCEIL(mrc_params->params.rrd, tck);
	trtp = 4;	/* Valid for 800 and 1066, use 5 for 1333 */
	tfaw = MCEIL(mrc_params->params.faw, tck);

	/* Write latency: 5 clocks at 800, +1 per speed step */
	wl = 5 + mrc_params->ddr_speed;

	dtr0 &= ~DTR0_DFREQ_MASK;
	dtr0 |= mrc_params->ddr_speed;
	dtr0 &= ~DTR0_TCL_MASK;
	/* tmp1 (tcl - 5) is reused below for the DTR4 gate timings */
	tmp1 = tcl - 5;
	dtr0 |= ((tcl - 5) << 12);
	dtr0 &= ~DTR0_TRP_MASK;
	dtr0 |= ((trp - 5) << 4);	/* 5 bit DRAM Clock */
	dtr0 &= ~DTR0_TRCD_MASK;
	dtr0 |= ((trcd - 5) << 8);	/* 5 bit DRAM Clock */

	dtr1 &= ~DTR1_TWCL_MASK;
	/* tmp2 (wl - 3) is reused below for the DTR4 gate timings */
	tmp2 = wl - 3;
	dtr1 |= (wl - 3);
	dtr1 &= ~DTR1_TWTP_MASK;
	dtr1 |= ((wl + 4 + twr - 14) << 8);	/* Change to tWTP */
	dtr1 &= ~DTR1_TRTP_MASK;
	dtr1 |= ((MMAX(trtp, 4) - 3) << 28);	/* 4 bit DRAM Clock */
	dtr1 &= ~DTR1_TRRD_MASK;
	dtr1 |= ((trrd - 4) << 24);	/* 4 bit DRAM Clock */
	dtr1 &= ~DTR1_TCMD_MASK;
	dtr1 |= (1 << 4);
	dtr1 &= ~DTR1_TRAS_MASK;
	dtr1 |= ((tras - 14) << 20);	/* 6 bit DRAM Clock */
	dtr1 &= ~DTR1_TFAW_MASK;
	dtr1 |= ((((tfaw + 1) >> 1) - 5) << 16);/* 4 bit DRAM Clock */
	/* Set 4 Clock CAS to CAS delay (multi-burst) */
	dtr1 &= ~DTR1_TCCD_MASK;

	/* Rank-to-rank turnaround delays */
	dtr2 &= ~DTR2_TRRDR_MASK;
	dtr2 |= 1;
	dtr2 &= ~DTR2_TWWDR_MASK;
	dtr2 |= (2 << 8);
	dtr2 &= ~DTR2_TRWDR_MASK;
	dtr2 |= (2 << 16);

	dtr3 &= ~DTR3_TWRDR_MASK;
	dtr3 |= 2;
	dtr3 &= ~DTR3_TXXXX_MASK;
	dtr3 |= (2 << 4);

	dtr3 &= ~DTR3_TRWSR_MASK;
	/*
	 * Both branches program the same value; kept as written in
	 * the reference MRC. Nothing is programmed for 1333.
	 */
	if (mrc_params->ddr_speed == DDRFREQ_800) {
		/* Extended RW delay (+1) */
		dtr3 |= ((tcl - 5 + 1) << 8);
	} else if (mrc_params->ddr_speed == DDRFREQ_1066) {
		/* Extended RW delay (+1) */
		dtr3 |= ((tcl - 5 + 1) << 8);
	}

	dtr3 &= ~DTR3_TWRSR_MASK;
	dtr3 |= ((4 + wl + twtr - 11) << 13);

	dtr3 &= ~DTR3_TXP_MASK;
	if (mrc_params->ddr_speed == DDRFREQ_800)
		dtr3 |= ((MMAX(0, 1 - 1)) << 22);
	else
		dtr3 |= ((MMAX(0, 2 - 1)) << 22);

	dtr4 &= ~DTR4_WRODTSTRT_MASK;
	dtr4 |= 1;
	dtr4 &= ~DTR4_WRODTSTOP_MASK;
	dtr4 |= (1 << 4);
	/* Gate timings derived from tCL/WL captured in tmp1/tmp2 above */
	dtr4 &= ~DTR4_XXXX1_MASK;
	dtr4 |= ((1 + tmp1 - tmp2 + 2) << 8);
	dtr4 &= ~DTR4_XXXX2_MASK;
	dtr4 |= ((1 + tmp1 - tmp2 + 2) << 12);
	dtr4 &= ~(DTR4_ODTDIS | DTR4_TRGSTRDIS);

	/* Commit all five timing registers */
	msg_port_write(MEM_CTLR, DTR0, dtr0);
	msg_port_write(MEM_CTLR, DTR1, dtr1);
	msg_port_write(MEM_CTLR, DTR2, dtr2);
	msg_port_write(MEM_CTLR, DTR3, dtr3);
	msg_port_write(MEM_CTLR, DTR4, dtr4);

	LEAVEFN();
}
174 | ||
/*
 * Configure MCU before jedec init sequence
 *
 * Puts the memory controller into a quiescent, training-friendly state:
 * power-saving features off, out-of-order scheduling off, refresh off,
 * ZQ calibration off, and the rank-population register programmed from
 * mrc_params->rank_enables.
 */
void prog_decode_before_jedec(struct mrc_params *mrc_params)
{
	u32 drp;
	u32 drfc;
	u32 dcal;
	u32 dsch;
	u32 dpmc0;

	ENTERFN();

	/* Disable power saving features */
	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
	dpmc0 |= (DPMC0_CLKGTDIS | DPMC0_DISPWRDN);
	dpmc0 &= ~DPMC0_PCLSTO_MASK;
	dpmc0 &= ~DPMC0_DYNSREN;
	msg_port_write(MEM_CTLR, DPMC0, dpmc0);

	/* Disable out of order transactions */
	dsch = msg_port_read(MEM_CTLR, DSCH);
	dsch |= (DSCH_OOODIS | DSCH_NEWBYPDIS);
	msg_port_write(MEM_CTLR, DSCH, dsch);

	/* Disable issuing the REF command (tREFI interval = 0) */
	drfc = msg_port_read(MEM_CTLR, DRFC);
	drfc &= ~DRFC_TREFI_MASK;
	msg_port_write(MEM_CTLR, DRFC, drfc);

	/* Disable ZQ calibration short */
	dcal = msg_port_read(MEM_CTLR, DCAL);
	dcal &= ~DCAL_ZQCINT_MASK;
	dcal &= ~DCAL_SRXZQCL_MASK;
	msg_port_write(MEM_CTLR, DCAL, dcal);

	/*
	 * Training performed in address mode 0, rank population has limited
	 * impact, however simulator complains if enabled non-existing rank.
	 */
	drp = 0;
	if (mrc_params->rank_enables & 1)
		drp |= DRP_RKEN0;
	if (mrc_params->rank_enables & 2)
		drp |= DRP_RKEN1;
	msg_port_write(MEM_CTLR, DRP, drp);

	LEAVEFN();
}
222 | ||
/*
 * After Cold Reset, BIOS should set COLDWAKE bit to 1 before
 * sending the WAKE message to the Dunit.
 *
 * For Standby Exit, or any other mode in which the DRAM is in
 * SR, this bit must be set to 0.
 *
 * The ordering here is fixed: COLDWAKE must be set before the wake
 * command, and the wake command must run before JEDEC init.
 */
void perform_ddr_reset(struct mrc_params *mrc_params)
{
	ENTERFN();

	/* Set COLDWAKE bit before sending the WAKE message */
	mrc_write_mask(MEM_CTLR, DRMC, DRMC_COLDWAKE, DRMC_COLDWAKE);

	/* Send wake command to DUNIT (MUST be done before JEDEC) */
	dram_wake_command();

	/*
	 * Set default value: enable ODT-mode only when no read ODT
	 * value has been configured (rd_odt_value == 0)
	 */
	msg_port_write(MEM_CTLR, DRMC,
		       mrc_params->rd_odt_value == 0 ? DRMC_ODTMODE : 0);

	LEAVEFN();
}
246 | ||
247 | ||
248 | /* | |
249 | * This function performs some initialization on the DDRIO unit. | |
250 | * This function is dependent on BOARD_ID, DDR_SPEED, and CHANNEL_ENABLES. | |
251 | */ | |
252 | void ddrphy_init(struct mrc_params *mrc_params) | |
253 | { | |
254 | uint32_t temp; | |
255 | uint8_t ch; /* channel counter */ | |
256 | uint8_t rk; /* rank counter */ | |
257 | uint8_t bl_grp; /* byte lane group counter (2 BLs per module) */ | |
258 | uint8_t bl_divisor = 1; /* byte lane divisor */ | |
259 | /* For DDR3 --> 0 == 800, 1 == 1066, 2 == 1333 */ | |
312cc39e | 260 | uint8_t speed = mrc_params->ddr_speed & 3; |
b829f12a BM |
261 | uint8_t cas; |
262 | uint8_t cwl; | |
263 | ||
264 | ENTERFN(); | |
265 | ||
266 | cas = mrc_params->params.cl; | |
267 | cwl = 5 + mrc_params->ddr_speed; | |
268 | ||
269 | /* ddrphy_init starts */ | |
270 | mrc_post_code(0x03, 0x00); | |
271 | ||
272 | /* | |
273 | * HSD#231531 | |
274 | * Make sure IOBUFACT is deasserted before initializing the DDR PHY | |
275 | * | |
276 | * HSD#234845 | |
277 | * Make sure WRPTRENABLE is deasserted before initializing the DDR PHY | |
278 | */ | |
279 | for (ch = 0; ch < NUM_CHANNELS; ch++) { | |
280 | if (mrc_params->channel_enables & (1 << ch)) { | |
281 | /* Deassert DDRPHY Initialization Complete */ | |
282 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
283 | CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET, |
284 | ~(1 << 20), 1 << 20); /* SPID_INIT_COMPLETE=0 */ | |
b829f12a BM |
285 | /* Deassert IOBUFACT */ |
286 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
287 | CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET, |
288 | ~(1 << 2), 1 << 2); /* IOBUFACTRST_N=0 */ | |
b829f12a BM |
289 | /* Disable WRPTR */ |
290 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
291 | CMDPTRREG + ch * DDRIOCCC_CH_OFFSET, |
292 | ~(1 << 0), 1 << 0); /* WRPTRENABLE=0 */ | |
b829f12a BM |
293 | } |
294 | } | |
295 | ||
296 | /* Put PHY in reset */ | |
312cc39e | 297 | mrc_alt_write_mask(DDRPHY, MASTERRSTN, 0, 1); |
b829f12a BM |
298 | |
299 | /* Initialize DQ01, DQ23, CMD, CLK-CTL, COMP modules */ | |
300 | ||
301 | /* STEP0 */ | |
302 | mrc_post_code(0x03, 0x10); | |
303 | for (ch = 0; ch < NUM_CHANNELS; ch++) { | |
304 | if (mrc_params->channel_enables & (1 << ch)) { | |
305 | /* DQ01-DQ23 */ | |
306 | for (bl_grp = 0; | |
312cc39e | 307 | bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2; |
b829f12a BM |
308 | bl_grp++) { |
309 | /* Analog MUX select - IO2xCLKSEL */ | |
310 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
311 | DQOBSCKEBBCTL + |
312 | bl_grp * DDRIODQ_BL_OFFSET + | |
313 | ch * DDRIODQ_CH_OFFSET, | |
314 | bl_grp ? 0 : (1 << 22), 1 << 22); | |
b829f12a BM |
315 | |
316 | /* ODT Strength */ | |
317 | switch (mrc_params->rd_odt_value) { | |
318 | case 1: | |
319 | temp = 0x3; | |
320 | break; /* 60 ohm */ | |
321 | case 2: | |
322 | temp = 0x3; | |
323 | break; /* 120 ohm */ | |
324 | case 3: | |
325 | temp = 0x3; | |
326 | break; /* 180 ohm */ | |
327 | default: | |
328 | temp = 0x3; | |
329 | break; /* 120 ohm */ | |
330 | } | |
331 | ||
332 | /* ODT strength */ | |
333 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
334 | B0RXIOBUFCTL + |
335 | bl_grp * DDRIODQ_BL_OFFSET + | |
336 | ch * DDRIODQ_CH_OFFSET, | |
337 | temp << 5, 0x60); | |
b829f12a BM |
338 | /* ODT strength */ |
339 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
340 | B1RXIOBUFCTL + |
341 | bl_grp * DDRIODQ_BL_OFFSET + | |
342 | ch * DDRIODQ_CH_OFFSET, | |
343 | temp << 5, 0x60); | |
b829f12a BM |
344 | |
345 | /* Dynamic ODT/DIFFAMP */ | |
312cc39e BM |
346 | temp = (cas << 24) | (cas << 16) | |
347 | (cas << 8) | (cas << 0); | |
b829f12a BM |
348 | switch (speed) { |
349 | case 0: | |
350 | temp -= 0x01010101; | |
351 | break; /* 800 */ | |
352 | case 1: | |
353 | temp -= 0x02020202; | |
354 | break; /* 1066 */ | |
355 | case 2: | |
356 | temp -= 0x03030303; | |
357 | break; /* 1333 */ | |
358 | case 3: | |
359 | temp -= 0x04040404; | |
360 | break; /* 1600 */ | |
361 | } | |
362 | ||
363 | /* Launch Time: ODT, DIFFAMP, ODT, DIFFAMP */ | |
364 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
365 | B01LATCTL1 + |
366 | bl_grp * DDRIODQ_BL_OFFSET + | |
367 | ch * DDRIODQ_CH_OFFSET, | |
368 | temp, 0x1f1f1f1f); | |
b829f12a BM |
369 | switch (speed) { |
370 | /* HSD#234715 */ | |
371 | case 0: | |
312cc39e | 372 | temp = (0x06 << 16) | (0x07 << 8); |
b829f12a BM |
373 | break; /* 800 */ |
374 | case 1: | |
312cc39e | 375 | temp = (0x07 << 16) | (0x08 << 8); |
b829f12a BM |
376 | break; /* 1066 */ |
377 | case 2: | |
312cc39e | 378 | temp = (0x09 << 16) | (0x0a << 8); |
b829f12a BM |
379 | break; /* 1333 */ |
380 | case 3: | |
312cc39e | 381 | temp = (0x0a << 16) | (0x0b << 8); |
b829f12a BM |
382 | break; /* 1600 */ |
383 | } | |
384 | ||
385 | /* On Duration: ODT, DIFFAMP */ | |
386 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
387 | B0ONDURCTL + |
388 | bl_grp * DDRIODQ_BL_OFFSET + | |
389 | ch * DDRIODQ_CH_OFFSET, | |
390 | temp, 0x003f3f00); | |
b829f12a BM |
391 | /* On Duration: ODT, DIFFAMP */ |
392 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
393 | B1ONDURCTL + |
394 | bl_grp * DDRIODQ_BL_OFFSET + | |
395 | ch * DDRIODQ_CH_OFFSET, | |
396 | temp, 0x003f3f00); | |
b829f12a BM |
397 | |
398 | switch (mrc_params->rd_odt_value) { | |
399 | case 0: | |
400 | /* override DIFFAMP=on, ODT=off */ | |
312cc39e | 401 | temp = (0x3f << 16) | (0x3f << 10); |
b829f12a BM |
402 | break; |
403 | default: | |
404 | /* override DIFFAMP=on, ODT=on */ | |
312cc39e | 405 | temp = (0x3f << 16) | (0x2a << 10); |
b829f12a BM |
406 | break; |
407 | } | |
408 | ||
409 | /* Override: DIFFAMP, ODT */ | |
410 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
411 | B0OVRCTL + |
412 | bl_grp * DDRIODQ_BL_OFFSET + | |
413 | ch * DDRIODQ_CH_OFFSET, | |
414 | temp, 0x003ffc00); | |
b829f12a BM |
415 | /* Override: DIFFAMP, ODT */ |
416 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
417 | B1OVRCTL + |
418 | bl_grp * DDRIODQ_BL_OFFSET + | |
419 | ch * DDRIODQ_CH_OFFSET, | |
420 | temp, 0x003ffc00); | |
b829f12a BM |
421 | |
422 | /* DLL Setup */ | |
423 | ||
424 | /* 1xCLK Domain Timings: tEDP,RCVEN,WDQS (PO) */ | |
425 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
426 | B0LATCTL0 + |
427 | bl_grp * DDRIODQ_BL_OFFSET + | |
428 | ch * DDRIODQ_CH_OFFSET, | |
429 | ((cas + 7) << 16) | ((cas - 4) << 8) | | |
430 | ((cwl - 2) << 0), 0x003f1f1f); | |
b829f12a | 431 | mrc_alt_write_mask(DDRPHY, |
312cc39e BM |
432 | B1LATCTL0 + |
433 | bl_grp * DDRIODQ_BL_OFFSET + | |
434 | ch * DDRIODQ_CH_OFFSET, | |
435 | ((cas + 7) << 16) | ((cas - 4) << 8) | | |
436 | ((cwl - 2) << 0), 0x003f1f1f); | |
b829f12a BM |
437 | |
438 | /* RCVEN Bypass (PO) */ | |
439 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
440 | B0RXIOBUFCTL + |
441 | bl_grp * DDRIODQ_BL_OFFSET + | |
442 | ch * DDRIODQ_CH_OFFSET, | |
443 | 0, 0x81); | |
b829f12a | 444 | mrc_alt_write_mask(DDRPHY, |
312cc39e BM |
445 | B1RXIOBUFCTL + |
446 | bl_grp * DDRIODQ_BL_OFFSET + | |
447 | ch * DDRIODQ_CH_OFFSET, | |
448 | 0, 0x81); | |
b829f12a BM |
449 | |
450 | /* TX */ | |
451 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
452 | DQCTL + |
453 | bl_grp * DDRIODQ_BL_OFFSET + | |
454 | ch * DDRIODQ_CH_OFFSET, | |
455 | 1 << 16, 1 << 16); | |
b829f12a | 456 | mrc_alt_write_mask(DDRPHY, |
312cc39e BM |
457 | B01PTRCTL1 + |
458 | bl_grp * DDRIODQ_BL_OFFSET + | |
459 | ch * DDRIODQ_CH_OFFSET, | |
460 | 1 << 8, 1 << 8); | |
b829f12a BM |
461 | |
462 | /* RX (PO) */ | |
463 | /* Internal Vref Code, Enable#, Ext_or_Int (1=Ext) */ | |
464 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
465 | B0VREFCTL + |
466 | bl_grp * DDRIODQ_BL_OFFSET + | |
467 | ch * DDRIODQ_CH_OFFSET, | |
468 | (0x03 << 2) | (0x0 << 1) | (0x0 << 0), | |
469 | 0xff); | |
b829f12a BM |
470 | /* Internal Vref Code, Enable#, Ext_or_Int (1=Ext) */ |
471 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
472 | B1VREFCTL + |
473 | bl_grp * DDRIODQ_BL_OFFSET + | |
474 | ch * DDRIODQ_CH_OFFSET, | |
475 | (0x03 << 2) | (0x0 << 1) | (0x0 << 0), | |
476 | 0xff); | |
b829f12a BM |
477 | /* Per-Bit De-Skew Enable */ |
478 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
479 | B0RXIOBUFCTL + |
480 | bl_grp * DDRIODQ_BL_OFFSET + | |
481 | ch * DDRIODQ_CH_OFFSET, | |
482 | 0, 0x10); | |
b829f12a BM |
483 | /* Per-Bit De-Skew Enable */ |
484 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
485 | B1RXIOBUFCTL + |
486 | bl_grp * DDRIODQ_BL_OFFSET + | |
487 | ch * DDRIODQ_CH_OFFSET, | |
488 | 0, 0x10); | |
b829f12a BM |
489 | } |
490 | ||
491 | /* CLKEBB */ | |
492 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
493 | CMDOBSCKEBBCTL + ch * DDRIOCCC_CH_OFFSET, |
494 | 0, 1 << 23); | |
b829f12a BM |
495 | |
496 | /* Enable tristate control of cmd/address bus */ | |
497 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
498 | CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET, |
499 | 0, 0x03); | |
b829f12a BM |
500 | |
501 | /* ODT RCOMP */ | |
502 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
503 | CMDRCOMPODT + ch * DDRIOCCC_CH_OFFSET, |
504 | (0x03 << 5) | (0x03 << 0), 0x3ff); | |
b829f12a BM |
505 | |
506 | /* CMDPM* registers must be programmed in this order */ | |
507 | ||
508 | /* Turn On Delays: SFR (regulator), MPLL */ | |
509 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
510 | CMDPMDLYREG4 + ch * DDRIOCCC_CH_OFFSET, |
511 | 0xffffffff, 0xffffffff); | |
b829f12a BM |
512 | /* |
513 | * Delays: ASSERT_IOBUFACT_to_ALLON0_for_PM_MSG_3, | |
514 | * VREG (MDLL) Turn On, ALLON0_to_DEASSERT_IOBUFACT | |
515 | * for_PM_MSG_gt0, MDLL Turn On | |
516 | */ | |
517 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
518 | CMDPMDLYREG3 + ch * DDRIOCCC_CH_OFFSET, |
519 | 0xfffff616, 0xffffffff); | |
b829f12a BM |
520 | /* MPLL Divider Reset Delays */ |
521 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
522 | CMDPMDLYREG2 + ch * DDRIOCCC_CH_OFFSET, |
523 | 0xffffffff, 0xffffffff); | |
b829f12a BM |
524 | /* Turn Off Delays: VREG, Staggered MDLL, MDLL, PI */ |
525 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
526 | CMDPMDLYREG1 + ch * DDRIOCCC_CH_OFFSET, |
527 | 0xffffffff, 0xffffffff); | |
b829f12a BM |
528 | /* Turn On Delays: MPLL, Staggered MDLL, PI, IOBUFACT */ |
529 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
530 | CMDPMDLYREG0 + ch * DDRIOCCC_CH_OFFSET, |
531 | 0xffffffff, 0xffffffff); | |
b829f12a BM |
532 | /* Allow PUnit signals */ |
533 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
534 | CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET, |
535 | (0x6 << 8) | (0x1 << 6) | (0x4 << 0), | |
536 | 0xffe00f4f); | |
b829f12a BM |
537 | /* DLL_VREG Bias Trim, VREF Tuning for DLL_VREG */ |
538 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
539 | CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET, |
540 | (0x3 << 4) | (0x7 << 0), 0x7f); | |
b829f12a BM |
541 | |
542 | /* CLK-CTL */ | |
543 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
544 | CCOBSCKEBBCTL + ch * DDRIOCCC_CH_OFFSET, |
545 | 0, 1 << 24); /* CLKEBB */ | |
b829f12a BM |
546 | /* Buffer Enable: CS,CKE,ODT,CLK */ |
547 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
548 | CCCFGREG0 + ch * DDRIOCCC_CH_OFFSET, |
549 | 0x1f, 0x000ffff1); | |
b829f12a BM |
550 | /* ODT RCOMP */ |
551 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
552 | CCRCOMPODT + ch * DDRIOCCC_CH_OFFSET, |
553 | (0x03 << 8) | (0x03 << 0), 0x00001f1f); | |
b829f12a BM |
554 | /* DLL_VREG Bias Trim, VREF Tuning for DLL_VREG */ |
555 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
556 | CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET, |
557 | (0x3 << 4) | (0x7 << 0), 0x7f); | |
b829f12a BM |
558 | |
559 | /* | |
560 | * COMP (RON channel specific) | |
561 | * - DQ/DQS/DM RON: 32 Ohm | |
562 | * - CTRL/CMD RON: 27 Ohm | |
563 | * - CLK RON: 26 Ohm | |
564 | */ | |
565 | /* RCOMP Vref PU/PD */ | |
566 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
567 | DQVREFCH0 + ch * DDRCOMP_CH_OFFSET, |
568 | (0x08 << 24) | (0x03 << 16), 0x3f3f0000); | |
b829f12a BM |
569 | /* RCOMP Vref PU/PD */ |
570 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
571 | CMDVREFCH0 + ch * DDRCOMP_CH_OFFSET, |
572 | (0x0C << 24) | (0x03 << 16), 0x3f3f0000); | |
b829f12a BM |
573 | /* RCOMP Vref PU/PD */ |
574 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
575 | CLKVREFCH0 + ch * DDRCOMP_CH_OFFSET, |
576 | (0x0F << 24) | (0x03 << 16), 0x3f3f0000); | |
b829f12a BM |
577 | /* RCOMP Vref PU/PD */ |
578 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
579 | DQSVREFCH0 + ch * DDRCOMP_CH_OFFSET, |
580 | (0x08 << 24) | (0x03 << 16), 0x3f3f0000); | |
b829f12a BM |
581 | /* RCOMP Vref PU/PD */ |
582 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
583 | CTLVREFCH0 + ch * DDRCOMP_CH_OFFSET, |
584 | (0x0C << 24) | (0x03 << 16), 0x3f3f0000); | |
b829f12a BM |
585 | |
586 | /* DQS Swapped Input Enable */ | |
587 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
588 | COMPEN1CH0 + ch * DDRCOMP_CH_OFFSET, |
589 | (1 << 19) | (1 << 17), 0xc00ac000); | |
b829f12a BM |
590 | |
591 | /* ODT VREF = 1.5 x 274/360+274 = 0.65V (code of ~50) */ | |
592 | /* ODT Vref PU/PD */ | |
593 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
594 | DQVREFCH0 + ch * DDRCOMP_CH_OFFSET, |
595 | (0x32 << 8) | (0x03 << 0), 0x00003f3f); | |
b829f12a BM |
596 | /* ODT Vref PU/PD */ |
597 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
598 | DQSVREFCH0 + ch * DDRCOMP_CH_OFFSET, |
599 | (0x32 << 8) | (0x03 << 0), 0x00003f3f); | |
b829f12a BM |
600 | /* ODT Vref PU/PD */ |
601 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
602 | CLKVREFCH0 + ch * DDRCOMP_CH_OFFSET, |
603 | (0x0E << 8) | (0x05 << 0), 0x00003f3f); | |
b829f12a BM |
604 | |
605 | /* | |
606 | * Slew rate settings are frequency specific, | |
607 | * numbers below are for 800Mhz (speed == 0) | |
608 | * - DQ/DQS/DM/CLK SR: 4V/ns, | |
609 | * - CTRL/CMD SR: 1.5V/ns | |
610 | */ | |
312cc39e BM |
611 | temp = (0x0e << 16) | (0x0e << 12) | (0x08 << 8) | |
612 | (0x0b << 4) | (0x0b << 0); | |
b829f12a BM |
613 | /* DCOMP Delay Select: CTL,CMD,CLK,DQS,DQ */ |
614 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
615 | DLYSELCH0 + ch * DDRCOMP_CH_OFFSET, |
616 | temp, 0x000fffff); | |
b829f12a BM |
617 | /* TCO Vref CLK,DQS,DQ */ |
618 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
619 | TCOVREFCH0 + ch * DDRCOMP_CH_OFFSET, |
620 | (0x05 << 16) | (0x05 << 8) | (0x05 << 0), | |
621 | 0x003f3f3f); | |
b829f12a BM |
622 | /* ODTCOMP CMD/CTL PU/PD */ |
623 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
624 | CCBUFODTCH0 + ch * DDRCOMP_CH_OFFSET, |
625 | (0x03 << 8) | (0x03 << 0), | |
626 | 0x00001f1f); | |
b829f12a BM |
627 | /* COMP */ |
628 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
629 | COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET, |
630 | 0, 0xc0000100); | |
b829f12a BM |
631 | |
632 | #ifdef BACKUP_COMPS | |
633 | /* DQ COMP Overrides */ | |
634 | /* RCOMP PU */ | |
635 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
636 | DQDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
637 | (1 << 31) | (0x0a << 16), | |
638 | 0x801f0000); | |
b829f12a BM |
639 | /* RCOMP PD */ |
640 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
641 | DQDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
642 | (1 << 31) | (0x0a << 16), | |
643 | 0x801f0000); | |
b829f12a BM |
644 | /* DCOMP PU */ |
645 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
646 | DQDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
647 | (1 << 31) | (0x10 << 16), | |
648 | 0x801f0000); | |
b829f12a BM |
649 | /* DCOMP PD */ |
650 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
651 | DQDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
652 | (1 << 31) | (0x10 << 16), | |
653 | 0x801f0000); | |
b829f12a BM |
654 | /* ODTCOMP PU */ |
655 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
656 | DQODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
657 | (1 << 31) | (0x0b << 16), | |
658 | 0x801f0000); | |
b829f12a BM |
659 | /* ODTCOMP PD */ |
660 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
661 | DQODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
662 | (1 << 31) | (0x0b << 16), | |
663 | 0x801f0000); | |
b829f12a BM |
664 | /* TCOCOMP PU */ |
665 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
666 | DQTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
667 | 1 << 31, 1 << 31); | |
b829f12a BM |
668 | /* TCOCOMP PD */ |
669 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
670 | DQTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
671 | 1 << 31, 1 << 31); | |
b829f12a BM |
672 | |
673 | /* DQS COMP Overrides */ | |
674 | /* RCOMP PU */ | |
675 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
676 | DQSDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
677 | (1 << 31) | (0x0a << 16), | |
678 | 0x801f0000); | |
b829f12a BM |
679 | /* RCOMP PD */ |
680 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
681 | DQSDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
682 | (1 << 31) | (0x0a << 16), | |
683 | 0x801f0000); | |
b829f12a BM |
684 | /* DCOMP PU */ |
685 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
686 | DQSDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
687 | (1 << 31) | (0x10 << 16), | |
688 | 0x801f0000); | |
b829f12a BM |
689 | /* DCOMP PD */ |
690 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
691 | DQSDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
692 | (1 << 31) | (0x10 << 16), | |
693 | 0x801f0000); | |
b829f12a BM |
694 | /* ODTCOMP PU */ |
695 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
696 | DQSODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
697 | (1 << 31) | (0x0b << 16), | |
698 | 0x801f0000); | |
b829f12a BM |
699 | /* ODTCOMP PD */ |
700 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
701 | DQSODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
702 | (1 << 31) | (0x0b << 16), | |
703 | 0x801f0000); | |
b829f12a BM |
704 | /* TCOCOMP PU */ |
705 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
706 | DQSTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
707 | 1 << 31, 1 << 31); | |
b829f12a BM |
708 | /* TCOCOMP PD */ |
709 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
710 | DQSTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
711 | 1 << 31, 1 << 31); | |
b829f12a BM |
712 | |
713 | /* CLK COMP Overrides */ | |
714 | /* RCOMP PU */ | |
715 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
716 | CLKDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
717 | (1 << 31) | (0x0c << 16), | |
718 | 0x801f0000); | |
b829f12a BM |
719 | /* RCOMP PD */ |
720 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
721 | CLKDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
722 | (1 << 31) | (0x0c << 16), | |
723 | 0x801f0000); | |
b829f12a BM |
724 | /* DCOMP PU */ |
725 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
726 | CLKDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
727 | (1 << 31) | (0x07 << 16), | |
728 | 0x801f0000); | |
b829f12a BM |
729 | /* DCOMP PD */ |
730 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
731 | CLKDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
732 | (1 << 31) | (0x07 << 16), | |
733 | 0x801f0000); | |
b829f12a BM |
734 | /* ODTCOMP PU */ |
735 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
736 | CLKODTPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
737 | (1 << 31) | (0x0b << 16), | |
738 | 0x801f0000); | |
b829f12a BM |
739 | /* ODTCOMP PD */ |
740 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
741 | CLKODTPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
742 | (1 << 31) | (0x0b << 16), | |
743 | 0x801f0000); | |
b829f12a BM |
744 | /* TCOCOMP PU */ |
745 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
746 | CLKTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
747 | 1 << 31, 1 << 31); | |
b829f12a BM |
748 | /* TCOCOMP PD */ |
749 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
750 | CLKTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
751 | 1 << 31, 1 << 31); | |
b829f12a BM |
752 | |
753 | /* CMD COMP Overrides */ | |
754 | /* RCOMP PU */ | |
755 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
756 | CMDDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
757 | (1 << 31) | (0x0d << 16), | |
758 | 0x803f0000); | |
b829f12a BM |
759 | /* RCOMP PD */ |
760 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
761 | CMDDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
762 | (1 << 31) | (0x0d << 16), | |
763 | 0x803f0000); | |
b829f12a BM |
764 | /* DCOMP PU */ |
765 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
766 | CMDDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
767 | (1 << 31) | (0x0a << 16), | |
768 | 0x801f0000); | |
b829f12a BM |
769 | /* DCOMP PD */ |
770 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
771 | CMDDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
772 | (1 << 31) | (0x0a << 16), | |
773 | 0x801f0000); | |
b829f12a BM |
774 | |
775 | /* CTL COMP Overrides */ | |
776 | /* RCOMP PU */ | |
777 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
778 | CTLDRVPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
779 | (1 << 31) | (0x0d << 16), | |
780 | 0x803f0000); | |
b829f12a BM |
781 | /* RCOMP PD */ |
782 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
783 | CTLDRVPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
784 | (1 << 31) | (0x0d << 16), | |
785 | 0x803f0000); | |
b829f12a BM |
786 | /* DCOMP PU */ |
787 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
788 | CTLDLYPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
789 | (1 << 31) | (0x0a << 16), | |
790 | 0x801f0000); | |
b829f12a BM |
791 | /* DCOMP PD */ |
792 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
793 | CTLDLYPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
794 | (1 << 31) | (0x0a << 16), | |
795 | 0x801f0000); | |
b829f12a BM |
796 | #else |
797 | /* DQ TCOCOMP Overrides */ | |
798 | /* TCOCOMP PU */ | |
799 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
800 | DQTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
801 | (1 << 31) | (0x1f << 16), | |
802 | 0x801f0000); | |
b829f12a BM |
803 | /* TCOCOMP PD */ |
804 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
805 | DQTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
806 | (1 << 31) | (0x1f << 16), | |
807 | 0x801f0000); | |
b829f12a BM |
808 | |
809 | /* DQS TCOCOMP Overrides */ | |
810 | /* TCOCOMP PU */ | |
811 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
812 | DQSTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
813 | (1 << 31) | (0x1f << 16), | |
814 | 0x801f0000); | |
b829f12a BM |
815 | /* TCOCOMP PD */ |
816 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
817 | DQSTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
818 | (1 << 31) | (0x1f << 16), | |
819 | 0x801f0000); | |
b829f12a BM |
820 | |
821 | /* CLK TCOCOMP Overrides */ | |
822 | /* TCOCOMP PU */ | |
823 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
824 | CLKTCOPUCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
825 | (1 << 31) | (0x1f << 16), | |
826 | 0x801f0000); | |
b829f12a BM |
827 | /* TCOCOMP PD */ |
828 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
829 | CLKTCOPDCTLCH0 + ch * DDRCOMP_CH_OFFSET, |
830 | (1 << 31) | (0x1f << 16), | |
831 | 0x801f0000); | |
b829f12a BM |
832 | #endif |
833 | ||
834 | /* program STATIC delays */ | |
835 | #ifdef BACKUP_WCMD | |
836 | set_wcmd(ch, ddr_wcmd[PLATFORM_ID]); | |
837 | #else | |
838 | set_wcmd(ch, ddr_wclk[PLATFORM_ID] + HALF_CLK); | |
839 | #endif | |
840 | ||
841 | for (rk = 0; rk < NUM_RANKS; rk++) { | |
312cc39e | 842 | if (mrc_params->rank_enables & (1 << rk)) { |
b829f12a BM |
843 | set_wclk(ch, rk, ddr_wclk[PLATFORM_ID]); |
844 | #ifdef BACKUP_WCTL | |
845 | set_wctl(ch, rk, ddr_wctl[PLATFORM_ID]); | |
846 | #else | |
847 | set_wctl(ch, rk, ddr_wclk[PLATFORM_ID] + HALF_CLK); | |
848 | #endif | |
849 | } | |
850 | } | |
851 | } | |
852 | } | |
853 | ||
854 | /* COMP (non channel specific) */ | |
855 | /* RCOMP: Dither PU Enable */ | |
312cc39e | 856 | mrc_alt_write_mask(DDRPHY, DQANADRVPUCTL, 1 << 30, 1 << 30); |
b829f12a | 857 | /* RCOMP: Dither PD Enable */ |
312cc39e | 858 | mrc_alt_write_mask(DDRPHY, DQANADRVPDCTL, 1 << 30, 1 << 30); |
b829f12a | 859 | /* RCOMP: Dither PU Enable */ |
312cc39e | 860 | mrc_alt_write_mask(DDRPHY, CMDANADRVPUCTL, 1 << 30, 1 << 30); |
b829f12a | 861 | /* RCOMP: Dither PD Enable */ |
312cc39e | 862 | mrc_alt_write_mask(DDRPHY, CMDANADRVPDCTL, 1 << 30, 1 << 30); |
b829f12a | 863 | /* RCOMP: Dither PU Enable */ |
312cc39e | 864 | mrc_alt_write_mask(DDRPHY, CLKANADRVPUCTL, 1 << 30, 1 << 30); |
b829f12a | 865 | /* RCOMP: Dither PD Enable */ |
312cc39e | 866 | mrc_alt_write_mask(DDRPHY, CLKANADRVPDCTL, 1 << 30, 1 << 30); |
b829f12a | 867 | /* RCOMP: Dither PU Enable */ |
312cc39e | 868 | mrc_alt_write_mask(DDRPHY, DQSANADRVPUCTL, 1 << 30, 1 << 30); |
b829f12a | 869 | /* RCOMP: Dither PD Enable */ |
312cc39e | 870 | mrc_alt_write_mask(DDRPHY, DQSANADRVPDCTL, 1 << 30, 1 << 30); |
b829f12a | 871 | /* RCOMP: Dither PU Enable */ |
312cc39e | 872 | mrc_alt_write_mask(DDRPHY, CTLANADRVPUCTL, 1 << 30, 1 << 30); |
b829f12a | 873 | /* RCOMP: Dither PD Enable */ |
312cc39e | 874 | mrc_alt_write_mask(DDRPHY, CTLANADRVPDCTL, 1 << 30, 1 << 30); |
b829f12a | 875 | /* ODT: Dither PU Enable */ |
312cc39e | 876 | mrc_alt_write_mask(DDRPHY, DQANAODTPUCTL, 1 << 30, 1 << 30); |
b829f12a | 877 | /* ODT: Dither PD Enable */ |
312cc39e | 878 | mrc_alt_write_mask(DDRPHY, DQANAODTPDCTL, 1 << 30, 1 << 30); |
b829f12a | 879 | /* ODT: Dither PU Enable */ |
312cc39e | 880 | mrc_alt_write_mask(DDRPHY, CLKANAODTPUCTL, 1 << 30, 1 << 30); |
b829f12a | 881 | /* ODT: Dither PD Enable */ |
312cc39e | 882 | mrc_alt_write_mask(DDRPHY, CLKANAODTPDCTL, 1 << 30, 1 << 30); |
b829f12a | 883 | /* ODT: Dither PU Enable */ |
312cc39e | 884 | mrc_alt_write_mask(DDRPHY, DQSANAODTPUCTL, 1 << 30, 1 << 30); |
b829f12a | 885 | /* ODT: Dither PD Enable */ |
312cc39e | 886 | mrc_alt_write_mask(DDRPHY, DQSANAODTPDCTL, 1 << 30, 1 << 30); |
b829f12a | 887 | /* DCOMP: Dither PU Enable */ |
312cc39e | 888 | mrc_alt_write_mask(DDRPHY, DQANADLYPUCTL, 1 << 30, 1 << 30); |
b829f12a | 889 | /* DCOMP: Dither PD Enable */ |
312cc39e | 890 | mrc_alt_write_mask(DDRPHY, DQANADLYPDCTL, 1 << 30, 1 << 30); |
b829f12a | 891 | /* DCOMP: Dither PU Enable */ |
312cc39e | 892 | mrc_alt_write_mask(DDRPHY, CMDANADLYPUCTL, 1 << 30, 1 << 30); |
b829f12a | 893 | /* DCOMP: Dither PD Enable */ |
312cc39e | 894 | mrc_alt_write_mask(DDRPHY, CMDANADLYPDCTL, 1 << 30, 1 << 30); |
b829f12a | 895 | /* DCOMP: Dither PU Enable */ |
312cc39e | 896 | mrc_alt_write_mask(DDRPHY, CLKANADLYPUCTL, 1 << 30, 1 << 30); |
b829f12a | 897 | /* DCOMP: Dither PD Enable */ |
312cc39e | 898 | mrc_alt_write_mask(DDRPHY, CLKANADLYPDCTL, 1 << 30, 1 << 30); |
b829f12a | 899 | /* DCOMP: Dither PU Enable */ |
312cc39e | 900 | mrc_alt_write_mask(DDRPHY, DQSANADLYPUCTL, 1 << 30, 1 << 30); |
b829f12a | 901 | /* DCOMP: Dither PD Enable */ |
312cc39e | 902 | mrc_alt_write_mask(DDRPHY, DQSANADLYPDCTL, 1 << 30, 1 << 30); |
b829f12a | 903 | /* DCOMP: Dither PU Enable */ |
312cc39e | 904 | mrc_alt_write_mask(DDRPHY, CTLANADLYPUCTL, 1 << 30, 1 << 30); |
b829f12a | 905 | /* DCOMP: Dither PD Enable */ |
312cc39e | 906 | mrc_alt_write_mask(DDRPHY, CTLANADLYPDCTL, 1 << 30, 1 << 30); |
b829f12a | 907 | /* TCO: Dither PU Enable */ |
312cc39e | 908 | mrc_alt_write_mask(DDRPHY, DQANATCOPUCTL, 1 << 30, 1 << 30); |
b829f12a | 909 | /* TCO: Dither PD Enable */ |
312cc39e | 910 | mrc_alt_write_mask(DDRPHY, DQANATCOPDCTL, 1 << 30, 1 << 30); |
b829f12a | 911 | /* TCO: Dither PU Enable */ |
312cc39e | 912 | mrc_alt_write_mask(DDRPHY, CLKANATCOPUCTL, 1 << 30, 1 << 30); |
b829f12a | 913 | /* TCO: Dither PD Enable */ |
312cc39e | 914 | mrc_alt_write_mask(DDRPHY, CLKANATCOPDCTL, 1 << 30, 1 << 30); |
b829f12a | 915 | /* TCO: Dither PU Enable */ |
312cc39e | 916 | mrc_alt_write_mask(DDRPHY, DQSANATCOPUCTL, 1 << 30, 1 << 30); |
b829f12a | 917 | /* TCO: Dither PD Enable */ |
312cc39e | 918 | mrc_alt_write_mask(DDRPHY, DQSANATCOPDCTL, 1 << 30, 1 << 30); |
b829f12a | 919 | /* TCOCOMP: Pulse Count */ |
312cc39e | 920 | mrc_alt_write_mask(DDRPHY, TCOCNTCTRL, 1, 3); |
b829f12a | 921 | /* ODT: CMD/CTL PD/PU */ |
312cc39e BM |
922 | mrc_alt_write_mask(DDRPHY, CHNLBUFSTATIC, |
923 | (0x03 << 24) | (0x03 << 16), 0x1f1f0000); | |
b829f12a | 924 | /* Set 1us counter */ |
312cc39e BM |
925 | mrc_alt_write_mask(DDRPHY, MSCNTR, 0x64, 0xff); |
926 | mrc_alt_write_mask(DDRPHY, LATCH1CTL, 0x1 << 28, 0x70000000); | |
b829f12a BM |
927 | |
928 | /* Release PHY from reset */ | |
312cc39e | 929 | mrc_alt_write_mask(DDRPHY, MASTERRSTN, 1, 1); |
b829f12a BM |
930 | |
931 | /* STEP1 */ | |
932 | mrc_post_code(0x03, 0x11); | |
933 | ||
934 | for (ch = 0; ch < NUM_CHANNELS; ch++) { | |
935 | if (mrc_params->channel_enables & (1 << ch)) { | |
936 | /* DQ01-DQ23 */ | |
937 | for (bl_grp = 0; | |
312cc39e | 938 | bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2; |
b829f12a BM |
939 | bl_grp++) { |
940 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
941 | DQMDLLCTL + |
942 | bl_grp * DDRIODQ_BL_OFFSET + | |
943 | ch * DDRIODQ_CH_OFFSET, | |
944 | 1 << 13, | |
945 | 1 << 13); /* Enable VREG */ | |
b829f12a BM |
946 | delay_n(3); |
947 | } | |
948 | ||
949 | /* ECC */ | |
312cc39e BM |
950 | mrc_alt_write_mask(DDRPHY, ECCMDLLCTL, |
951 | 1 << 13, 1 << 13); /* Enable VREG */ | |
b829f12a BM |
952 | delay_n(3); |
953 | /* CMD */ | |
954 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
955 | CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET, |
956 | 1 << 13, 1 << 13); /* Enable VREG */ | |
b829f12a BM |
957 | delay_n(3); |
958 | /* CLK-CTL */ | |
959 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
960 | CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET, |
961 | 1 << 13, 1 << 13); /* Enable VREG */ | |
b829f12a BM |
962 | delay_n(3); |
963 | } | |
964 | } | |
965 | ||
966 | /* STEP2 */ | |
967 | mrc_post_code(0x03, 0x12); | |
968 | delay_n(200); | |
969 | ||
970 | for (ch = 0; ch < NUM_CHANNELS; ch++) { | |
971 | if (mrc_params->channel_enables & (1 << ch)) { | |
972 | /* DQ01-DQ23 */ | |
973 | for (bl_grp = 0; | |
312cc39e | 974 | bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2; |
b829f12a BM |
975 | bl_grp++) { |
976 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
977 | DQMDLLCTL + |
978 | bl_grp * DDRIODQ_BL_OFFSET + | |
979 | ch * DDRIODQ_CH_OFFSET, | |
980 | 1 << 17, | |
981 | 1 << 17); /* Enable MCDLL */ | |
b829f12a BM |
982 | delay_n(50); |
983 | } | |
984 | ||
985 | /* ECC */ | |
312cc39e BM |
986 | mrc_alt_write_mask(DDRPHY, ECCMDLLCTL, |
987 | 1 << 17, 1 << 17); /* Enable MCDLL */ | |
b829f12a BM |
988 | delay_n(50); |
989 | /* CMD */ | |
990 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
991 | CMDMDLLCTL + ch * DDRIOCCC_CH_OFFSET, |
992 | 1 << 18, 1 << 18); /* Enable MCDLL */ | |
b829f12a BM |
993 | delay_n(50); |
994 | /* CLK-CTL */ | |
995 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
996 | CCMDLLCTL + ch * DDRIOCCC_CH_OFFSET, |
997 | 1 << 18, 1 << 18); /* Enable MCDLL */ | |
b829f12a BM |
998 | delay_n(50); |
999 | } | |
1000 | } | |
1001 | ||
1002 | /* STEP3: */ | |
1003 | mrc_post_code(0x03, 0x13); | |
1004 | delay_n(100); | |
1005 | ||
1006 | for (ch = 0; ch < NUM_CHANNELS; ch++) { | |
1007 | if (mrc_params->channel_enables & (1 << ch)) { | |
1008 | /* DQ01-DQ23 */ | |
1009 | for (bl_grp = 0; | |
312cc39e | 1010 | bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2; |
b829f12a BM |
1011 | bl_grp++) { |
1012 | #ifdef FORCE_16BIT_DDRIO | |
312cc39e | 1013 | temp = (bl_grp && |
b829f12a | 1014 | (mrc_params->channel_width == X16)) ? |
312cc39e | 1015 | 0x11ff : 0xffff; |
b829f12a | 1016 | #else |
312cc39e | 1017 | temp = 0xffff; |
b829f12a BM |
1018 | #endif |
1019 | /* Enable TXDLL */ | |
1020 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1021 | DQDLLTXCTL + |
1022 | bl_grp * DDRIODQ_BL_OFFSET + | |
1023 | ch * DDRIODQ_CH_OFFSET, | |
1024 | temp, 0xffff); | |
b829f12a BM |
1025 | delay_n(3); |
1026 | /* Enable RXDLL */ | |
1027 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1028 | DQDLLRXCTL + |
1029 | bl_grp * DDRIODQ_BL_OFFSET + | |
1030 | ch * DDRIODQ_CH_OFFSET, | |
1031 | 0xf, 0xf); | |
b829f12a BM |
1032 | delay_n(3); |
1033 | /* Enable RXDLL Overrides BL0 */ | |
1034 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1035 | B0OVRCTL + |
1036 | bl_grp * DDRIODQ_BL_OFFSET + | |
1037 | ch * DDRIODQ_CH_OFFSET, | |
1038 | 0xf, 0xf); | |
b829f12a BM |
1039 | } |
1040 | ||
1041 | /* ECC */ | |
312cc39e BM |
1042 | temp = 0xffff; |
1043 | mrc_alt_write_mask(DDRPHY, ECCDLLTXCTL, | |
1044 | temp, 0xffff); | |
b829f12a BM |
1045 | delay_n(3); |
1046 | ||
1047 | /* CMD (PO) */ | |
1048 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1049 | CMDDLLTXCTL + ch * DDRIOCCC_CH_OFFSET, |
1050 | temp, 0xffff); | |
b829f12a BM |
1051 | delay_n(3); |
1052 | } | |
1053 | } | |
1054 | ||
1055 | /* STEP4 */ | |
1056 | mrc_post_code(0x03, 0x14); | |
1057 | ||
1058 | for (ch = 0; ch < NUM_CHANNELS; ch++) { | |
1059 | if (mrc_params->channel_enables & (1 << ch)) { | |
1060 | /* Host To Memory Clock Alignment (HMC) for 800/1066 */ | |
1061 | for (bl_grp = 0; | |
312cc39e | 1062 | bl_grp < (NUM_BYTE_LANES / bl_divisor) / 2; |
b829f12a BM |
1063 | bl_grp++) { |
1064 | /* CLK_ALIGN_MOD_ID */ | |
1065 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1066 | DQCLKALIGNREG2 + |
1067 | bl_grp * DDRIODQ_BL_OFFSET + | |
1068 | ch * DDRIODQ_CH_OFFSET, | |
1069 | bl_grp ? 3 : 1, | |
1070 | 0xf); | |
b829f12a BM |
1071 | } |
1072 | ||
1073 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1074 | ECCCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET, |
1075 | 0x2, 0xf); | |
b829f12a | 1076 | mrc_alt_write_mask(DDRPHY, |
312cc39e BM |
1077 | CMDCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET, |
1078 | 0x0, 0xf); | |
b829f12a | 1079 | mrc_alt_write_mask(DDRPHY, |
312cc39e BM |
1080 | CCCLKALIGNREG2 + ch * DDRIODQ_CH_OFFSET, |
1081 | 0x2, 0xf); | |
b829f12a | 1082 | mrc_alt_write_mask(DDRPHY, |
312cc39e BM |
1083 | CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET, |
1084 | 0x20, 0x30); | |
b829f12a BM |
1085 | /* |
1086 | * NUM_SAMPLES, MAX_SAMPLES, | |
1087 | * MACRO_PI_STEP, MICRO_PI_STEP | |
1088 | */ | |
1089 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1090 | CMDCLKALIGNREG1 + ch * DDRIOCCC_CH_OFFSET, |
1091 | (0x18 << 16) | (0x10 << 8) | | |
1092 | (0x8 << 2) | (0x1 << 0), | |
1093 | 0x007f7fff); | |
b829f12a BM |
1094 | /* TOTAL_NUM_MODULES, FIRST_U_PARTITION */ |
1095 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1096 | CMDCLKALIGNREG2 + ch * DDRIOCCC_CH_OFFSET, |
1097 | (0x10 << 16) | (0x4 << 8) | (0x2 << 4), | |
1098 | 0x001f0ff0); | |
b829f12a BM |
1099 | #ifdef HMC_TEST |
1100 | /* START_CLK_ALIGN=1 */ | |
1101 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1102 | CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET, |
1103 | 1 << 24, 1 << 24); | |
b829f12a | 1104 | while (msg_port_alt_read(DDRPHY, |
312cc39e BM |
1105 | CMDCLKALIGNREG0 + ch * DDRIOCCC_CH_OFFSET) & |
1106 | (1 << 24)) | |
b829f12a BM |
1107 | ; /* wait for START_CLK_ALIGN=0 */ |
1108 | #endif | |
1109 | ||
1110 | /* Set RD/WR Pointer Seperation & COUNTEN & FIFOPTREN */ | |
1111 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1112 | CMDPTRREG + ch * DDRIOCCC_CH_OFFSET, |
1113 | 1, 1); /* WRPTRENABLE=1 */ | |
b829f12a BM |
1114 | |
1115 | /* COMP initial */ | |
1116 | /* enable bypass for CLK buffer (PO) */ | |
1117 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1118 | COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET, |
1119 | 1 << 5, 1 << 5); | |
b829f12a | 1120 | /* Initial COMP Enable */ |
312cc39e | 1121 | mrc_alt_write_mask(DDRPHY, CMPCTRL, 1, 1); |
b829f12a | 1122 | /* wait for Initial COMP Enable = 0 */ |
312cc39e | 1123 | while (msg_port_alt_read(DDRPHY, CMPCTRL) & 1) |
b829f12a BM |
1124 | ; |
1125 | /* disable bypass for CLK buffer (PO) */ | |
1126 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1127 | COMPEN0CH0 + ch * DDRCOMP_CH_OFFSET, |
1128 | ~(1 << 5), 1 << 5); | |
b829f12a BM |
1129 | |
1130 | /* IOBUFACT */ | |
1131 | ||
1132 | /* STEP4a */ | |
1133 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1134 | CMDCFGREG0 + ch * DDRIOCCC_CH_OFFSET, |
1135 | 1 << 2, 1 << 2); /* IOBUFACTRST_N=1 */ | |
b829f12a BM |
1136 | |
1137 | /* DDRPHY initialization complete */ | |
1138 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1139 | CMDPMCONFIG0 + ch * DDRIOCCC_CH_OFFSET, |
1140 | 1 << 20, 1 << 20); /* SPID_INIT_COMPLETE=1 */ | |
b829f12a BM |
1141 | } |
1142 | } | |
1143 | ||
1144 | LEAVEFN(); | |
1145 | } | |
1146 | ||
1147 | /* This function performs JEDEC initialization on all enabled channels */ | |
1148 | void perform_jedec_init(struct mrc_params *mrc_params) | |
1149 | { | |
1150 | uint8_t twr, wl, rank; | |
1151 | uint32_t tck; | |
1152 | u32 dtr0; | |
1153 | u32 drp; | |
1154 | u32 drmc; | |
1155 | u32 mrs0_cmd = 0; | |
1156 | u32 emrs1_cmd = 0; | |
1157 | u32 emrs2_cmd = 0; | |
1158 | u32 emrs3_cmd = 0; | |
1159 | ||
1160 | ENTERFN(); | |
1161 | ||
1162 | /* jedec_init starts */ | |
1163 | mrc_post_code(0x04, 0x00); | |
1164 | ||
1165 | /* DDR3_RESET_SET=0, DDR3_RESET_RESET=1 */ | |
312cc39e | 1166 | mrc_alt_write_mask(DDRPHY, CCDDR3RESETCTL, 2, 0x102); |
b829f12a BM |
1167 | |
1168 | /* Assert RESET# for 200us */ | |
1169 | delay_u(200); | |
1170 | ||
1171 | /* DDR3_RESET_SET=1, DDR3_RESET_RESET=0 */ | |
312cc39e | 1172 | mrc_alt_write_mask(DDRPHY, CCDDR3RESETCTL, 0x100, 0x102); |
b829f12a BM |
1173 | |
1174 | dtr0 = msg_port_read(MEM_CTLR, DTR0); | |
1175 | ||
1176 | /* | |
1177 | * Set CKEVAL for populated ranks | |
1178 | * then send NOP to each rank (#4550197) | |
1179 | */ | |
1180 | ||
1181 | drp = msg_port_read(MEM_CTLR, DRP); | |
1182 | drp &= 0x3; | |
1183 | ||
1184 | drmc = msg_port_read(MEM_CTLR, DRMC); | |
312cc39e BM |
1185 | drmc &= 0xfffffffc; |
1186 | drmc |= (DRMC_CKEMODE | drp); | |
b829f12a BM |
1187 | |
1188 | msg_port_write(MEM_CTLR, DRMC, drmc); | |
1189 | ||
1190 | for (rank = 0; rank < NUM_RANKS; rank++) { | |
1191 | /* Skip to next populated rank */ | |
1192 | if ((mrc_params->rank_enables & (1 << rank)) == 0) | |
1193 | continue; | |
1194 | ||
1195 | dram_init_command(DCMD_NOP(rank)); | |
1196 | } | |
1197 | ||
1198 | msg_port_write(MEM_CTLR, DRMC, | |
312cc39e | 1199 | (mrc_params->rd_odt_value == 0 ? DRMC_ODTMODE : 0)); |
b829f12a BM |
1200 | |
1201 | /* | |
1202 | * setup for emrs 2 | |
1203 | * BIT[15:11] --> Always "0" | |
1204 | * BIT[10:09] --> Rtt_WR: want "Dynamic ODT Off" (0) | |
1205 | * BIT[08] --> Always "0" | |
1206 | * BIT[07] --> SRT: use sr_temp_range | |
1207 | * BIT[06] --> ASR: want "Manual SR Reference" (0) | |
1208 | * BIT[05:03] --> CWL: use oem_tCWL | |
1209 | * BIT[02:00] --> PASR: want "Full Array" (0) | |
1210 | */ | |
1211 | emrs2_cmd |= (2 << 3); | |
1212 | wl = 5 + mrc_params->ddr_speed; | |
1213 | emrs2_cmd |= ((wl - 5) << 9); | |
1214 | emrs2_cmd |= (mrc_params->sr_temp_range << 13); | |
1215 | ||
1216 | /* | |
1217 | * setup for emrs 3 | |
1218 | * BIT[15:03] --> Always "0" | |
1219 | * BIT[02] --> MPR: want "Normal Operation" (0) | |
1220 | * BIT[01:00] --> MPR_Loc: want "Predefined Pattern" (0) | |
1221 | */ | |
1222 | emrs3_cmd |= (3 << 3); | |
1223 | ||
1224 | /* | |
1225 | * setup for emrs 1 | |
1226 | * BIT[15:13] --> Always "0" | |
1227 | * BIT[12:12] --> Qoff: want "Output Buffer Enabled" (0) | |
1228 | * BIT[11:11] --> TDQS: want "Disabled" (0) | |
1229 | * BIT[10:10] --> Always "0" | |
1230 | * BIT[09,06,02] --> Rtt_nom: use rtt_nom_value | |
1231 | * BIT[08] --> Always "0" | |
1232 | * BIT[07] --> WR_LVL: want "Disabled" (0) | |
1233 | * BIT[05,01] --> DIC: use ron_value | |
1234 | * BIT[04:03] --> AL: additive latency want "0" (0) | |
1235 | * BIT[00] --> DLL: want "Enable" (0) | |
1236 | * | |
1237 | * (BIT5|BIT1) set Ron value | |
1238 | * 00 --> RZQ/6 (40ohm) | |
1239 | * 01 --> RZQ/7 (34ohm) | |
1240 | * 1* --> RESERVED | |
1241 | * | |
1242 | * (BIT9|BIT6|BIT2) set Rtt_nom value | |
1243 | * 000 --> Disabled | |
1244 | * 001 --> RZQ/4 ( 60ohm) | |
1245 | * 010 --> RZQ/2 (120ohm) | |
1246 | * 011 --> RZQ/6 ( 40ohm) | |
1247 | * 1** --> RESERVED | |
1248 | */ | |
1249 | emrs1_cmd |= (1 << 3); | |
312cc39e | 1250 | emrs1_cmd &= ~(1 << 6); |
b829f12a BM |
1251 | |
1252 | if (mrc_params->ron_value == 0) | |
312cc39e | 1253 | emrs1_cmd |= (1 << 7); |
b829f12a | 1254 | else |
312cc39e | 1255 | emrs1_cmd &= ~(1 << 7); |
b829f12a BM |
1256 | |
1257 | if (mrc_params->rtt_nom_value == 0) | |
1258 | emrs1_cmd |= (DDR3_EMRS1_RTTNOM_40 << 6); | |
1259 | else if (mrc_params->rtt_nom_value == 1) | |
1260 | emrs1_cmd |= (DDR3_EMRS1_RTTNOM_60 << 6); | |
1261 | else if (mrc_params->rtt_nom_value == 2) | |
1262 | emrs1_cmd |= (DDR3_EMRS1_RTTNOM_120 << 6); | |
1263 | ||
1264 | /* save MRS1 value (excluding control fields) */ | |
1265 | mrc_params->mrs1 = emrs1_cmd >> 6; | |
1266 | ||
1267 | /* | |
1268 | * setup for mrs 0 | |
1269 | * BIT[15:13] --> Always "0" | |
1270 | * BIT[12] --> PPD: for Quark (1) | |
1271 | * BIT[11:09] --> WR: use oem_tWR | |
1272 | * BIT[08] --> DLL: want "Reset" (1, self clearing) | |
1273 | * BIT[07] --> MODE: want "Normal" (0) | |
1274 | * BIT[06:04,02] --> CL: use oem_tCAS | |
1275 | * BIT[03] --> RD_BURST_TYPE: want "Interleave" (1) | |
1276 | * BIT[01:00] --> BL: want "8 Fixed" (0) | |
1277 | * WR: | |
1278 | * 0 --> 16 | |
1279 | * 1 --> 5 | |
1280 | * 2 --> 6 | |
1281 | * 3 --> 7 | |
1282 | * 4 --> 8 | |
1283 | * 5 --> 10 | |
1284 | * 6 --> 12 | |
1285 | * 7 --> 14 | |
1286 | * CL: | |
1287 | * BIT[02:02] "0" if oem_tCAS <= 11 (1866?) | |
1288 | * BIT[06:04] use oem_tCAS-4 | |
1289 | */ | |
312cc39e BM |
1290 | mrs0_cmd |= (1 << 14); |
1291 | mrs0_cmd |= (1 << 18); | |
b829f12a BM |
1292 | mrs0_cmd |= ((((dtr0 >> 12) & 7) + 1) << 10); |
1293 | ||
1294 | tck = t_ck[mrc_params->ddr_speed]; | |
1295 | /* Per JEDEC: tWR=15000ps DDR2/3 from 800-1600 */ | |
1296 | twr = MCEIL(15000, tck); | |
1297 | mrs0_cmd |= ((twr - 4) << 15); | |
1298 | ||
1299 | for (rank = 0; rank < NUM_RANKS; rank++) { | |
1300 | /* Skip to next populated rank */ | |
1301 | if ((mrc_params->rank_enables & (1 << rank)) == 0) | |
1302 | continue; | |
1303 | ||
1304 | emrs2_cmd |= (rank << 22); | |
1305 | dram_init_command(emrs2_cmd); | |
1306 | ||
1307 | emrs3_cmd |= (rank << 22); | |
1308 | dram_init_command(emrs3_cmd); | |
1309 | ||
1310 | emrs1_cmd |= (rank << 22); | |
1311 | dram_init_command(emrs1_cmd); | |
1312 | ||
1313 | mrs0_cmd |= (rank << 22); | |
1314 | dram_init_command(mrs0_cmd); | |
1315 | ||
1316 | dram_init_command(DCMD_ZQCL(rank)); | |
1317 | } | |
1318 | ||
1319 | LEAVEFN(); | |
1320 | } | |
1321 | ||
1322 | /* | |
1323 | * Dunit Initialization Complete | |
1324 | * | |
1325 | * Indicates that initialization of the Dunit has completed. | |
1326 | * | |
1327 | * Memory accesses are permitted and maintenance operation begins. | |
1328 | * Until this bit is set to a 1, the memory controller will not accept | |
1329 | * DRAM requests from the MEMORY_MANAGER or HTE. | |
1330 | */ | |
1331 | void set_ddr_init_complete(struct mrc_params *mrc_params) | |
1332 | { | |
1333 | u32 dco; | |
1334 | ||
1335 | ENTERFN(); | |
1336 | ||
1337 | dco = msg_port_read(MEM_CTLR, DCO); | |
312cc39e BM |
1338 | dco &= ~DCO_PMICTL; |
1339 | dco |= DCO_IC; | |
b829f12a BM |
1340 | msg_port_write(MEM_CTLR, DCO, dco); |
1341 | ||
1342 | LEAVEFN(); | |
1343 | } | |
1344 | ||
1345 | /* | |
1346 | * This function will retrieve relevant timing data | |
1347 | * | |
1348 | * This data will be used on subsequent boots to speed up boot times | |
1349 | * and is required for Suspend To RAM capabilities. | |
1350 | */ | |
1351 | void restore_timings(struct mrc_params *mrc_params) | |
1352 | { | |
1353 | uint8_t ch, rk, bl; | |
1354 | const struct mrc_timings *mt = &mrc_params->timings; | |
1355 | ||
1356 | for (ch = 0; ch < NUM_CHANNELS; ch++) { | |
1357 | for (rk = 0; rk < NUM_RANKS; rk++) { | |
1358 | for (bl = 0; bl < NUM_BYTE_LANES; bl++) { | |
1359 | set_rcvn(ch, rk, bl, mt->rcvn[ch][rk][bl]); | |
1360 | set_rdqs(ch, rk, bl, mt->rdqs[ch][rk][bl]); | |
1361 | set_wdqs(ch, rk, bl, mt->wdqs[ch][rk][bl]); | |
1362 | set_wdq(ch, rk, bl, mt->wdq[ch][rk][bl]); | |
1363 | if (rk == 0) { | |
1364 | /* VREF (RANK0 only) */ | |
1365 | set_vref(ch, bl, mt->vref[ch][bl]); | |
1366 | } | |
1367 | } | |
1368 | set_wctl(ch, rk, mt->wctl[ch][rk]); | |
1369 | } | |
1370 | set_wcmd(ch, mt->wcmd[ch]); | |
1371 | } | |
1372 | } | |
1373 | ||
1374 | /* | |
1375 | * Configure default settings normally set as part of read training | |
1376 | * | |
1377 | * Some defaults have to be set earlier as they may affect earlier | |
1378 | * training steps. | |
1379 | */ | |
1380 | void default_timings(struct mrc_params *mrc_params) | |
1381 | { | |
1382 | uint8_t ch, rk, bl; | |
1383 | ||
1384 | for (ch = 0; ch < NUM_CHANNELS; ch++) { | |
1385 | for (rk = 0; rk < NUM_RANKS; rk++) { | |
1386 | for (bl = 0; bl < NUM_BYTE_LANES; bl++) { | |
1387 | set_rdqs(ch, rk, bl, 24); | |
1388 | if (rk == 0) { | |
1389 | /* VREF (RANK0 only) */ | |
1390 | set_vref(ch, bl, 32); | |
1391 | } | |
1392 | } | |
1393 | } | |
1394 | } | |
1395 | } | |
1396 | ||
/*
 * This function will perform our RCVEN Calibration Algorithm.
 * We will only use the 2xCLK domain timings to perform RCVEN Calibration.
 * All byte lanes will be calibrated "simultaneously" per channel per rank.
 *
 * Per enabled channel/rank (non-backup path): reset the pointer FIFOs,
 * seed every byte lane at CAS+1 clocks, search for the DQS rising edge,
 * step a quarter clock into the high pulse, then back off whole clocks
 * until a "0" is sampled, and finally advance a quarter clock to land in
 * the centre of the preamble. With R2R_SHARING the programmed value is a
 * rolling average across the ranks calibrated so far.
 */
void rcvn_cal(struct mrc_params *mrc_params)
{
	uint8_t ch;	/* channel counter */
	uint8_t rk;	/* rank counter */
	uint8_t bl;	/* byte lane counter */
	/* x16 parts expose half the byte lanes */
	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;

#ifdef R2R_SHARING
	/* used to find placement for rank2rank sharing configs */
	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
#ifndef BACKUP_RCVN
	/* used to find placement for rank2rank sharing configs */
	uint32_t num_ranks_enabled = 0;
#endif
#endif

#ifdef BACKUP_RCVN
#else
	uint32_t temp;
	/* absolute PI value to be programmed on the byte lane */
	uint32_t delay[NUM_BYTE_LANES];
	u32 dtr1, dtr1_save;
#endif

	ENTERFN();

	/* rcvn_cal starts */
	mrc_post_code(0x05, 0x00);

#ifndef BACKUP_RCVN
	/* need separate burst to sample DQS preamble */
	dtr1 = msg_port_read(MEM_CTLR, DTR1);
	dtr1_save = dtr1;
	dtr1 |= DTR1_TCCD_12CLK;
	msg_port_write(MEM_CTLR, DTR1, dtr1);
#endif

#ifdef R2R_SHARING
	/* need to set "final_delay[][]" elements to "0" */
	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
#endif

	/* loop through each enabled channel */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			/* perform RCVEN Calibration on a per rank basis */
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					/*
					 * POST_CODE here indicates the current
					 * channel and rank being calibrated
					 */
					mrc_post_code(0x05, 0x10 + ((ch << 4) | rk));

#ifdef BACKUP_RCVN
					/* set hard-coded timing values */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++)
						set_rcvn(ch, rk, bl, ddr_rcvn[PLATFORM_ID]);
#else
					/* enable FIFORST (one control per byte-lane pair) */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl += 2) {
						mrc_alt_write_mask(DDRPHY,
							B01PTRCTL1 +
							(bl >> 1) * DDRIODQ_BL_OFFSET +
							ch * DDRIODQ_CH_OFFSET,
							0, 1 << 8);
					}
					/* initialize the starting delay to 128 PI (cas +1 CLK) */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						/* 1x CLK domain timing is cas-4 */
						delay[bl] = (4 + 1) * FULL_CLK;

						set_rcvn(ch, rk, bl, delay[bl]);
					}

					/* now find the rising edge */
					find_rising_edge(mrc_params, delay, ch, rk, true);

					/* Now increase delay by 32 PI (1/4 CLK) to place in center of high pulse */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						delay[bl] += QRTR_CLK;
						set_rcvn(ch, rk, bl, delay[bl]);
					}
					/* Now decrement delay by 128 PI (1 CLK) until we sample a "0" */
					do {
						temp = sample_dqs(mrc_params, ch, rk, true);
						for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
							if (temp & (1 << bl)) {
								if (delay[bl] >= FULL_CLK) {
									delay[bl] -= FULL_CLK;
									set_rcvn(ch, rk, bl, delay[bl]);
								} else {
									/* not enough delay: fatal training error */
									training_message(ch, rk, bl);
									mrc_post_code(0xee, 0x50);
								}
							}
						}
					} while (temp & 0xff);

#ifdef R2R_SHARING
					/* increment "num_ranks_enabled" */
					num_ranks_enabled++;
					/* Finally increment delay by 32 PI (1/4 CLK) to place in center of preamble */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						delay[bl] += QRTR_CLK;
						/* add "delay[]" values to "final_delay[][]" for rolling average */
						final_delay[ch][bl] += delay[bl];
						/* set timing based on rolling average values */
						set_rcvn(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);
					}
#else
					/* Finally increment delay by 32 PI (1/4 CLK) to place in center of preamble */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						delay[bl] += QRTR_CLK;
						set_rcvn(ch, rk, bl, delay[bl]);
					}
#endif

					/* disable FIFORST */
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl += 2) {
						mrc_alt_write_mask(DDRPHY,
							B01PTRCTL1 +
							(bl >> 1) * DDRIODQ_BL_OFFSET +
							ch * DDRIODQ_CH_OFFSET,
							1 << 8, 1 << 8);
					}
#endif
				}
			}
		}
	}

#ifndef BACKUP_RCVN
	/* restore original */
	msg_port_write(MEM_CTLR, DTR1, dtr1_save);
#endif

	LEAVEFN();
}
1542 | ||
1543 | /* | |
1544 | * This function will perform the Write Levelling algorithm | |
1545 | * (align WCLK and WDQS). | |
1546 | * | |
1547 | * This algorithm will act on each rank in each channel separately. | |
1548 | */ | |
1549 | void wr_level(struct mrc_params *mrc_params) | |
1550 | { | |
1551 | uint8_t ch; /* channel counter */ | |
1552 | uint8_t rk; /* rank counter */ | |
1553 | uint8_t bl; /* byte lane counter */ | |
1554 | uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1; | |
1555 | ||
1556 | #ifdef R2R_SHARING | |
1557 | /* used to find placement for rank2rank sharing configs */ | |
1558 | uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES]; | |
1559 | #ifndef BACKUP_WDQS | |
1560 | /* used to find placement for rank2rank sharing configs */ | |
1561 | uint32_t num_ranks_enabled = 0; | |
1562 | #endif | |
1563 | #endif | |
1564 | ||
1565 | #ifdef BACKUP_WDQS | |
1566 | #else | |
1567 | /* determines stop condition for CRS_WR_LVL */ | |
1568 | bool all_edges_found; | |
1569 | /* absolute PI value to be programmed on the byte lane */ | |
1570 | uint32_t delay[NUM_BYTE_LANES]; | |
1571 | /* | |
1572 | * static makes it so the data is loaded in the heap once by shadow(), | |
1573 | * where non-static copies the data onto the stack every time this | |
1574 | * function is called | |
1575 | */ | |
1576 | uint32_t address; /* address to be checked during COARSE_WR_LVL */ | |
1577 | u32 dtr4, dtr4_save; | |
1578 | #endif | |
1579 | ||
1580 | ENTERFN(); | |
1581 | ||
1582 | /* wr_level starts */ | |
1583 | mrc_post_code(0x06, 0x00); | |
1584 | ||
1585 | #ifdef R2R_SHARING | |
1586 | /* need to set "final_delay[][]" elements to "0" */ | |
1587 | memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay)); | |
1588 | #endif | |
1589 | ||
1590 | /* loop through each enabled channel */ | |
1591 | for (ch = 0; ch < NUM_CHANNELS; ch++) { | |
1592 | if (mrc_params->channel_enables & (1 << ch)) { | |
1593 | /* perform WRITE LEVELING algorithm on a per rank basis */ | |
1594 | for (rk = 0; rk < NUM_RANKS; rk++) { | |
1595 | if (mrc_params->rank_enables & (1 << rk)) { | |
1596 | /* | |
1597 | * POST_CODE here indicates the current | |
1598 | * rank and channel being calibrated | |
1599 | */ | |
312cc39e | 1600 | mrc_post_code(0x06, 0x10 + ((ch << 4) | rk)); |
b829f12a BM |
1601 | |
1602 | #ifdef BACKUP_WDQS | |
1603 | for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) { | |
1604 | set_wdqs(ch, rk, bl, ddr_wdqs[PLATFORM_ID]); | |
312cc39e | 1605 | set_wdq(ch, rk, bl, ddr_wdqs[PLATFORM_ID] - QRTR_CLK); |
b829f12a BM |
1606 | } |
1607 | #else | |
1608 | /* | |
1609 | * perform a single PRECHARGE_ALL command to | |
1610 | * make DRAM state machine go to IDLE state | |
1611 | */ | |
1612 | dram_init_command(DCMD_PREA(rk)); | |
1613 | ||
1614 | /* | |
1615 | * enable Write Levelling Mode | |
1616 | * (EMRS1 w/ Write Levelling Mode Enable) | |
1617 | */ | |
312cc39e | 1618 | dram_init_command(DCMD_MRS1(rk, 0x82)); |
b829f12a BM |
1619 | |
1620 | /* | |
1621 | * set ODT DRAM Full Time Termination | |
1622 | * disable in MCU | |
1623 | */ | |
1624 | ||
1625 | dtr4 = msg_port_read(MEM_CTLR, DTR4); | |
1626 | dtr4_save = dtr4; | |
312cc39e | 1627 | dtr4 |= DTR4_ODTDIS; |
b829f12a BM |
1628 | msg_port_write(MEM_CTLR, DTR4, dtr4); |
1629 | ||
312cc39e | 1630 | for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) { |
b829f12a BM |
1631 | /* |
1632 | * Enable Sandy Bridge Mode (WDQ Tri-State) & | |
1633 | * Ensure 5 WDQS pulses during Write Leveling | |
1634 | */ | |
1635 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1636 | DQCTL + DDRIODQ_BL_OFFSET * bl + DDRIODQ_CH_OFFSET * ch, |
1637 | 0x10000154, | |
1638 | 0x100003fc); | |
b829f12a BM |
1639 | } |
1640 | ||
1641 | /* Write Leveling Mode enabled in IO */ | |
1642 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1643 | CCDDR3RESETCTL + DDRIOCCC_CH_OFFSET * ch, |
1644 | 1 << 16, 1 << 16); | |
b829f12a BM |
1645 | |
1646 | /* Initialize the starting delay to WCLK */ | |
1647 | for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) { | |
1648 | /* | |
1649 | * CLK0 --> RK0 | |
1650 | * CLK1 --> RK1 | |
1651 | */ | |
1652 | delay[bl] = get_wclk(ch, rk); | |
1653 | ||
1654 | set_wdqs(ch, rk, bl, delay[bl]); | |
1655 | } | |
1656 | ||
1657 | /* now find the rising edge */ | |
1658 | find_rising_edge(mrc_params, delay, ch, rk, false); | |
1659 | ||
1660 | /* disable Write Levelling Mode */ | |
1661 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1662 | CCDDR3RESETCTL + DDRIOCCC_CH_OFFSET * ch, |
1663 | 0, 1 << 16); | |
b829f12a | 1664 | |
312cc39e | 1665 | for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) { |
b829f12a BM |
1666 | /* Disable Sandy Bridge Mode & Ensure 4 WDQS pulses during normal operation */ |
1667 | mrc_alt_write_mask(DDRPHY, | |
312cc39e BM |
1668 | DQCTL + DDRIODQ_BL_OFFSET * bl + DDRIODQ_CH_OFFSET * ch, |
1669 | 0x00000154, | |
1670 | 0x100003fc); | |
b829f12a BM |
1671 | } |
1672 | ||
1673 | /* restore original DTR4 */ | |
1674 | msg_port_write(MEM_CTLR, DTR4, dtr4_save); | |
1675 | ||
1676 | /* | |
1677 | * restore original value | |
1678 | * (Write Levelling Mode Disable) | |
1679 | */ | |
1680 | dram_init_command(DCMD_MRS1(rk, mrc_params->mrs1)); | |
1681 | ||
1682 | /* | |
1683 | * perform a single PRECHARGE_ALL command to | |
1684 | * make DRAM state machine go to IDLE state | |
1685 | */ | |
1686 | dram_init_command(DCMD_PREA(rk)); | |
1687 | ||
312cc39e | 1688 | mrc_post_code(0x06, 0x30 + ((ch << 4) | rk)); |
b829f12a BM |
1689 | |
1690 | /* | |
1691 | * COARSE WRITE LEVEL: | |
1692 | * check that we're on the correct clock edge | |
1693 | */ | |
1694 | ||
1695 | /* hte reconfiguration request */ | |
1696 | mrc_params->hte_setup = 1; | |
1697 | ||
1698 | /* start CRS_WR_LVL with WDQS = WDQS + 128 PI */ | |
1699 | for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) { | |
1700 | delay[bl] = get_wdqs(ch, rk, bl) + FULL_CLK; | |
1701 | set_wdqs(ch, rk, bl, delay[bl]); | |
1702 | /* | |
1703 | * program WDQ timings based on WDQS | |
1704 | * (WDQ = WDQS - 32 PI) | |
1705 | */ | |
1706 | set_wdq(ch, rk, bl, (delay[bl] - QRTR_CLK)); | |
1707 | } | |
1708 | ||
1709 | /* get an address in the targeted channel/rank */ | |
1710 | address = get_addr(ch, rk); | |
1711 | do { | |
1712 | uint32_t coarse_result = 0x00; | |
1713 | uint32_t coarse_result_mask = byte_lane_mask(mrc_params); | |
1714 | /* assume pass */ | |
1715 | all_edges_found = true; | |
1716 | ||
1717 | mrc_params->hte_setup = 1; | |
1718 | coarse_result = check_rw_coarse(mrc_params, address); | |
1719 | ||
1720 | /* check for failures and margin the byte lane back 128 PI (1 CLK) */ | |
312cc39e | 1721 | for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) { |
b829f12a BM |
1722 | if (coarse_result & (coarse_result_mask << bl)) { |
1723 | all_edges_found = false; | |
1724 | delay[bl] -= FULL_CLK; | |
1725 | set_wdqs(ch, rk, bl, delay[bl]); | |
1726 | /* program WDQ timings based on WDQS (WDQ = WDQS - 32 PI) */ | |
312cc39e | 1727 | set_wdq(ch, rk, bl, delay[bl] - QRTR_CLK); |
b829f12a BM |
1728 | } |
1729 | } | |
1730 | } while (!all_edges_found); | |
1731 | ||
1732 | #ifdef R2R_SHARING | |
1733 | /* increment "num_ranks_enabled" */ | |
1734 | num_ranks_enabled++; | |
1735 | /* accumulate "final_delay[][]" values from "delay[]" values for rolling average */ | |
312cc39e | 1736 | for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) { |
b829f12a | 1737 | final_delay[ch][bl] += delay[bl]; |
312cc39e | 1738 | set_wdqs(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled); |
b829f12a | 1739 | /* program WDQ timings based on WDQS (WDQ = WDQS - 32 PI) */ |
312cc39e | 1740 | set_wdq(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled - QRTR_CLK); |
b829f12a BM |
1741 | } |
1742 | #endif | |
1743 | #endif | |
1744 | } | |
1745 | } | |
1746 | } | |
1747 | } | |
1748 | ||
1749 | LEAVEFN(); | |
1750 | } | |
1751 | ||
1752 | void prog_page_ctrl(struct mrc_params *mrc_params) | |
1753 | { | |
1754 | u32 dpmc0; | |
1755 | ||
1756 | ENTERFN(); | |
1757 | ||
1758 | dpmc0 = msg_port_read(MEM_CTLR, DPMC0); | |
312cc39e | 1759 | dpmc0 &= ~DPMC0_PCLSTO_MASK; |
b829f12a | 1760 | dpmc0 |= (4 << 16); |
312cc39e | 1761 | dpmc0 |= DPMC0_PREAPWDEN; |
b829f12a BM |
1762 | msg_port_write(MEM_CTLR, DPMC0, dpmc0); |
1763 | } | |
1764 | ||
1765 | /* | |
1766 | * This function will perform the READ TRAINING Algorithm on all | |
1767 | * channels/ranks/byte_lanes simultaneously to minimize execution time. | |
1768 | * | |
1769 | * The idea here is to train the VREF and RDQS (and eventually RDQ) values | |
1770 | * to achieve maximum READ margins. The algorithm will first determine the | |
1771 | * X coordinate (RDQS setting). This is done by collapsing the VREF eye | |
1772 | * until we find a minimum required RDQS eye for VREF_MIN and VREF_MAX. | |
1773 | * Then we take the averages of the RDQS eye at VREF_MIN and VREF_MAX, | |
1774 | * then average those; this will be the final X coordinate. The algorithm | |
1775 | * will then determine the Y coordinate (VREF setting). This is done by | |
1776 | * collapsing the RDQS eye until we find a minimum required VREF eye for | |
1777 | * RDQS_MIN and RDQS_MAX. Then we take the averages of the VREF eye at | |
1778 | * RDQS_MIN and RDQS_MAX, then average those; this will be the final Y | |
1779 | * coordinate. | |
1780 | * | |
1781 | * NOTE: this algorithm assumes the eye curves have a one-to-one relationship, | |
1782 | * meaning for each X the curve has only one Y and vice-a-versa. | |
1783 | */ | |
void rd_train(struct mrc_params *mrc_params)
{
	uint8_t ch;	/* channel counter */
	uint8_t rk;	/* rank counter */
	uint8_t bl;	/* byte lane counter */
	/* X16 parts pair byte lanes, so only half the lanes are trained */
	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
#ifdef BACKUP_RDQS
#else
	uint8_t side_x;	/* tracks LEFT/RIGHT approach vectors */
	uint8_t side_y;	/* tracks BOTTOM/TOP approach vectors */
	/* X coordinate data (passing RDQS values) for approach vectors */
	uint8_t x_coordinate[2][2][NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
	/* Y coordinate data (passing VREF values) for approach vectors */
	uint8_t y_coordinate[2][2][NUM_CHANNELS][NUM_BYTE_LANES];
	/* centered X (RDQS) */
	uint8_t x_center[NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
	/* centered Y (VREF) */
	uint8_t y_center[NUM_CHANNELS][NUM_BYTE_LANES];
	uint32_t address;	/* target address for check_bls_ex() */
	uint32_t result;	/* result of check_bls_ex() */
	uint32_t bl_mask;	/* byte lane mask for result checking */
#ifdef R2R_SHARING
	/* used to find placement for rank2rank sharing configs */
	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
	/* used to find placement for rank2rank sharing configs */
	uint32_t num_ranks_enabled = 0;
#endif
#endif

	/* rd_train starts */
	mrc_post_code(0x07, 0x00);

	ENTERFN();

#ifdef BACKUP_RDQS
	/* Backup path: skip training, apply fixed per-platform RDQS values */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					for (bl = 0;
					     bl < NUM_BYTE_LANES / bl_divisor;
					     bl++) {
						set_rdqs(ch, rk, bl, ddr_rdqs[PLATFORM_ID]);
					}
				}
			}
		}
	}
#else
	/*
	 * initialize x/y_coordinate arrays to the widest possible eye:
	 * LEFT edges start at RDQS_MIN, RIGHT at RDQS_MAX,
	 * BOTTOM at VREF_MIN, TOP at VREF_MAX; the search then collapses
	 * each edge inward until the settings pass.
	 */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					for (bl = 0;
					     bl < NUM_BYTE_LANES / bl_divisor;
					     bl++) {
						/* x_coordinate */
						x_coordinate[L][B][ch][rk][bl] = RDQS_MIN;
						x_coordinate[R][B][ch][rk][bl] = RDQS_MAX;
						x_coordinate[L][T][ch][rk][bl] = RDQS_MIN;
						x_coordinate[R][T][ch][rk][bl] = RDQS_MAX;
						/* y_coordinate */
						y_coordinate[L][B][ch][bl] = VREF_MIN;
						y_coordinate[R][B][ch][bl] = VREF_MIN;
						y_coordinate[L][T][ch][bl] = VREF_MAX;
						y_coordinate[R][T][ch][bl] = VREF_MAX;
					}
				}
			}
		}
	}

	/* initialize other variables */
	bl_mask = byte_lane_mask(mrc_params);
	address = get_addr(0, 0);

#ifdef R2R_SHARING
	/* need to set "final_delay[][]" elements to "0" */
	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
#endif

	/* look for passing coordinates */
	for (side_y = B; side_y <= T; side_y++) {
		for (side_x = L; side_x <= R; side_x++) {
			mrc_post_code(0x07, 0x10 + side_y * 2 + side_x);

			/* find passing values */
			for (ch = 0; ch < NUM_CHANNELS; ch++) {
				if (mrc_params->channel_enables & (0x1 << ch)) {
					for (rk = 0; rk < NUM_RANKS; rk++) {
						if (mrc_params->rank_enables &
							(0x1 << rk)) {
							/* set x/y_coordinate search starting settings */
							for (bl = 0;
							     bl < NUM_BYTE_LANES / bl_divisor;
							     bl++) {
								set_rdqs(ch, rk, bl,
									 x_coordinate[side_x][side_y][ch][rk][bl]);
								set_vref(ch, bl,
									 y_coordinate[side_x][side_y][ch][bl]);
							}

							/* get an address in the target channel/rank */
							address = get_addr(ch, rk);

							/* request HTE reconfiguration */
							mrc_params->hte_setup = 1;

							/* test the settings */
							do {
								/* result[07:00] == failing byte lane (MAX 8) */
								result = check_bls_ex(mrc_params, address);

								/* check for failures */
								if (result & 0xff) {
									/* at least 1 byte lane failed */
									for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
										if (result &
											(bl_mask << bl)) {
											/* adjust the RDQS values accordingly */
											if (side_x == L)
												x_coordinate[L][side_y][ch][rk][bl] += RDQS_STEP;
											else
												x_coordinate[R][side_y][ch][rk][bl] -= RDQS_STEP;

											/* check that we haven't closed the RDQS_EYE too much */
											if ((x_coordinate[L][side_y][ch][rk][bl] > (RDQS_MAX - MIN_RDQS_EYE)) ||
											    (x_coordinate[R][side_y][ch][rk][bl] < (RDQS_MIN + MIN_RDQS_EYE)) ||
											    (x_coordinate[L][side_y][ch][rk][bl] ==
											     x_coordinate[R][side_y][ch][rk][bl])) {
												/*
												 * not enough RDQS margin available at this VREF
												 * update VREF values accordingly
												 */
												if (side_y == B)
													y_coordinate[side_x][B][ch][bl] += VREF_STEP;
												else
													y_coordinate[side_x][T][ch][bl] -= VREF_STEP;

												/* check that we haven't closed the VREF_EYE too much */
												if ((y_coordinate[side_x][B][ch][bl] > (VREF_MAX - MIN_VREF_EYE)) ||
												    (y_coordinate[side_x][T][ch][bl] < (VREF_MIN + MIN_VREF_EYE)) ||
												    (y_coordinate[side_x][B][ch][bl] == y_coordinate[side_x][T][ch][bl])) {
													/*
													 * VREF_EYE collapsed below MIN_VREF_EYE
													 * NOTE(review): mrc_post_code with 0xEE
													 * appears to be the fatal-error path —
													 * confirm it halts and does not return
													 */
													training_message(ch, rk, bl);
													mrc_post_code(0xEE, 0x70 + side_y * 2 + side_x);
												} else {
													/* update the VREF setting */
													set_vref(ch, bl, y_coordinate[side_x][side_y][ch][bl]);
													/* reset the X coordinate to begin the search at the new VREF */
													x_coordinate[side_x][side_y][ch][rk][bl] =
														(side_x == L) ? RDQS_MIN : RDQS_MAX;
												}
											}

											/* update the RDQS setting */
											set_rdqs(ch, rk, bl, x_coordinate[side_x][side_y][ch][rk][bl]);
										}
									}
								}
							} while (result & 0xff);
						}
					}
				}
			}
		}
	}

	mrc_post_code(0x07, 0x20);

	/* find final RDQS (X coordinate) & final VREF (Y coordinate) */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						uint32_t temp1;
						uint32_t temp2;

						/* x_coordinate */
						DPF(D_INFO,
						    "RDQS T/B eye rank%d lane%d : %d-%d %d-%d\n",
						    rk, bl,
						    x_coordinate[L][T][ch][rk][bl],
						    x_coordinate[R][T][ch][rk][bl],
						    x_coordinate[L][B][ch][rk][bl],
						    x_coordinate[R][B][ch][rk][bl]);

						/* average the TOP side LEFT & RIGHT values */
						temp1 = (x_coordinate[R][T][ch][rk][bl] + x_coordinate[L][T][ch][rk][bl]) / 2;
						/* average the BOTTOM side LEFT & RIGHT values */
						temp2 = (x_coordinate[R][B][ch][rk][bl] + x_coordinate[L][B][ch][rk][bl]) / 2;
						/* average the above averages */
						x_center[ch][rk][bl] = (uint8_t) ((temp1 + temp2) / 2);

						/* y_coordinate */
						DPF(D_INFO,
						    "VREF R/L eye lane%d : %d-%d %d-%d\n",
						    bl,
						    y_coordinate[R][B][ch][bl],
						    y_coordinate[R][T][ch][bl],
						    y_coordinate[L][B][ch][bl],
						    y_coordinate[L][T][ch][bl]);

						/* average the RIGHT side TOP & BOTTOM values */
						temp1 = (y_coordinate[R][T][ch][bl] + y_coordinate[R][B][ch][bl]) / 2;
						/* average the LEFT side TOP & BOTTOM values */
						temp2 = (y_coordinate[L][T][ch][bl] + y_coordinate[L][B][ch][bl]) / 2;
						/* average the above averages */
						y_center[ch][bl] = (uint8_t) ((temp1 + temp2) / 2);
					}
				}
			}
		}
	}

#ifdef RX_EYE_CHECK
	/*
	 * perform an eye check
	 * NOTE(review): this reuses "address" from the last trained
	 * channel/rank of the search loop above — verify that is the
	 * intended target for the eye check.
	 */
	for (side_y = B; side_y <= T; side_y++) {
		for (side_x = L; side_x <= R; side_x++) {
			mrc_post_code(0x07, 0x30 + side_y * 2 + side_x);

			/* update the settings for the eye check */
			for (ch = 0; ch < NUM_CHANNELS; ch++) {
				if (mrc_params->channel_enables & (1 << ch)) {
					for (rk = 0; rk < NUM_RANKS; rk++) {
						if (mrc_params->rank_enables & (1 << rk)) {
							for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
								if (side_x == L)
									set_rdqs(ch, rk, bl, x_center[ch][rk][bl] - (MIN_RDQS_EYE / 2));
								else
									set_rdqs(ch, rk, bl, x_center[ch][rk][bl] + (MIN_RDQS_EYE / 2));

								if (side_y == B)
									set_vref(ch, bl, y_center[ch][bl] - (MIN_VREF_EYE / 2));
								else
									set_vref(ch, bl, y_center[ch][bl] + (MIN_VREF_EYE / 2));
							}
						}
					}
				}
			}

			/* request HTE reconfiguration */
			mrc_params->hte_setup = 1;

			/* check the eye */
			if (check_bls_ex(mrc_params, address) & 0xff) {
				/* one or more byte lanes failed */
				mrc_post_code(0xee, 0x74 + side_x * 2 + side_y);
			}
		}
	}
#endif

	mrc_post_code(0x07, 0x40);

	/* set final placements */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
#ifdef R2R_SHARING
					/* increment "num_ranks_enabled" */
					num_ranks_enabled++;
#endif
					for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor); bl++) {
						/* x_coordinate */
#ifdef R2R_SHARING
						/* ranks share one physical delay: program the rolling average */
						final_delay[ch][bl] += x_center[ch][rk][bl];
						set_rdqs(ch, rk, bl, final_delay[ch][bl] / num_ranks_enabled);
#else
						set_rdqs(ch, rk, bl, x_center[ch][rk][bl]);
#endif
						/* y_coordinate */
						set_vref(ch, bl, y_center[ch][bl]);
					}
				}
			}
		}
	}
#endif

	LEAVEFN();
}
2070 | ||
2071 | /* | |
2072 | * This function will perform the WRITE TRAINING Algorithm on all | |
2073 | * channels/ranks/byte_lanes simultaneously to minimize execution time. | |
2074 | * | |
2075 | * The idea here is to train the WDQ timings to achieve maximum WRITE margins. | |
2076 | * The algorithm will start with WDQ at the current WDQ setting (tracks WDQS | |
2077 | * in WR_LVL) +/- 32 PIs (+/- 1/4 CLK) and collapse the eye until all data | |
2078 | * patterns pass. This is because WDQS will be aligned to WCLK by the | |
2079 | * Write Leveling algorithm and WDQ will only ever have a 1/2 CLK window | |
2080 | * of validity. | |
2081 | */ | |
void wr_train(struct mrc_params *mrc_params)
{
	uint8_t ch;	/* channel counter */
	uint8_t rk;	/* rank counter */
	uint8_t bl;	/* byte lane counter */
	/* X16 parts pair byte lanes, so only half the lanes are trained */
	uint8_t bl_divisor = (mrc_params->channel_width == X16) ? 2 : 1;
#ifdef BACKUP_WDQ
#else
	uint8_t side;	/* LEFT/RIGHT side indicator (0=L, 1=R) */
	uint32_t temp;	/* temporary DWORD */
	/* 2 arrays, for L & R side passing delays */
	uint32_t delay[2][NUM_CHANNELS][NUM_RANKS][NUM_BYTE_LANES];
	uint32_t address;	/* target address for check_bls_ex() */
	uint32_t result;	/* result of check_bls_ex() */
	uint32_t bl_mask;	/* byte lane mask for result checking */
#ifdef R2R_SHARING
	/* used to find placement for rank2rank sharing configs */
	uint32_t final_delay[NUM_CHANNELS][NUM_BYTE_LANES];
	/* used to find placement for rank2rank sharing configs */
	uint32_t num_ranks_enabled = 0;
#endif
#endif

	/* wr_train starts */
	mrc_post_code(0x08, 0x00);

	ENTERFN();

#ifdef BACKUP_WDQ
	/* Backup path: skip training, apply fixed per-platform WDQ values */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					for (bl = 0;
					     bl < NUM_BYTE_LANES / bl_divisor;
					     bl++) {
						set_wdq(ch, rk, bl, ddr_wdq[PLATFORM_ID]);
					}
				}
			}
		}
	}
#else
	/* initialize "delay" */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
					for (bl = 0;
					     bl < NUM_BYTE_LANES / bl_divisor;
					     bl++) {
						/*
						 * want to start with
						 * WDQ = (WDQS - QRTR_CLK)
						 * +/- QRTR_CLK
						 */
						temp = get_wdqs(ch, rk, bl) - QRTR_CLK;
						delay[L][ch][rk][bl] = temp - QRTR_CLK;
						delay[R][ch][rk][bl] = temp + QRTR_CLK;
					}
				}
			}
		}
	}

	/* initialize other variables */
	bl_mask = byte_lane_mask(mrc_params);
	address = get_addr(0, 0);

#ifdef R2R_SHARING
	/* need to set "final_delay[][]" elements to "0" */
	memset((void *)(final_delay), 0x00, (size_t)sizeof(final_delay));
#endif

	/*
	 * start algorithm on the LEFT side and train each channel/bl
	 * until no failures are observed, then repeat for the RIGHT side.
	 */
	for (side = L; side <= R; side++) {
		mrc_post_code(0x08, 0x10 + side);

		/* set starting values */
		for (ch = 0; ch < NUM_CHANNELS; ch++) {
			if (mrc_params->channel_enables & (1 << ch)) {
				for (rk = 0; rk < NUM_RANKS; rk++) {
					if (mrc_params->rank_enables &
						(1 << rk)) {
						for (bl = 0;
						     bl < NUM_BYTE_LANES / bl_divisor;
						     bl++) {
							set_wdq(ch, rk, bl, delay[side][ch][rk][bl]);
						}
					}
				}
			}
		}

		/* find passing values */
		for (ch = 0; ch < NUM_CHANNELS; ch++) {
			if (mrc_params->channel_enables & (1 << ch)) {
				for (rk = 0; rk < NUM_RANKS; rk++) {
					if (mrc_params->rank_enables &
						(1 << rk)) {
						/* get an address in the target channel/rank */
						address = get_addr(ch, rk);

						/* request HTE reconfiguration */
						mrc_params->hte_setup = 1;

						/* check the settings */
						do {
							/* result[07:00] == failing byte lane (MAX 8) */
							result = check_bls_ex(mrc_params, address);
							/* check for failures */
							if (result & 0xff) {
								/*
								 * at least 1 byte lane failed:
								 * step the failing edge inward by WDQ_STEP
								 */
								for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
									if (result &
										(bl_mask << bl)) {
										if (side == L)
											delay[L][ch][rk][bl] += WDQ_STEP;
										else
											delay[R][ch][rk][bl] -= WDQ_STEP;

										/* check for algorithm failure */
										if (delay[L][ch][rk][bl] != delay[R][ch][rk][bl]) {
											/*
											 * margin available
											 * update delay setting
											 */
											set_wdq(ch, rk, bl,
												delay[side][ch][rk][bl]);
										} else {
											/*
											 * no margin available:
											 * L and R edges have met, the
											 * eye is fully collapsed —
											 * notify the user and halt
											 */
											training_message(ch, rk, bl);
											mrc_post_code(0xee, 0x80 + side);
										}
									}
								}
							}
						/* stop when all byte lanes pass */
						} while (result & 0xff);
					}
				}
			}
		}
	}

	/* program WDQ to the middle of passing window */
	for (ch = 0; ch < NUM_CHANNELS; ch++) {
		if (mrc_params->channel_enables & (1 << ch)) {
			for (rk = 0; rk < NUM_RANKS; rk++) {
				if (mrc_params->rank_enables & (1 << rk)) {
#ifdef R2R_SHARING
					/* increment "num_ranks_enabled" */
					num_ranks_enabled++;
#endif
					for (bl = 0; bl < NUM_BYTE_LANES / bl_divisor; bl++) {
						DPF(D_INFO,
						    "WDQ eye rank%d lane%d : %d-%d\n",
						    rk, bl,
						    delay[L][ch][rk][bl],
						    delay[R][ch][rk][bl]);

						/* midpoint of the passing L..R window */
						temp = (delay[R][ch][rk][bl] + delay[L][ch][rk][bl]) / 2;

#ifdef R2R_SHARING
						/* ranks share one physical delay: program the rolling average */
						final_delay[ch][bl] += temp;
						set_wdq(ch, rk, bl,
							final_delay[ch][bl] / num_ranks_enabled);
#else
						set_wdq(ch, rk, bl, temp);
#endif
					}
				}
			}
		}
	}
#endif

	LEAVEFN();
}
2267 | ||
2268 | /* | |
2269 | * This function will store relevant timing data | |
2270 | * | |
2271 | * This data will be used on subsequent boots to speed up boot times | |
2272 | * and is required for Suspend To RAM capabilities. | |
2273 | */ | |
2274 | void store_timings(struct mrc_params *mrc_params) | |
2275 | { | |
2276 | uint8_t ch, rk, bl; | |
2277 | struct mrc_timings *mt = &mrc_params->timings; | |
2278 | ||
2279 | for (ch = 0; ch < NUM_CHANNELS; ch++) { | |
2280 | for (rk = 0; rk < NUM_RANKS; rk++) { | |
2281 | for (bl = 0; bl < NUM_BYTE_LANES; bl++) { | |
2282 | mt->rcvn[ch][rk][bl] = get_rcvn(ch, rk, bl); | |
2283 | mt->rdqs[ch][rk][bl] = get_rdqs(ch, rk, bl); | |
2284 | mt->wdqs[ch][rk][bl] = get_wdqs(ch, rk, bl); | |
2285 | mt->wdq[ch][rk][bl] = get_wdq(ch, rk, bl); | |
2286 | ||
2287 | if (rk == 0) | |
2288 | mt->vref[ch][bl] = get_vref(ch, bl); | |
2289 | } | |
2290 | ||
2291 | mt->wctl[ch][rk] = get_wctl(ch, rk); | |
2292 | } | |
2293 | ||
2294 | mt->wcmd[ch] = get_wcmd(ch); | |
2295 | } | |
2296 | ||
2297 | /* need to save for a case of changing frequency after warm reset */ | |
2298 | mt->ddr_speed = mrc_params->ddr_speed; | |
2299 | } | |
2300 | ||
2301 | /* | |
2302 | * The purpose of this function is to ensure the SEC comes out of reset | |
2303 | * and IA initiates the SEC enabling Memory Scrambling. | |
2304 | */ | |
void enable_scrambling(struct mrc_params *mrc_params)
{
	uint32_t lfsr = 0;
	uint8_t i;

	/* Scrambling disabled by configuration: nothing to do */
	if (mrc_params->scrambling_enables == 0)
		return;

	ENTERFN();

	/* 32 bit seed is always stored in BIOS NVM */
	lfsr = mrc_params->timings.scrambler_seed;

	if (mrc_params->boot_mode == BM_COLD) {
		/*
		 * factory value is 0 and in first boot,
		 * a clock based seed is loaded.
		 */
		if (lfsr == 0) {
			/*
			 * get seed from system clock
			 * and make sure it is not all 1's
			 * (the 0x0fffffff mask clears the top nibble,
			 * so the value can never be 0xffffffff)
			 */
			lfsr = rdtsc() & 0x0fffffff;
		} else {
			/*
			 * Need to replace scrambler
			 *
			 * get next 32bit LFSR 16 times which is the last
			 * part of the previous scrambler vector
			 */
			for (i = 0; i < 16; i++)
				lfsr32(&lfsr);
		}

		/* save new seed */
		mrc_params->timings.scrambler_seed = lfsr;
	}

	/*
	 * In warm boot or S3 exit, we have the previous seed.
	 * In cold boot, we have the last 32bit LFSR which is the new seed.
	 */
	lfsr32(&lfsr);	/* shift to next value */
	/* only the low 18 bits are programmed into SCRMSEED */
	msg_port_write(MEM_CTLR, SCRMSEED, (lfsr & 0x0003ffff));

	/*
	 * NOTE(review): both iterations write the same masked value to
	 * SCRMLO and SCRMLO+1 (presumably SCRMHI) — confirm that writing
	 * the identical alternating-bit pattern to both is intended.
	 */
	for (i = 0; i < 2; i++)
		msg_port_write(MEM_CTLR, SCRMLO + i, (lfsr & 0xaaaaaaaa));

	LEAVEFN();
}
2356 | ||
2357 | /* | |
2358 | * Configure MCU Power Management Control Register | |
2359 | * and Scheduler Control Register | |
2360 | */ | |
void prog_ddr_control(struct mrc_params *mrc_params)
{
	u32 dsch;
	u32 dpmc0;

	ENTERFN();

	/* Scheduler: clear out-of-order disable bits and new-request-bypass disable */
	dsch = msg_port_read(MEM_CTLR, DSCH);
	dsch &= ~(DSCH_OOODIS | DSCH_OOOST3DIS | DSCH_NEWBYPDIS);
	msg_port_write(MEM_CTLR, DSCH, dsch);

	/* Power management: honor the configured power-down-disable flag */
	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
	dpmc0 &= ~DPMC0_DISPWRDN;
	/* bit 25 is the DISPWRDN position; set it only if requested */
	dpmc0 |= (mrc_params->power_down_disable << 25);
	dpmc0 &= ~DPMC0_CLKGTDIS;
	/* page close timeout field set to 4 (same value as prog_page_ctrl()) */
	dpmc0 &= ~DPMC0_PCLSTO_MASK;
	dpmc0 |= (4 << 16);
	dpmc0 |= DPMC0_PREAPWDEN;
	msg_port_write(MEM_CTLR, DPMC0, dpmc0);

	/* CMDTRIST = 2h - CMD/ADDR are tristated when no valid command */
	mrc_write_mask(MEM_CTLR, DPMC1, 0x20, 0x30);

	LEAVEFN();
}
2386 | ||
2387 | /* | |
2388 | * After training complete configure MCU Rank Population Register | |
2389 | * specifying: ranks enabled, device width, density, address mode | |
2390 | */ | |
void prog_dra_drb(struct mrc_params *mrc_params)
{
	u32 drp;
	u32 dco;
	u8 density = mrc_params->params.density;

	ENTERFN();

	/* Clear init-complete before reprogramming rank population */
	dco = msg_port_read(MEM_CTLR, DCO);
	dco &= ~DCO_IC;
	msg_port_write(MEM_CTLR, DCO, dco);

	/* Build the rank population register from scratch */
	drp = 0;
	if (mrc_params->rank_enables & 1)
		drp |= DRP_RKEN0;
	if (mrc_params->rank_enables & 2)
		drp |= DRP_RKEN1;
	/* bits 4 and 9 select x16 device width for rank 0 / rank 1 */
	if (mrc_params->dram_width == X16) {
		drp |= (1 << 4);
		drp |= (1 << 9);
	}

	/*
	 * Density encoding in struct dram_params: 0=512Mb, 1=Gb, 2=2Gb, 3=4Gb
	 * has to be mapped RANKDENSx encoding (0=1Gb)
	 */
	if (density == 0)
		density = 4;

	/* per-rank density fields at bits [7:6] and [12:11] */
	drp |= ((density - 1) << 6);
	drp |= ((density - 1) << 11);

	/* Address mode can be overwritten if ECC enabled */
	drp |= (mrc_params->address_mode << 14);

	msg_port_write(MEM_CTLR, DRP, drp);

	/* Clear PMI control, then set init-complete to latch the new config */
	dco &= ~DCO_PMICTL;
	dco |= DCO_IC;
	msg_port_write(MEM_CTLR, DCO, dco);

	LEAVEFN();
}
2434 | ||
2435 | /* Send DRAM wake command */ | |
void perform_wake(struct mrc_params *mrc_params)
{
	/*
	 * mrc_params is unused; the parameter is kept so this stage has
	 * the same signature as the other MRC pipeline functions.
	 */
	ENTERFN();

	dram_wake_command();

	LEAVEFN();
}
2444 | ||
2445 | /* | |
2446 | * Configure refresh rate and short ZQ calibration interval | |
2447 | * Activate dynamic self refresh | |
2448 | */ | |
void change_refresh_period(struct mrc_params *mrc_params)
{
	u32 drfc;
	u32 dcal;
	u32 dpmc0;

	ENTERFN();

	/* Program the configured refresh rate (tREFI field at bit 12) */
	drfc = msg_port_read(MEM_CTLR, DRFC);
	drfc &= ~DRFC_TREFI_MASK;
	drfc |= (mrc_params->refresh_rate << 12);
	/* clear the refresh debt counter so the new rate starts clean */
	drfc |= DRFC_REFDBTCLR;
	msg_port_write(MEM_CTLR, DRFC, drfc);

	/* Short ZQ calibration interval */
	dcal = msg_port_read(MEM_CTLR, DCAL);
	dcal &= ~DCAL_ZQCINT_MASK;
	dcal |= (3 << 8);	/* 63ms */
	msg_port_write(MEM_CTLR, DCAL, dcal);

	/* Enable dynamic self refresh and PHY clock gating */
	dpmc0 = msg_port_read(MEM_CTLR, DPMC0);
	dpmc0 |= (DPMC0_DYNSREN | DPMC0_ENPHYCLKGATE);
	msg_port_write(MEM_CTLR, DPMC0, dpmc0);

	LEAVEFN();
}
2474 | ||
2475 | /* | |
2476 | * Configure DDRPHY for Auto-Refresh, Periodic Compensations, | |
2477 | * Dynamic Diff-Amp, ZQSPERIOD, Auto-Precharge, CKE Power-Down | |
2478 | */ | |
void set_auto_refresh(struct mrc_params *mrc_params)
{
	uint32_t channel;
	uint32_t rank;
	uint32_t bl;
	/*
	 * NOTE(review): bl_divisor is fixed at 1 here, unlike the training
	 * functions which derive it from channel_width — confirm the
	 * override loop below is meant to touch all lane pairs regardless
	 * of x16 mode.
	 */
	uint32_t bl_divisor = 1;
	uint32_t temp;

	ENTERFN();

	/*
	 * Enable Auto-Refresh, Periodic Compensations, Dynamic Diff-Amp,
	 * ZQSPERIOD, Auto-Precharge, CKE Power-Down
	 */
	for (channel = 0; channel < NUM_CHANNELS; channel++) {
		if (mrc_params->channel_enables & (1 << channel)) {
			/* Enable Periodic RCOMPS */
			mrc_alt_write_mask(DDRPHY, CMPCTRL, 2, 2);

			/* Enable Dynamic DiffAmp & Set Read ODT Value */
			switch (mrc_params->rd_odt_value) {
			case 0:
				temp = 0x3f;	/* OFF */
				break;
			default:
				temp = 0x00;	/* Auto */
				break;
			}

			/* loop covers lane pairs: one B0/B1 register pair per iteration */
			for (bl = 0; bl < (NUM_BYTE_LANES / bl_divisor) / 2; bl++) {
				/* Override: DIFFAMP, ODT */
				mrc_alt_write_mask(DDRPHY,
					B0OVRCTL + bl * DDRIODQ_BL_OFFSET +
					channel * DDRIODQ_CH_OFFSET,
					temp << 10,
					0x003ffc00);

				/* Override: DIFFAMP, ODT */
				mrc_alt_write_mask(DDRPHY,
					B1OVRCTL + bl * DDRIODQ_BL_OFFSET +
					channel * DDRIODQ_CH_OFFSET,
					temp << 10,
					0x003ffc00);
			}

			/* Issue ZQCS command */
			for (rank = 0; rank < NUM_RANKS; rank++) {
				if (mrc_params->rank_enables & (1 << rank))
					dram_init_command(DCMD_ZQCS(rank));
			}
		}
	}

	clear_pointers();

	LEAVEFN();
}
2536 | ||
2537 | /* | |
2538 | * Depending on configuration enables ECC support | |
2539 | * | |
2540 | * Available memory size is decreased, and updated with 0s | |
2541 | * in order to clear error status. Address mode 2 forced. | |
2542 | */ | |
void ecc_enable(struct mrc_params *mrc_params)
{
	u32 drp;
	u32 dsch;
	u32 ecc_ctrl;

	/* ECC not requested: nothing to do */
	if (mrc_params->ecc_enables == 0)
		return;

	ENTERFN();

	/* Configuration required in ECC mode: force address map 1 and 64B split */
	drp = msg_port_read(MEM_CTLR, DRP);
	drp &= ~DRP_ADDRMAP_MASK;
	drp |= DRP_ADDRMAP_MAP1;
	drp |= DRP_PRI64BSPLITEN;
	msg_port_write(MEM_CTLR, DRP, drp);

	/* Disable new request bypass */
	dsch = msg_port_read(MEM_CTLR, DSCH);
	dsch |= DSCH_NEWBYPDIS;
	msg_port_write(MEM_CTLR, DSCH, dsch);

	/* Enable ECC: single-bit and double-bit error detection, check-bit generation */
	ecc_ctrl = (DECCCTRL_SBEEN | DECCCTRL_DBEEN | DECCCTRL_ENCBGEN);
	msg_port_write(MEM_CTLR, DECCCTRL, ecc_ctrl);

	/* Assume 8 bank memory, one bank is gone for ECC */
	mrc_params->mem_size -= mrc_params->mem_size / 8;

	/*
	 * For S3 resume memory content has to be preserved;
	 * otherwise zero-fill via the HTE so ECC check bits start consistent
	 */
	if (mrc_params->boot_mode != BM_S3) {
		select_hte();
		hte_mem_init(mrc_params, MRC_MEM_INIT);
		select_mem_mgr();
	}

	LEAVEFN();
}
2582 | ||
2583 | /* | |
2584 | * Execute memory test | |
2585 | * if error detected it is indicated in mrc_params->status | |
2586 | */ | |
2587 | void memory_test(struct mrc_params *mrc_params) | |
2588 | { | |
2589 | uint32_t result = 0; | |
2590 | ||
2591 | ENTERFN(); | |
2592 | ||
2593 | select_hte(); | |
2594 | result = hte_mem_init(mrc_params, MRC_MEM_TEST); | |
2595 | select_mem_mgr(); | |
2596 | ||
2597 | DPF(D_INFO, "Memory test result %x\n", result); | |
2598 | mrc_params->status = ((result == 0) ? MRC_SUCCESS : MRC_E_MEMTEST); | |
2599 | LEAVEFN(); | |
2600 | } | |
2601 | ||
2602 | /* Lock MCU registers at the end of initialization sequence */ | |
2603 | void lock_registers(struct mrc_params *mrc_params) | |
2604 | { | |
2605 | u32 dco; | |
2606 | ||
2607 | ENTERFN(); | |
2608 | ||
2609 | dco = msg_port_read(MEM_CTLR, DCO); | |
312cc39e BM |
2610 | dco &= ~(DCO_PMICTL | DCO_PMIDIS); |
2611 | dco |= (DCO_DRPLOCK | DCO_CPGCLOCK); | |
b829f12a BM |
2612 | msg_port_write(MEM_CTLR, DCO, dco); |
2613 | ||
2614 | LEAVEFN(); | |
2615 | } |