/*
 * LPC32xx SLC NAND flash controller driver
 *
 * (C) Copyright 2015 Vladimir Zapolskiy <vz@mleia.com>
 *
 * Hardware ECC support original source code
 * Copyright (C) 2008 by NXP Semiconductors
 * Author: Kevin Wells
 *
 * Copyright (c) 2015 Tyco Fire Protection Products.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <common.h>
#include <nand.h>
#include <linux/mtd/nand_ecc.h>
#include <asm/errno.h>
#include <asm/io.h>
#include <asm/arch/config.h>
#include <asm/arch/clk.h>
#include <asm/arch/sys_proto.h>
#include <asm/arch/dma.h>
#include <asm/arch/cpu.h>

#if defined(CONFIG_DMA_LPC32XX) && defined(CONFIG_SPL_BUILD)
#warning "DMA support in SPL image is not tested"
#endif

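/*
 * SLC NAND controller register map; this is assumed to mirror the
 * register layout at SLC_NAND_BASE described in the LPC32x0 User
 * Manual (UM10326).  All registers are 32 bits wide and are accessed
 * as such.
 */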
struct lpc32xx_nand_slc_regs {
	u32 data;
	u32 addr;
	u32 cmd;
	u32 stop;
	u32 ctrl;
	u32 cfg;
	u32 stat;
	u32 int_stat;
	u32 ien;
	u32 isr;
	u32 icr;
	u32 tac;
	u32 tc;
	u32 ecc;
	u32 dma_data;
};

/* CFG register */
#define CFG_CE_LOW	(1 << 5)
#define CFG_DMA_ECC	(1 << 4) /* Enable DMA ECC bit */
#define CFG_ECC_EN	(1 << 3) /* ECC enable bit */
#define CFG_DMA_BURST	(1 << 2) /* DMA burst bit */
#define CFG_DMA_DIR	(1 << 1) /* DMA write(0)/read(1) bit */

/* CTRL register */
#define CTRL_SW_RESET	(1 << 2)
#define CTRL_ECC_CLEAR	(1 << 1) /* Reset ECC bit */
#define CTRL_DMA_START	(1 << 0) /* Start DMA channel bit */

/* STAT register */
#define STAT_DMA_FIFO	(1 << 2) /* DMA FIFO has data bit */
#define STAT_NAND_READY	(1 << 0)

/* INT_STAT register */
#define INT_STAT_TC	(1 << 1)
#define INT_STAT_RDY	(1 << 0)

/* TAC register bits, be aware of overflows */
#define TAC_W_RDY(n)	(max_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n)	(max_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n)	(max_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n)	(max_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n)	(max_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n)	(max_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n)	(max_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n)	(max_t(uint32_t, (n), 0xF) << 0)
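
/*
 * The TAC fields are derived from HCLK and board-provided
 * CONFIG_LPC32XX_NAND_SLC_* options: the *_CLKS values are used as
 * clock counts directly, while the width/hold/setup values are
 * frequencies in Hz that HCLK gets divided by (see lpc32xx_nand_init()
 * below).  Purely illustrative settings, not taken from any board in
 * this tree, could look like:
 *
 *	#define CONFIG_LPC32XX_NAND_SLC_WDR_CLKS	14
 *	#define CONFIG_LPC32XX_NAND_SLC_WWIDTH		66666666
 *	#define CONFIG_LPC32XX_NAND_SLC_WHOLD		133333333
 *	#define CONFIG_LPC32XX_NAND_SLC_WSETUP		50000000
 *
 * with the read-side (R*) options defined analogously.
 */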

/*
 * NAND ECC Layout for small page NAND devices
 * Note: For large page devices, the default layouts are used.
 */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = {10, 11, 12, 13, 14, 15},
	.oobfree = {
		{.offset = 0,
		 .length = 4},
		{.offset = 6,
		 .length = 4}
	}
};
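
/*
 * OOB bytes 4 and 5 are left out of the free area above, presumably
 * because the factory bad block marker of small-page devices lives in
 * that region (byte 5 on 8-bit wide parts), so it must not be reused
 * for data.
 */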

#if defined(CONFIG_DMA_LPC32XX)
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA Descriptors
 * For Large Block: 17 descriptors = ((16 Data and ECC Read) + 1 Spare Area)
 * For Small Block: 5 descriptors = ((4 Data and ECC Read) + 1 Spare Area)
 */
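/*
 * Each entry in dmalist[] is a lpc32xx_dmac_ll linked-list item (see
 * <asm/arch/dma.h>); only its dma_src, dma_dest, next_lli and
 * next_ctrl words are filled in by this driver when building the
 * scatter/gather chain below.
 */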
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
static u32 ecc_buffer[8]; /* MAX ECC size */
static unsigned int dmachan = (unsigned int)-1; /* Invalid channel */

/*
 * Helper macros for the DMA client (i.e. NAND SLC):
 * - to write the next DMA linked list item address
 *   (see arch/arm/include/asm/arch-lpc32xx/dma.h).
 * - to assign the DMA data register to DMA source or destination address.
 * - to assign the ECC register to DMA source or destination address.
 */
#define lpc32xx_dmac_next_lli(x)	((u32)x)
#define lpc32xx_dmac_set_dma_data()	((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc()		((u32)&lpc32xx_nand_slc_regs->ecc)
#endif

static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
	= (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;

static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset SLC NAND controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* 8-bit bus, no DMA, no ECC, ordinary CE signal */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Interrupts disabled and cleared */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/* Configure NAND flash timings */
	writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}

static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
				  int cmd, unsigned int ctrl)
{
	debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);

	if (ctrl & NAND_NCE)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
	else
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);

	if (cmd == NAND_CMD_NONE)
		return;

	if (ctrl & NAND_CLE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
	else if (ctrl & NAND_ALE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
}

static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
}

#if defined(CONFIG_DMA_LPC32XX)
/*
 * Prepares DMA descriptors for NAND RD/WR operations.
 * If the transfer size is smaller than one ECC chunk (256 bytes),
 * it is assumed to be an OOB-only transfer.
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * CTRL descriptor entry for reading ECC
	 * Copy Multiple times to sync DMA with Flash Controller
	 */
	ecc_ctrl = 0x5 |
		   DMAC_CHAN_SRC_BURST_1 |
		   DMAC_CHAN_DEST_BURST_1 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Data */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
	       DMAC_CHAN_SRC_BURST_4 |
	       DMAC_CHAN_DEST_BURST_4 |
	       DMAC_CHAN_SRC_WIDTH_32 |
	       DMAC_CHAN_DEST_WIDTH_32 |
	       DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Spare Area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
		   DMAC_CHAN_SRC_BURST_4 |
		   DMAC_CHAN_DEST_BURST_4 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Write Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Memory to Flash.
	 * 2. Copy generated ECC data from Register to Spare Area.
	 * 3. X'fer next 256 bytes of data from Memory to Flash.
	 * 4. Copy generated ECC data from Register to Spare Area.
	 * 5. X'fer 16 bytes of Spare area from Memory to Flash.
	 * Read Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Flash to Memory.
	 * 2. Copy generated ECC data from Register to ECC calc Buffer.
	 * 3. X'fer next 256 bytes of data from Flash to Memory.
	 * 4. Copy generated ECC data from Register to ECC calc Buffer.
	 * 5. X'fer 16 bytes of Spare area from Flash to Memory.
	 * Write Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Write Operation repeat four times,
	 *    which generates 16 DMA descriptors to X'fer 2048 bytes of
	 *    data & 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare area from Memory to Flash.
	 * Read Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Read Operation repeat four times,
	 *    which generates 16 DMA descriptors to X'fer 2048 bytes of
	 *    data & 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare area from Flash to Memory.
	 */

	for (i = 0; i < size / CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i * 256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i * 256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	if (i) { /* Data only transfer */
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB only transfer */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Read/Write Spare Area Data To/From Flash */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}

static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA Channel Configuration */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		 (read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		 (read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		 DMAC_CHAN_ENABLE;

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Setup SLC controller and start transfer */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else /* NAND_ECC_WRITE */
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/* Write length for new transfers */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Start DMA transfers */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for NAND to be ready */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Wait till DMA transfer is DONE */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA & HW ECC */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}

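/*
 * Repack the raw SLC ECC register words captured in ecc_buffer[] into
 * the 3-byte-per-256-byte-chunk layout that nand_correct_data() is fed
 * with below: each hardware value is shifted, inverted and split into
 * individual spare bytes.
 */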
static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
{
	int i;

	for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
	     i += CONFIG_SYS_NAND_ECCBYTES) {
		u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];
		ce = ~(ce << 2) & 0xFFFFFF;
		spare[i + 2] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i + 1] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i] = (u8)(ce & 0xFF);
	}

	return 0;
}

static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}

/*
 * Enables and prepares SLC NAND controller
 * for doing data transfers with H/W ECC enabled.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear ECC */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Setup SLC controller for H/W ECC operations */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}

/*
 * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
 * mtd: MTD block structure
 * dat: raw data read from the chip
 * read_ecc: ECC from the chip
 * calc_ecc: the ECC calculated from raw data
 *
 * Detect and correct a 1 bit error for each 256 byte block
 */
int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
			 u_char *read_ecc, u_char *calc_ecc)
{
	unsigned int i;
	int ret1, ret2 = 0;
	u_char *r = read_ecc;
	u_char *c = calc_ecc;
	u16 data_offset = 0;

	for (i = 0; i < ECCSTEPS; i++) {
		ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
		if (ret1 < 0)
			return -EBADMSG;

		ret2 += ret1;

		/* Advance to the next 256-byte chunk and its ECC bytes */
		r += CONFIG_SYS_NAND_ECCBYTES;
		c += CONFIG_SYS_NAND_ECCBYTES;
		data_offset += CONFIG_SYS_NAND_ECCSIZE;
	}

	return ret2;
}
#endif

#if defined(CONFIG_DMA_LPC32XX)
static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}
#else
static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	while (len-- > 0)
		*buf++ = readl(&lpc32xx_nand_slc_regs->data);
}
#endif

static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->data);
}

#if defined(CONFIG_DMA_LPC32XX)
static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}
#else
static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
{
	while (len-- > 0)
		writel(*buf++, &lpc32xx_nand_slc_regs->data);
}
#endif

static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
{
	writel(byte, &lpc32xx_nand_slc_regs->data);
}

#if defined(CONFIG_DMA_LPC32XX)
/* Reuse the logic from "nand_read_page_hwecc()" */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC & DMA allow a whole page to be
	 * transferred in a single DMA transaction using the DMA controller
	 * scatter/gather (linked list) mode; the ECC is read without any
	 * software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0) {
		mtd->ecc_stats.failed++;
	} else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}

/* Reuse the logic from "nand_write_page_hwecc()" */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC & DMA allow a whole page to be
	 * transferred in a single DMA transaction using the DMA controller
	 * scatter/gather (linked list) mode; the ECC is read without any
	 * software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
#endif

/*
 * LPC32xx has only one SLC NAND controller, don't utilize
 * CONFIG_SYS_NAND_SELF_INIT to be able to reuse this function
 * both in SPL NAND and U-Boot images.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX)
	int ret;

	/* Acquire a channel for our use */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_byte = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX)
	/* Hardware ECC calculation is supported when DMA driver is selected */
	lpc32xx_chip->ecc.mode = NAND_ECC_HW;

	lpc32xx_chip->read_buf = lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate = lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct = lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl = lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay = 2000;

	lpc32xx_chip->ecc.read_page = lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page = lpc32xx_write_page_hwecc;
	lpc32xx_chip->options |= NAND_NO_SUBPAGE_WRITE;
#else
	/*
	 * Hardware ECC calculation is not supported by the driver,
	 * because it requires DMA support, see LPC32x0 User Manual,
	 * note after SLC_ECC register description (UM10326, p.198)
	 */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_buf = lpc32xx_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_write_buf;
#endif

	/*
	 * These values are predefined for both small and large page
	 * NAND flash devices.
	 */
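	/*
	 * The SLC hardware ECC is a 1-bit Hamming code computed over
	 * 256-byte chunks, so a matching board configuration (shown here
	 * only as an illustration) would set CONFIG_SYS_NAND_ECCSIZE to
	 * 256 and CONFIG_SYS_NAND_ECCBYTES to 3.
	 */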
	lpc32xx_chip->ecc.size = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Initialize NAND interface */
	lpc32xx_nand_init();

	return 0;
}