// SPDX-License-Identifier: GPL-2.0+
/*
 * LPC32xx SLC NAND flash controller driver
 *
 * (C) Copyright 2015-2018 Vladimir Zapolskiy <vz@mleia.com>
 * Copyright (c) 2015 Tyco Fire Protection Products.
 *
 * Hardware ECC support original source code
 * Copyright (C) 2008 by NXP Semiconductors
 * Author: Kevin Wells
 */

#include <common.h>
#include <log.h>
#include <nand.h>
#include <linux/bug.h>
#include <linux/mtd/nand_ecc.h>
#include <linux/mtd/rawnand.h>
#include <linux/errno.h>
#include <asm/io.h>
#include <asm/arch/config.h>
#include <asm/arch/clk.h>
#include <asm/arch/sys_proto.h>
#include <asm/arch/dma.h>
#include <asm/arch/cpu.h>
#include <linux/printk.h>

struct lpc32xx_nand_slc_regs {
	u32 data;
	u32 addr;
	u32 cmd;
	u32 stop;
	u32 ctrl;
	u32 cfg;
	u32 stat;
	u32 int_stat;
	u32 ien;
	u32 isr;
	u32 icr;
	u32 tac;
	u32 tc;
	u32 ecc;
	u32 dma_data;
};

/* CFG register */
#define CFG_CE_LOW		(1 << 5)
#define CFG_DMA_ECC		(1 << 4) /* Enable DMA ECC bit */
#define CFG_ECC_EN		(1 << 3) /* ECC enable bit */
#define CFG_DMA_BURST		(1 << 2) /* DMA burst bit */
#define CFG_DMA_DIR		(1 << 1) /* DMA write(0)/read(1) bit */

/* CTRL register */
#define CTRL_SW_RESET		(1 << 2)
#define CTRL_ECC_CLEAR		(1 << 1) /* Reset ECC bit */
#define CTRL_DMA_START		(1 << 0) /* Start DMA channel bit */

/* STAT register */
#define STAT_DMA_FIFO		(1 << 2) /* DMA FIFO has data bit */
#define STAT_NAND_READY		(1 << 0)

/* INT_STAT register */
#define INT_STAT_TC		(1 << 1)
#define INT_STAT_RDY		(1 << 0)

/* TAC register bits, be aware of overflows */
#define TAC_W_RDY(n)		(max_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n)		(max_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n)		(max_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n)		(max_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n)		(max_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n)		(max_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n)		(max_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n)		(max_t(uint32_t, (n), 0xF) << 0)

/* NAND ECC Layout for small page NAND devices
 * Note: For large page devices, the default layouts are used. */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = { 10, 11, 12, 13, 14, 15, },
	.oobfree = {
		{ .offset = 0, .length = 4, },
		{ .offset = 6, .length = 4, },
	}
};

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CFG_SYS_NAND_ECCSIZE)

/*
 * DMA Descriptors
 * For Large Block: 17 descriptors = ((16 Data and ECC Read) + 1 Spare Area)
 * For Small Block: 5 descriptors = ((4 Data and ECC Read) + 1 Spare Area)
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
static u32 ecc_buffer[8]; /* MAX ECC size */
static unsigned int dmachan = (unsigned int)-1; /* Invalid channel */

/*
 * Helper macros for the DMA client (i.e. NAND SLC):
 * - to write the next DMA linked list item address
 *   (see arch/include/asm/arch-lpc32xx/dma.h).
 * - to assign the DMA data register to DMA source or destination address.
 * - to assign the ECC register to DMA source or destination address.
 */
#define lpc32xx_dmac_next_lli(x)	((u32)x)
#define lpc32xx_dmac_set_dma_data()	((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc()		((u32)&lpc32xx_nand_slc_regs->ecc)
#endif

static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
	= (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;

static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset SLC NAND controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* 8-bit bus, no DMA, no ECC, ordinary CE signal */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Interrupts disabled and cleared */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/* Configure NAND flash timings */
	writel(TAC_W_RDY(CFG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CFG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CFG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CFG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CFG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CFG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CFG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CFG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}

static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
				  int cmd, unsigned int ctrl)
{
	debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);

	if (ctrl & NAND_NCE)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
	else
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);

	if (cmd == NAND_CMD_NONE)
		return;

	if (ctrl & NAND_CLE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
	else if (ctrl & NAND_ALE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
}

static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
}

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
/*
 * Prepares DMA descriptors for NAND RD/WR operations
 * If the size is < 256 bytes then it is assumed to be
 * an OOB transfer
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * CTRL descriptor entry for reading ECC
	 * Copy Multiple times to sync DMA with Flash Controller
	 */
	ecc_ctrl = 0x5 |
		   DMAC_CHAN_SRC_BURST_1 |
		   DMAC_CHAN_DEST_BURST_1 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Data */
	ctrl = (CFG_SYS_NAND_ECCSIZE / 4) |
	       DMAC_CHAN_SRC_BURST_4 |
	       DMAC_CHAN_DEST_BURST_4 |
	       DMAC_CHAN_SRC_WIDTH_32 |
	       DMAC_CHAN_DEST_WIDTH_32 |
	       DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Spare Area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
		   DMAC_CHAN_SRC_BURST_4 |
		   DMAC_CHAN_DEST_BURST_4 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Write Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Memory to Flash.
	 * 2. Copy generated ECC data from Register to Spare Area.
	 * 3. X'fer next 256 bytes of data from Memory to Flash.
	 * 4. Copy generated ECC data from Register to Spare Area.
	 * 5. X'fer 16 bytes of Spare area from Memory to Flash.
	 * Read Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Flash to Memory.
	 * 2. Copy generated ECC data from Register to ECC calc Buffer.
	 * 3. X'fer next 256 bytes of data from Flash to Memory.
	 * 4. Copy generated ECC data from Register to ECC calc Buffer.
	 * 5. X'fer 16 bytes of Spare area from Flash to Memory.
	 * Write Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Write Operation repeat four times,
	 *    generating 16 DMA descriptors to X'fer 2048 bytes of
	 *    data & 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare area from Memory to Flash.
	 * Read Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Read Operation repeat four times,
	 *    generating 16 DMA descriptors to X'fer 2048 bytes of
	 *    data & 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare area from Flash to Memory.
	 */

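	/*
	 * Descriptors are built in pairs: even entries move one ECC-sized
	 * chunk of page data, odd entries copy the matching hardware ECC
	 * word from the controller's ECC register into ecc_buffer[].
	 */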
	for (i = 0; i < size/CFG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i*256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i*256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	if (i) { /* Data only transfer */
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB only transfer */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Read/Write Spare Area Data To/From Flash */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}

static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA Channel Configuration */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		 (read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		 (read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		 DMAC_CHAN_ENABLE;

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Setup SLC controller and start transfer */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else /* NAND_ECC_WRITE */
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

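	/*
	 * For a page data transfer the count also covers the spare area,
	 * so mtd->oobsize is added to the data length; an OOB-only
	 * transfer (len == mtd->oobsize) programs just its own length.
	 */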
	/* Write length for new transfers */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Start DMA transfers */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for NAND to be ready */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Wait till DMA transfer is DONE */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA & HW ECC */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}

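/*
 * Repack the hardware ECC words collected by DMA into the three bytes
 * per 256-byte step consumed by nand_correct_data(): each 32-bit ECC
 * value is shifted, inverted and stored most significant byte first.
 */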
static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
{
	int i;
	for (i = 0; i < (count * CFG_SYS_NAND_ECCBYTES);
	     i += CFG_SYS_NAND_ECCBYTES) {
		u32 ce = ecc[i / CFG_SYS_NAND_ECCBYTES];
		ce = ~(ce << 2) & 0xFFFFFF;
		spare[i+2] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i+1] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i] = (u8)(ce & 0xFF);
	}
	return 0;
}

static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}

/*
 * Enables and prepares SLC NAND controller
 * for doing data transfers with H/W ECC enabled.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear ECC */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Setup SLC controller for H/W ECC operations */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}

/*
 * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
 * mtd: MTD block structure
 * dat: raw data read from the chip
 * read_ecc: ECC from the chip
 * calc_ecc: the ECC calculated from raw data
 *
 * Detect and correct a 1 bit error for 256 byte block
 */
int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
			 u_char *read_ecc, u_char *calc_ecc)
{
	unsigned int i;
	int ret1, ret2 = 0;
	u_char *r = read_ecc;
	u_char *c = calc_ecc;
	u16 data_offset = 0;

	for (i = 0 ; i < ECCSTEPS ; i++) {
		r += CFG_SYS_NAND_ECCBYTES;
		c += CFG_SYS_NAND_ECCBYTES;
		data_offset += CFG_SYS_NAND_ECCSIZE;

		ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
		if (ret1 < 0)
			return -EBADMSG;
		else
			ret2 += ret1;
	}

	return ret2;
}

static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}

static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}

/* Reuse the logic from "nand_read_page_hwecc()" */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC and DMA controllers allow a single
	 * DMA transaction for a whole page using the DMA controller's
	 * scatter/gather (linked list) mode; the ECC read is done without
	 * any software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0)
		mtd->ecc_stats.failed++;
	else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}

/* Reuse the logic from "nand_write_page_hwecc()" */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC and DMA controllers allow a single
	 * DMA transaction for a whole page using the DMA controller's
	 * scatter/gather (linked list) mode; the ECC read is done without
	 * any software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
#else
static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	while (len-- > 0)
		*buf++ = readl(&lpc32xx_nand_slc_regs->data);
}

static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
{
	while (len-- > 0)
		writel(*buf++, &lpc32xx_nand_slc_regs->data);
}
#endif

static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->data);
}

static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
{
	writel(byte, &lpc32xx_nand_slc_regs->data);
}

/*
 * LPC32xx has only one SLC NAND controller; CONFIG_SYS_NAND_SELF_INIT
 * is not used, so that this function can be reused in both the SPL NAND
 * and U-Boot images.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	int ret;

	/* Acquire a channel for our use */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_byte = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	/* Hardware ECC calculation is supported when the DMA driver is selected */
	lpc32xx_chip->ecc.mode = NAND_ECC_HW;

	lpc32xx_chip->read_buf = lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate = lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct = lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl = lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay = 2000;

	lpc32xx_chip->ecc.read_page = lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page = lpc32xx_write_page_hwecc;
	lpc32xx_chip->options |= NAND_NO_SUBPAGE_WRITE;
#else
	/*
	 * Hardware ECC calculation is not supported by the driver,
	 * because it requires DMA support, see LPC32x0 User Manual,
	 * note after SLC_ECC register description (UM10326, p.198)
	 */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_buf = lpc32xx_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_write_buf;
#endif

	/*
	 * These values are predefined
	 * for both small and large page NAND flash devices.
	 */
	lpc32xx_chip->ecc.size = CFG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes = CFG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Initialize NAND interface */
	lpc32xx_nand_init();

	return 0;
}