// SPDX-License-Identifier: GPL-2.0+
/*
 * LPC32xx SLC NAND flash controller driver
 *
 * (C) Copyright 2015 Vladimir Zapolskiy <vz@mleia.com>
 *
 * Hardware ECC support original source code
 * Copyright (C) 2008 by NXP Semiconductors
 * Author: Kevin Wells
 *
 * Copyright (c) 2015 Tyco Fire Protection Products.
 */

#include <common.h>
#include <nand.h>
#include <linux/mtd/nand_ecc.h>
#include <linux/errno.h>
#include <asm/io.h>
#include <asm/arch/config.h>
#include <asm/arch/clk.h>
#include <asm/arch/sys_proto.h>
#include <asm/arch/dma.h>
#include <asm/arch/cpu.h>

#if defined(CONFIG_DMA_LPC32XX) && defined(CONFIG_SPL_BUILD)
#warning "DMA support in SPL image is not tested"
#endif

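/* SLC NAND controller registers, in offset order */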
struct lpc32xx_nand_slc_regs {
	u32 data;
	u32 addr;
	u32 cmd;
	u32 stop;
	u32 ctrl;
	u32 cfg;
	u32 stat;
	u32 int_stat;
	u32 ien;
	u32 isr;
	u32 icr;
	u32 tac;
	u32 tc;
	u32 ecc;
	u32 dma_data;
};

/* CFG register */
#define CFG_CE_LOW	(1 << 5)
#define CFG_DMA_ECC	(1 << 4) /* Enable DMA ECC bit */
#define CFG_ECC_EN	(1 << 3) /* ECC enable bit */
#define CFG_DMA_BURST	(1 << 2) /* DMA burst bit */
#define CFG_DMA_DIR	(1 << 1) /* DMA write(0)/read(1) bit */

/* CTRL register */
#define CTRL_SW_RESET	(1 << 2)
#define CTRL_ECC_CLEAR	(1 << 1) /* Reset ECC bit */
#define CTRL_DMA_START	(1 << 0) /* Start DMA channel bit */

/* STAT register */
#define STAT_DMA_FIFO	(1 << 2) /* DMA FIFO has data bit */
#define STAT_NAND_READY	(1 << 0)

/* INT_STAT register */
#define INT_STAT_TC	(1 << 1)
#define INT_STAT_RDY	(1 << 0)

/* TAC register bits, be aware of overflows */
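/*
 * Each timing field below is four bits wide:
 * [31:28] W_RDY, [27:24] W_WIDTH, [23:20] W_HOLD, [19:16] W_SETUP,
 * [15:12] R_RDY, [11:8] R_WIDTH, [7:4] R_HOLD, [3:0] R_SETUP
 */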
#define TAC_W_RDY(n)	(min_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n)	(min_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n)	(min_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n)	(min_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n)	(min_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n)	(min_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n)	(min_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n)	(min_t(uint32_t, (n), 0xF) << 0)

/*
 * NAND ECC Layout for small page NAND devices
 * Note: For large page devices, the default layouts are used.
 */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = {10, 11, 12, 13, 14, 15},
	.oobfree = {
		{.offset = 0,
		 .length = 4},
		{.offset = 6,
		 .length = 4}
	}
};

#if defined(CONFIG_DMA_LPC32XX)
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA Descriptors
 * For Large Block: 17 descriptors = ((16 Data and ECC Read) + 1 Spare Area)
 * For Small Block: 5 descriptors = ((4 Data and ECC Read) + 1 Spare Area)
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
static u32 ecc_buffer[8];	/* MAX ECC size */
static unsigned int dmachan = (unsigned int)-1; /* Invalid channel */

/*
 * Helper macros for the DMA client (i.e. NAND SLC):
 * - to write the next DMA linked list item address
 *   (see arch/arm/include/asm/arch-lpc32xx/dma.h).
 * - to assign the DMA data register to DMA source or destination address.
 * - to assign the ECC register to DMA source or destination address.
 */
#define lpc32xx_dmac_next_lli(x)	((u32)x)
#define lpc32xx_dmac_set_dma_data()	((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc()		((u32)&lpc32xx_nand_slc_regs->ecc)
#endif

static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
	= (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;

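/*
 * One-time controller setup: reset the SLC block, disable DMA/ECC and
 * interrupts, and program the TAC timing register from the current HCLK
 * rate and the board configuration.
 */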
static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset SLC NAND controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* 8-bit bus, no DMA, no ECC, ordinary CE signal */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Interrupts disabled and cleared */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/* Configure NAND flash timings */
	writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}

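/*
 * Control path of the standard NAND chip interface: drive nCE via the
 * CFG register and latch command or address bytes through the dedicated
 * CMD/ADDR registers.
 */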
static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
				  int cmd, unsigned int ctrl)
{
	debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);

	if (ctrl & NAND_NCE)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
	else
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);

	if (cmd == NAND_CMD_NONE)
		return;

	if (ctrl & NAND_CLE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
	else if (ctrl & NAND_ALE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
}

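/* Report NAND ready/busy state from the SLC status register */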
static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
}

#if defined(CONFIG_DMA_LPC32XX)
/*
 * Prepare the DMA descriptors for a NAND read or write operation.
 * If the size is smaller than one ECC block (256 bytes), the transfer
 * is assumed to be an OOB-only transfer.
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * CTRL descriptor entry for reading ECC
	 * Copy Multiple times to sync DMA with Flash Controller
	 */
	ecc_ctrl = 0x5 |
		   DMAC_CHAN_SRC_BURST_1 |
		   DMAC_CHAN_DEST_BURST_1 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Data */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
	       DMAC_CHAN_SRC_BURST_4 |
	       DMAC_CHAN_DEST_BURST_4 |
	       DMAC_CHAN_SRC_WIDTH_32 |
	       DMAC_CHAN_DEST_WIDTH_32 |
	       DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Spare Area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
		   DMAC_CHAN_SRC_BURST_4 |
		   DMAC_CHAN_DEST_BURST_4 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Write Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Memory to Flash.
	 * 2. Copy generated ECC data from Register to Spare Area.
	 * 3. X'fer next 256 bytes of data from Memory to Flash.
	 * 4. Copy generated ECC data from Register to Spare Area.
	 * 5. X'fer 16 bytes of Spare Area from Memory to Flash.
	 * Read Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Flash to Memory.
	 * 2. Copy generated ECC data from Register to ECC calc Buffer.
	 * 3. X'fer next 256 bytes of data from Flash to Memory.
	 * 4. Copy generated ECC data from Register to ECC calc Buffer.
	 * 5. X'fer 16 bytes of Spare Area from Flash to Memory.
	 * Write Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Write Operation are repeated four times,
	 *    which generates 16 DMA descriptors to X'fer 2048 bytes of
	 *    data and 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare Area from Memory to Flash.
	 * Read Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Read Operation are repeated four times,
	 *    which generates 16 DMA descriptors to X'fer 2048 bytes of
	 *    data and 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare Area from Flash to Memory.
	 */

	for (i = 0; i < size / CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i * 256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i * 256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	if (i) { /* Data only transfer */
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB only transfer */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Read/Write Spare Area Data To/From Flash */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}

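/*
 * Run a single SLC <-> memory transfer (either data plus ECC or spare
 * area only) through the DMA engine and wait for both the NAND device
 * and the DMA channel to finish.
 */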
static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA Channel Configuration */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		 (read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		 (read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		 DMAC_CHAN_ENABLE;

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Setup SLC controller and start transfer */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else /* NAND_ECC_WRITE */
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/* Write length for new transfers */
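	/*
	 * The transfer count is programmed once per page: a data transfer
	 * also accounts for the OOB bytes that are moved right after it,
	 * while a stand-alone OOB transfer only programs the OOB length.
	 */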
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Start DMA transfers */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for NAND to be ready */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Wait till DMA transfer is DONE */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA & HW ECC */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}

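/*
 * Repack the 24-bit ECC value captured from the SLC ECC register for
 * each 256-byte sub-page into the three OOB bytes expected by the
 * software ECC correction code (shifted, inverted, MSB first).
 */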
static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
{
	int i;

	for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
	     i += CONFIG_SYS_NAND_ECCBYTES) {
		u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];

		ce = ~(ce << 2) & 0xFFFFFF;
		spare[i + 2] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i + 1] = (u8)(ce & 0xFF); ce >>= 8;
		spare[i] = (u8)(ce & 0xFF);
	}

	return 0;
}

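/*
 * The ECC values are generated by the SLC engine while the page data is
 * streamed by DMA; here they only have to be repacked from ecc_buffer
 * into the byte layout used in the spare area.
 */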
static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}

/*
 * Enables and prepares SLC NAND controller
 * for doing data transfers with H/W ECC enabled.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear ECC */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Setup SLC controller for H/W ECC operations */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}

/*
 * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
 * mtd: MTD block structure
 * dat: raw data read from the chip
 * read_ecc: ECC from the chip
 * calc_ecc: the ECC calculated from raw data
 *
 * Detect and correct a 1 bit error for 256 byte block
 */
int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
			 u_char *read_ecc, u_char *calc_ecc)
{
	unsigned int i;
	int ret1, ret2 = 0;
	u_char *r = read_ecc;
	u_char *c = calc_ecc;
	u16 data_offset = 0;

	for (i = 0; i < ECCSTEPS; i++) {
		ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
		if (ret1 < 0)
			return -EBADMSG;

		ret2 += ret1;

		r += CONFIG_SYS_NAND_ECCBYTES;
		c += CONFIG_SYS_NAND_ECCBYTES;
		data_offset += CONFIG_SYS_NAND_ECCSIZE;
	}

	return ret2;
}
#endif

#if defined(CONFIG_DMA_LPC32XX)
static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}
#else
static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	while (len-- > 0)
		*buf++ = readl(&lpc32xx_nand_slc_regs->data);
}
#endif

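/*
 * Single-byte accessors: the data register is accessed with 32-bit
 * readl()/writel(), of which only the least significant byte carries
 * the NAND data.
 */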
static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->data);
}

#if defined(CONFIG_DMA_LPC32XX)
static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}
#else
static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
{
	while (len-- > 0)
		writel(*buf++, &lpc32xx_nand_slc_regs->data);
}
#endif

static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
{
	writel(byte, &lpc32xx_nand_slc_regs->data);
}

#if defined(CONFIG_DMA_LPC32XX)
/* Reuse the logic from "nand_read_page_hwecc()" */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC and DMA allow a single DMA
	 * transaction of a full page using the DMA controller's
	 * scatter/gather (linked list) mode; the ECC is read without any
	 * software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0) {
		mtd->ecc_stats.failed++;
	} else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}

/* Reuse the logic from "nand_write_page_hwecc()" */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC and DMA allow a single DMA
	 * transaction of a full page using the DMA controller's
	 * scatter/gather (linked list) mode; the ECC is read without any
	 * software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
#endif

/*
 * LPC32xx has only one SLC NAND controller. CONFIG_SYS_NAND_SELF_INIT
 * is deliberately not used, so that this function can be reused by both
 * the SPL NAND and U-Boot images.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX)
	int ret;

	/* Acquire a channel for our use */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_byte = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX)
	/* Hardware ECC calculation is supported when DMA driver is selected */
	lpc32xx_chip->ecc.mode = NAND_ECC_HW;

	lpc32xx_chip->read_buf = lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate = lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct = lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl = lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay = 2000;

	lpc32xx_chip->ecc.read_page = lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page = lpc32xx_write_page_hwecc;
	lpc32xx_chip->options |= NAND_NO_SUBPAGE_WRITE;
#else
	/*
	 * Hardware ECC calculation is not supported by the driver,
	 * because it requires DMA support, see LPC32x0 User Manual,
	 * note after SLC_ECC register description (UM10326, p.198)
	 */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_buf = lpc32xx_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_write_buf;
#endif

	/*
	 * These values are predefined for both small and large page NAND
	 * flash devices.
	 */
	lpc32xx_chip->ecc.size = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Initialize NAND interface */
	lpc32xx_nand_init();

	return 0;
}