// SPDX-License-Identifier: GPL-2.0+
/*
 * LPC32xx SLC NAND flash controller driver
 *
 * (C) Copyright 2015-2018 Vladimir Zapolskiy <vz@mleia.com>
 * Copyright (c) 2015 Tyco Fire Protection Products.
 *
 * Hardware ECC support original source code
 * Copyright (C) 2008 by NXP Semiconductors
 * Author: Kevin Wells
 */

#include <common.h>
#include <nand.h>
#include <linux/mtd/nand_ecc.h>
#include <linux/errno.h>
#include <asm/io.h>
#include <asm/arch/config.h>
#include <asm/arch/clk.h>
#include <asm/arch/sys_proto.h>
#include <asm/arch/dma.h>
#include <asm/arch/cpu.h>

struct lpc32xx_nand_slc_regs {
	u32 data;
	u32 addr;
	u32 cmd;
	u32 stop;
	u32 ctrl;
	u32 cfg;
	u32 stat;
	u32 int_stat;
	u32 ien;
	u32 isr;
	u32 icr;
	u32 tac;
	u32 tc;
	u32 ecc;
	u32 dma_data;
};

/* CFG register */
#define CFG_CE_LOW		(1 << 5)
#define CFG_DMA_ECC		(1 << 4)	/* Enable DMA ECC bit */
#define CFG_ECC_EN		(1 << 3)	/* ECC enable bit */
#define CFG_DMA_BURST		(1 << 2)	/* DMA burst bit */
#define CFG_DMA_DIR		(1 << 1)	/* DMA write(0)/read(1) bit */

/* CTRL register */
#define CTRL_SW_RESET		(1 << 2)
#define CTRL_ECC_CLEAR		(1 << 1)	/* Reset ECC bit */
#define CTRL_DMA_START		(1 << 0)	/* Start DMA channel bit */

/* STAT register */
#define STAT_DMA_FIFO		(1 << 2)	/* DMA FIFO has data bit */
#define STAT_NAND_READY		(1 << 0)

/* INT_STAT register */
#define INT_STAT_TC		(1 << 1)
#define INT_STAT_RDY		(1 << 0)

/* TAC register bits, be aware of overflows */
#define TAC_W_RDY(n)		(max_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n)		(max_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n)		(max_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n)		(max_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n)		(max_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n)		(max_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n)		(max_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n)		(max_t(uint32_t, (n), 0xF) << 0)
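/*
 * Each timing value above occupies a 4-bit field of the TAC register
 * (write ready/width/hold/setup in bits 31:16, read ready/width/hold/setup
 * in bits 15:0) and is expressed in HCLK cycles, as computed in
 * lpc32xx_nand_init() below.
 */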

/*
 * NAND ECC Layout for small page NAND devices
 * Note: For large page devices, the default layouts are used.
 */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = { 10, 11, 12, 13, 14, 15, },
	.oobfree = {
		{ .offset = 0, .length = 4, },
		{ .offset = 6, .length = 4, },
	}
};
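/*
 * Resulting OOB map for a 16-byte spare area: bytes 0-3 and 6-9 are left
 * free for upper layers, bytes 10-15 carry the six ECC bytes, and bytes
 * 4-5 are skipped (byte 5 holds the factory bad block marker on small
 * page devices).
 */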

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA Descriptors
 * For Large Block: 17 descriptors = ((16 Data and ECC Read) + 1 Spare Area)
 * For Small Block: 5 descriptors = ((4 Data and ECC Read) + 1 Spare Area)
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
static u32 ecc_buffer[8];	/* MAX ECC size */
static unsigned int dmachan = (unsigned int)-1;	/* Invalid channel */
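/*
 * One 32-bit parity word is read back from the SLC ECC register per
 * 256-byte sub-page, so the eight ecc_buffer[] entries are enough for
 * the largest (2048-byte) page handled by this driver.
 */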

/*
 * Helper macros for the DMA client (i.e. NAND SLC):
 * - to write the next DMA linked list item address
 *   (see arch/include/asm/arch-lpc32xx/dma.h).
 * - to assign the DMA data register to DMA source or destination address.
 * - to assign the ECC register to DMA source or destination address.
 */
#define lpc32xx_dmac_next_lli(x)	((u32)x)
#define lpc32xx_dmac_set_dma_data()	((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc()		((u32)&lpc32xx_nand_slc_regs->ecc)
#endif

static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
	= (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;

static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset SLC NAND controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* 8-bit bus, no DMA, no ECC, ordinary CE signal */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Interrupts disabled and cleared */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/* Configure NAND flash timings */
	writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}

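/*
 * Hardware control hook used by the raw NAND core: NAND_NCE drives the
 * chip enable override in CFG, NAND_CLE routes the byte to the command
 * register, NAND_ALE routes it to the address register, and NAND_CMD_NONE
 * means only the chip enable state changes.
 */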
static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
				  int cmd, unsigned int ctrl)
{
	debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);

	if (ctrl & NAND_NCE)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
	else
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);

	if (cmd == NAND_CMD_NONE)
		return;

	if (ctrl & NAND_CLE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
	else if (ctrl & NAND_ALE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
}

static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
}

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
/*
 * Prepare DMA descriptors for NAND read/write operations.
 * If the size is smaller than 256 bytes, the transfer is assumed to
 * target the OOB (spare) area only.
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * CTRL descriptor entry for reading ECC: the ECC register is
	 * read several times to keep the DMA engine in sync with the
	 * flash controller.
	 */
	ecc_ctrl = 0x5 |
		DMAC_CHAN_SRC_BURST_1 |
		DMAC_CHAN_DEST_BURST_1 |
		DMAC_CHAN_SRC_WIDTH_32 |
		DMAC_CHAN_DEST_WIDTH_32 |
		DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Data */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
		DMAC_CHAN_SRC_BURST_4 |
		DMAC_CHAN_DEST_BURST_4 |
		DMAC_CHAN_SRC_WIDTH_32 |
		DMAC_CHAN_DEST_WIDTH_32 |
		DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Spare Area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
		DMAC_CHAN_SRC_BURST_4 |
		DMAC_CHAN_DEST_BURST_4 |
		DMAC_CHAN_SRC_WIDTH_32 |
		DMAC_CHAN_DEST_WIDTH_32 |
		DMAC_CHAN_DEST_AHB1;

	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Write Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Memory to Flash.
	 * 2. Copy generated ECC data from Register to Spare Area.
	 * 3. X'fer next 256 bytes of data from Memory to Flash.
	 * 4. Copy generated ECC data from Register to Spare Area.
	 * 5. X'fer 16 bytes of Spare area from Memory to Flash.
	 * Read Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Flash to Memory.
	 * 2. Copy generated ECC data from Register to ECC calc Buffer.
	 * 3. X'fer next 256 bytes of data from Flash to Memory.
	 * 4. Copy generated ECC data from Register to ECC calc Buffer.
	 * 5. X'fer 16 bytes of Spare area from Flash to Memory.
	 * Write Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Write Operation repeat four times,
	 *    which generates 16 DMA descriptors to X'fer 2048 bytes of
	 *    data & 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare area from Memory to Flash.
	 * Read Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Read Operation repeat four times,
	 *    which generates 16 DMA descriptors to X'fer 2048 bytes of
	 *    data & 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare area from Flash to Memory.
	 */

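	/*
	 * The loop below chains data[0] -> ecc[0] -> ... -> data[n-1] ->
	 * ecc[n-1]; the last ECC descriptor terminates the list and
	 * raises the terminal count interrupt. A request shorter than
	 * one ECC step skips the loop and is handled further down as a
	 * single spare area descriptor.
	 */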
	for (i = 0; i < size / CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i * 256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i * 256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	if (i) { /* Data only transfer */
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB only transfer */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Read/Write Spare Area Data To/From Flash */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}

static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA Channel Configuration */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		(read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		(read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		DMAC_CHAN_ENABLE;

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Setup SLC controller and start transfer */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else /* NAND_ECC_WRITE */
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/* Write length for new transfers */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Start DMA transfers */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for NAND to be ready */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Wait until the DMA transfer is done */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA & HW ECC */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}

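/*
 * Convert the raw parity words read from the SLC ECC register into the
 * 3-byte-per-step layout used with nand_correct_data(): the 24-bit parity
 * is shifted, inverted and stored most significant byte first.
 */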
static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
{
	int i;

	for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
	     i += CONFIG_SYS_NAND_ECCBYTES) {
		u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];

		ce = ~(ce << 2) & 0xFFFFFF;
		spare[i + 2] = (u8)(ce & 0xFF);
		ce >>= 8;
		spare[i + 1] = (u8)(ce & 0xFF);
		ce >>= 8;
		spare[i] = (u8)(ce & 0xFF);
	}

	return 0;
}

static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}

/*
 * Enables and prepares the SLC NAND controller
 * for doing data transfers with H/W ECC enabled.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear ECC */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Setup SLC controller for H/W ECC operations */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}

/*
 * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
 * mtd: MTD block structure
 * dat: raw data read from the chip
 * read_ecc: ECC from the chip
 * calc_ecc: the ECC calculated from raw data
 *
 * Detect and correct a 1-bit error per 256-byte block
 */
int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
			 u_char *read_ecc, u_char *calc_ecc)
{
	unsigned int i;
	int ret1, ret2 = 0;
	u_char *r = read_ecc;
	u_char *c = calc_ecc;
	u16 data_offset = 0;

	for (i = 0; i < ECCSTEPS; i++) {
		ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
		if (ret1 < 0)
			return -EBADMSG;
		else
			ret2 += ret1;

		r += CONFIG_SYS_NAND_ECCBYTES;
		c += CONFIG_SYS_NAND_ECCBYTES;
		data_offset += CONFIG_SYS_NAND_ECCSIZE;
	}

	return ret2;
}

static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}

static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}

/* Reuse the logic from "nand_read_page_hwecc()" */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC & DMA allow a single DMA transaction
	 * of a full page using the DMA controller scatter/gather mode through
	 * a linked list; the ECC read is done without any software
	 * intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0) {
		mtd->ecc_stats.failed++;
	} else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}

/* Reuse the logic from "nand_write_page_hwecc()" */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC & DMA allow a single DMA transaction
	 * of a full page using the DMA controller scatter/gather mode through
	 * a linked list; the ECC read is done without any software
	 * intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
#else
static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	while (len-- > 0)
		*buf++ = readl(&lpc32xx_nand_slc_regs->data);
}

static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
{
	while (len-- > 0)
		writel(*buf++, &lpc32xx_nand_slc_regs->data);
}
#endif

static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->data);
}

static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
{
	writel(byte, &lpc32xx_nand_slc_regs->data);
}

/*
 * LPC32xx has only one SLC NAND controller; CONFIG_SYS_NAND_SELF_INIT is
 * deliberately not used, so that this function can be shared by the SPL
 * NAND and the main U-Boot images.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	int ret;

	/* Acquire a channel for our use */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because the data register has to be
	 * accessed with strictly 32-bit wide operations.
	 */
	lpc32xx_chip->read_byte = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	/* Hardware ECC calculation is supported when DMA driver is selected */
	lpc32xx_chip->ecc.mode = NAND_ECC_HW;

	lpc32xx_chip->read_buf = lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate = lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct = lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl = lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay = 2000;

	lpc32xx_chip->ecc.read_page = lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page = lpc32xx_write_page_hwecc;
	lpc32xx_chip->options |= NAND_NO_SUBPAGE_WRITE;
#else
	/*
	 * Hardware ECC calculation is not used in this configuration,
	 * because it requires DMA support, see the LPC32x0 User Manual,
	 * note after the SLC_ECC register description (UM10326, p.198).
	 */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because the data register has to be
	 * accessed with strictly 32-bit wide operations.
	 */
	lpc32xx_chip->read_buf = lpc32xx_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_write_buf;
#endif

	/*
	 * These values are predefined for both small and large page
	 * NAND flash devices.
	 */
	lpc32xx_chip->ecc.size = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;
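	/*
	 * ecc.strength = 1 corresponds to one correctable bit error per
	 * ECC step of CONFIG_SYS_NAND_ECCSIZE data bytes, which is what
	 * both the hardware ECC path and the software ECC fallback
	 * provide.
	 */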

	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Initialize NAND interface */
	lpc32xx_nand_init();

	return 0;
}