blob: 3d6cb1dc6350838bfe80931b2a83a344a01b8110 [file] [log] [blame]
Tom Rini10e47792018-05-06 17:58:06 -04001// SPDX-License-Identifier: GPL-2.0+
Vladimir Zapolskiy78f04f02015-07-18 03:07:52 +03002/*
3 * LPC32xx SLC NAND flash controller driver
4 *
Vladimir Zapolskiy9abf6252018-10-19 03:21:18 +03005 * (C) Copyright 2015-2018 Vladimir Zapolskiy <vz@mleia.com>
6 * Copyright (c) 2015 Tyco Fire Protection Products.
Vladimir Zapolskiy78f04f02015-07-18 03:07:52 +03007 *
Sylvain Lemieuxe72017a2015-08-13 15:40:20 -04008 * Hardware ECC support original source code
9 * Copyright (C) 2008 by NXP Semiconductors
10 * Author: Kevin Wells
Vladimir Zapolskiy78f04f02015-07-18 03:07:52 +030011 */
12
13#include <common.h>
Simon Glass0f2af882020-05-10 11:40:05 -060014#include <log.h>
Vladimir Zapolskiy78f04f02015-07-18 03:07:52 +030015#include <nand.h>
Simon Glassc06c1be2020-05-10 11:40:08 -060016#include <linux/bug.h>
Sylvain Lemieuxe72017a2015-08-13 15:40:20 -040017#include <linux/mtd/nand_ecc.h>
Tom Rini3bde7e22021-09-22 14:50:35 -040018#include <linux/mtd/rawnand.h>
Masahiro Yamada56a931c2016-09-21 11:28:55 +090019#include <linux/errno.h>
Vladimir Zapolskiy78f04f02015-07-18 03:07:52 +030020#include <asm/io.h>
Sylvain Lemieuxe72017a2015-08-13 15:40:20 -040021#include <asm/arch/config.h>
Vladimir Zapolskiy78f04f02015-07-18 03:07:52 +030022#include <asm/arch/clk.h>
23#include <asm/arch/sys_proto.h>
Sylvain Lemieuxe72017a2015-08-13 15:40:20 -040024#include <asm/arch/dma.h>
25#include <asm/arch/cpu.h>
26
/* SLC NAND controller register map, see NXP UM10326 chapter 9 */
struct lpc32xx_nand_slc_regs {
	u32 data;	/* Data port; strictly 32-bit access, low byte is data */
	u32 addr;	/* Address cycle register */
	u32 cmd;	/* Command cycle register */
	u32 stop;	/* NOTE(review): unused by this driver */
	u32 ctrl;	/* Control: SW reset, ECC clear, DMA start */
	u32 cfg;	/* Configuration: CE, ECC/DMA enables, DMA direction */
	u32 stat;	/* Status: NAND ready, DMA FIFO state */
	u32 int_stat;	/* Interrupt status: TC and RDY flags */
	u32 ien;	/* Interrupt enable */
	u32 isr;	/* NOTE(review): unused by this driver */
	u32 icr;	/* Interrupt clear */
	u32 tac;	/* NAND bus timing configuration */
	u32 tc;		/* Transfer count for DMA transfers */
	u32 ecc;	/* Hardware ECC result, drained by DMA descriptors */
	u32 dma_data;	/* Data port used as DMA source/destination */
};
44
/* CFG register */
#define CFG_CE_LOW	(1 << 5) /* Drive nCE low (software chip select) */
#define CFG_DMA_ECC	(1 << 4) /* Enable DMA ECC bit */
#define CFG_ECC_EN	(1 << 3) /* ECC enable bit */
#define CFG_DMA_BURST	(1 << 2) /* DMA burst bit */
#define CFG_DMA_DIR	(1 << 1) /* DMA write(0)/read(1) bit */

/* CTRL register */
#define CTRL_SW_RESET	(1 << 2) /* Software reset of the SLC controller */
#define CTRL_ECC_CLEAR	(1 << 1) /* Reset ECC bit */
#define CTRL_DMA_START	(1 << 0) /* Start DMA channel bit */

/* STAT register */
#define STAT_DMA_FIFO	(1 << 2) /* DMA FIFO has data bit */
#define STAT_NAND_READY	(1 << 0) /* NAND device reports ready */

/* INT_STAT register */
#define INT_STAT_TC	(1 << 1) /* Terminal count (transfer complete) */
#define INT_STAT_RDY	(1 << 0) /* Device ready */
64
/*
 * TAC register bits, be aware of overflows.
 *
 * Each timing parameter occupies a 4-bit field, so the computed clock
 * count must be clamped to at most 0xF; min_t() performs that clamp
 * (the previous max_t() forced every small value up to 0xF and let
 * values above 0xF spill into the neighbouring field).
 */
#define TAC_W_RDY(n)	(min_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n)	(min_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n)	(min_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n)	(min_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n)	(min_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n)	(min_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n)	(min_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n)	(min_t(uint32_t, (n), 0xF) << 0)
74
/*
 * NAND ECC layout for small page NAND devices: 6 ECC bytes packed at
 * the end of the 16-byte OOB area.
 * Note: for large page devices, the default layouts are used.
 */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = { 10, 11, 12, 13, 14, 15, },
	.oobfree = {
		/* bytes 4-5 are skipped — presumably the bad block
		 * marker area; verify against the flash datasheet */
		{ .offset = 0, .length = 4, },
		{ .offset = 6, .length = 4, },
	}
};
85
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
/* Number of ECC fragments per page */
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA Descriptors
 * For Large Block: 17 descriptors = ((16 Data and ECC Read) + 1 Spare Area)
 * For Small Block: 5 descriptors = ((4 Data and ECC Read) + 1 Spare Area)
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
static u32 ecc_buffer[8]; /* MAX ECC size: one word per ECC step */
static unsigned int dmachan = (unsigned int)-1; /* Invalid channel */

/*
 * Helper macros for the DMA client (i.e. NAND SLC):
 * - to write the next DMA linked list item address
 *   (see arch/include/asm/arch-lpc32xx/dma.h).
 * - to assign the DMA data register to DMA source or destination address.
 * - to assign the ECC register to DMA source or destination address.
 */
#define lpc32xx_dmac_next_lli(x) ((u32)x)
#define lpc32xx_dmac_set_dma_data() ((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc() ((u32)&lpc32xx_nand_slc_regs->ecc)
#endif
109
/* The LPC32xx has a single SLC NAND controller at a fixed base address */
static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
	= (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;
112
/*
 * Reset the SLC NAND controller and program the NAND bus timings.
 *
 * Timing values are derived from the current HCLK rate and the
 * board-specific CONFIG_LPC32XX_NAND_SLC_* settings; each value
 * lands in a 4-bit field of the TAC register.
 */
static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset SLC NAND controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* 8-bit bus, no DMA, no ECC, ordinary CE signal */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Interrupts disabled and cleared */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/* Configure NAND flash timings */
	writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}
139
140static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
141 int cmd, unsigned int ctrl)
142{
143 debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);
144
145 if (ctrl & NAND_NCE)
146 setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
147 else
148 clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
149
150 if (cmd == NAND_CMD_NONE)
151 return;
152
153 if (ctrl & NAND_CLE)
154 writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
155 else if (ctrl & NAND_ALE)
156 writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
157}
158
159static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
160{
161 return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
162}
163
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
/*
 * Prepares DMA descriptors for NAND RD/WR operations.
 * If the size is < 256 Bytes then it is assumed to be
 * an OOB transfer.
 *
 * Data descriptors alternate with ECC-drain descriptors in dmalist[];
 * the page data itself and the OOB area are transferred by two
 * separate calls (see lpc32xx_read_page_hwecc()/write_page_hwecc()).
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * CTRL descriptor entry for reading ECC
	 * Copy Multiple times to sync DMA with Flash Controller
	 * (transfer size 0x5 words, single-beat bursts)
	 */
	ecc_ctrl = 0x5 |
		   DMAC_CHAN_SRC_BURST_1 |
		   DMAC_CHAN_DEST_BURST_1 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Data */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
	       DMAC_CHAN_SRC_BURST_4 |
	       DMAC_CHAN_DEST_BURST_4 |
	       DMAC_CHAN_SRC_WIDTH_32 |
	       DMAC_CHAN_DEST_WIDTH_32 |
	       DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Spare Area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
		   DMAC_CHAN_SRC_BURST_4 |
		   DMAC_CHAN_DEST_BURST_4 |
		   DMAC_CHAN_SRC_WIDTH_32 |
		   DMAC_CHAN_DEST_WIDTH_32 |
		   DMAC_CHAN_DEST_AHB1;

	/* Only the memory-side address auto-increments; the data port is fixed */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Write Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Memory to Flash.
	 * 2. Copy generated ECC data from Register to Spare Area
	 * 3. X'fer next 256 bytes of data from Memory to Flash.
	 * 4. Copy generated ECC data from Register to Spare Area.
	 * 5. X'fer 16 bytes of Spare area from Memory to Flash.
	 * Read Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Flash to Memory.
	 * 2. Copy generated ECC data from Register to ECC calc Buffer.
	 * 3. X'fer next 256 bytes of data from Flash to Memory.
	 * 4. Copy generated ECC data from Register to ECC calc Buffer.
	 * 5. X'fer 16 bytes of Spare area from Flash to Memory.
	 * Write Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps(1-4) of Write Operations repeat for four times
	 * which generates 16 DMA descriptors to X'fer 2048 bytes of
	 * data & 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare area from Memory to Flash.
	 * Read Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps(1-4) of Read Operations repeat for four times
	 * which generates 16 DMA descriptors to X'fer 2048 bytes of
	 * data & 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare area from Flash to Memory.
	 */

	for (i = 0; i < size/CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i*256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i*256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		/* Drain the hardware ECC word for this fragment */
		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	if (i) { /* Data only transfer */
		/* Terminate the chain at the last ECC descriptor */
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB only transfer */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Read/ Write Spare Area Data To/From Flash */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}
285
/*
 * Execute one DMA transfer between memory and the SLC NAND data port.
 * 'read' selects flash-to-memory (1) or memory-to-flash (0).  Busy-waits
 * until both the NAND device is ready and the DMA channel completes.
 */
static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA Channel Configuration */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		 (read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		 (read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		 DMAC_CHAN_ENABLE;

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Setup SLC controller and start transfer */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else /* NAND_ECC_WRITE */
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/*
	 * Write length for new transfers: when starting a data transfer,
	 * the programmed count covers data plus the upcoming OOB call, so
	 * the second (OOB) invocation finds a non-zero TC and skips this.
	 */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Start DMA transfers */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for NAND to be ready */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Wait till DMA transfer is DONE */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA & HW ECC */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}
336
337static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
338{
339 int i;
340 for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
341 i += CONFIG_SYS_NAND_ECCBYTES) {
342 u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];
343 ce = ~(ce << 2) & 0xFFFFFF;
344 spare[i+2] = (u8)(ce & 0xFF); ce >>= 8;
345 spare[i+1] = (u8)(ce & 0xFF); ce >>= 8;
346 spare[i] = (u8)(ce & 0xFF);
347 }
348 return 0;
349}
350
351static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
352 uint8_t *ecc_code)
353{
354 return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
355}
356
/*
 * Enables and prepares SLC NAND controller
 * for doing data transfers with H/W ECC enabled.
 *
 * Serves as the chip->ecc.hwctl hook; 'mode' is unused here because
 * the DMA direction is configured separately in lpc32xx_nand_xfer().
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear ECC */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Setup SLC controller for H/W ECC operations */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}
369
370/*
371 * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
372 * mtd: MTD block structure
373 * dat: raw data read from the chip
374 * read_ecc: ECC from the chip
375 * calc_ecc: the ECC calculated from raw data
376 *
377 * Detect and correct a 1 bit error for 256 byte block
378 */
379int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
380 u_char *read_ecc, u_char *calc_ecc)
381{
382 unsigned int i;
383 int ret1, ret2 = 0;
384 u_char *r = read_ecc;
385 u_char *c = calc_ecc;
386 u16 data_offset = 0;
387
388 for (i = 0 ; i < ECCSTEPS ; i++) {
389 r += CONFIG_SYS_NAND_ECCBYTES;
390 c += CONFIG_SYS_NAND_ECCBYTES;
391 data_offset += CONFIG_SYS_NAND_ECCSIZE;
392
393 ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
394 if (ret1 < 0)
395 return -EBADMSG;
396 else
397 ret2 += ret1;
398 }
399
400 return ret2;
401}
Sylvain Lemieuxe72017a2015-08-13 15:40:20 -0400402
Sylvain Lemieuxe72017a2015-08-13 15:40:20 -0400403static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
404{
405 lpc32xx_nand_xfer(mtd, buf, len, 1);
406}
Vladimir Zapolskiy78f04f02015-07-18 03:07:52 +0300407
Sylvain Lemieuxe72017a2015-08-13 15:40:20 -0400408static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
409 int len)
410{
411 lpc32xx_nand_xfer(mtd, buf, len, 0);
412}
Vladimir Zapolskiy78f04f02015-07-18 03:07:52 +0300413
/* Reuse the logic from "nand_read_page_hwecc()" */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd, struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC & DMA allowed single DMA transaction
	 * of a page size using DMA controller scatter/gather mode through
	 * linked list; the ECC read is done without any software intervention.
	 */

	/* Enable HW ECC, DMA the page data, then the OOB area */
	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	/* Extract the stored ECC bytes from OOB per the active layout */
	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	/* Single correction pass; the callback iterates over fragments */
	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0)
		mtd->ecc_stats.failed++;
	else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}
451
/* Reuse the logic from "nand_write_page_hwecc()" */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC & DMA allowed single DMA transaction
	 * of a page size using DMA controller scatter/gather mode through
	 * linked list; the ECC read is done without any software intervention.
	 */

	/* Enable HW ECC and DMA the page data out */
	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	/* Place the hardware-computed ECC into OOB per the active layout */
	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	/* Write the OOB area to complete the page program */
	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
Vladimir Zapolskiy9abf6252018-10-19 03:21:18 +0300481#else
482static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
483{
484 while (len-- > 0)
485 *buf++ = readl(&lpc32xx_nand_slc_regs->data);
486}
487
488static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
489{
490 while (len-- > 0)
491 writel(*buf++, &lpc32xx_nand_slc_regs->data);
492}
Sylvain Lemieuxe72017a2015-08-13 15:40:20 -0400493#endif
494
Vladimir Zapolskiy9abf6252018-10-19 03:21:18 +0300495static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
496{
497 return readl(&lpc32xx_nand_slc_regs->data);
498}
499
500static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
501{
502 writel(byte, &lpc32xx_nand_slc_regs->data);
503}
504
/*
 * LPC32xx has only one SLC NAND controller, don't utilize
 * CONFIG_SYS_NAND_SELF_INIT to be able to reuse this function
 * both in SPL NAND and U-Boot images.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	int ret;

	/* Acquire a channel for our use */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_byte = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	/* Hardware ECC calculation is supported when DMA driver is selected */
	lpc32xx_chip->ecc.mode = NAND_ECC_HW;

	lpc32xx_chip->read_buf = lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate = lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct = lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl = lpc32xx_hwecc_enable;
	/* NOTE(review): chip_delay is in microseconds in the NAND core */
	lpc32xx_chip->chip_delay = 2000;

	lpc32xx_chip->ecc.read_page = lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page = lpc32xx_write_page_hwecc;
	lpc32xx_chip->options |= NAND_NO_SUBPAGE_WRITE;
#else
	/*
	 * Hardware ECC calculation is not supported by the driver,
	 * because it requires DMA support, see LPC32x0 User Manual,
	 * note after SLC_ECC register description (UM10326, p.198)
	 */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_buf = lpc32xx_read_buf;
	lpc32xx_chip->write_buf = lpc32xx_write_buf;
#endif

	/*
	 * These values are predefined
	 * for both small and large page NAND flash devices.
	 */
	lpc32xx_chip->ecc.size = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	/* Small-page devices use the custom OOB layout defined above */
	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Initialize NAND interface */
	lpc32xx_nand_init();

	return 0;
}