/*
 * Arasan NAND Flash Controller Driver
 *
 * Copyright (C) 2014 - 2015 Xilinx, Inc.
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#include <common.h>
#include <malloc.h>
#include <asm/io.h>
#include <linux/errno.h>
#include <linux/mtd/mtd.h>
#include <linux/mtd/nand.h>
#include <linux/mtd/partitions.h>
#include <linux/mtd/nand_ecc.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>
#include <nand.h>

struct arasan_nand_info {
	void __iomem *nand_base;
	u32 page;
};

struct nand_regs {
	u32 pkt_reg;
	u32 memadr_reg1;
	u32 memadr_reg2;
	u32 cmd_reg;
	u32 pgm_reg;
	u32 intsts_enr;
	u32 intsig_enr;
	u32 intsts_reg;
	u32 rdy_busy;
	u32 cms_sysadr_reg;
	u32 flash_sts_reg;
	u32 tmg_reg;
	u32 buf_dataport;
	u32 ecc_reg;
	u32 ecc_errcnt_reg;
	u32 ecc_sprcmd_reg;
	u32 errcnt_1bitreg;
	u32 errcnt_2bitreg;
	u32 errcnt_3bitreg;
	u32 errcnt_4bitreg;
	u32 dma_sysadr0_reg;
	u32 dma_bufbdry_reg;
	u32 cpu_rls_reg;
	u32 errcnt_5bitreg;
	u32 errcnt_6bitreg;
	u32 errcnt_7bitreg;
	u32 errcnt_8bitreg;
	u32 data_if_reg;
};

#define arasan_nand_base	((struct nand_regs __iomem *)ARASAN_NAND_BASEADDR)

struct arasan_nand_command_format {
	u8 cmd1;
	u8 cmd2;
	u8 addr_cycles;
	u32 pgm;
};

#define ONDIE_ECC_FEATURE_ADDR			0x90

#define ARASAN_PROG_RD_MASK			0x00000001
#define ARASAN_PROG_BLK_ERS_MASK		0x00000004
#define ARASAN_PROG_RD_ID_MASK			0x00000040
#define ARASAN_PROG_RD_STS_MASK			0x00000008
#define ARASAN_PROG_PG_PROG_MASK		0x00000010
#define ARASAN_PROG_RD_PARAM_PG_MASK		0x00000080
#define ARASAN_PROG_RST_MASK			0x00000100
#define ARASAN_PROG_GET_FTRS_MASK		0x00000200
#define ARASAN_PROG_SET_FTRS_MASK		0x00000400
#define ARASAN_PROG_CHNG_ROWADR_END_MASK	0x00400000

#define ARASAN_NAND_CMD_ECC_ON_MASK		0x80000000
#define ARASAN_NAND_CMD_CMD12_MASK		0xFFFF
#define ARASAN_NAND_CMD_PG_SIZE_MASK		0x3800000
#define ARASAN_NAND_CMD_PG_SIZE_SHIFT		23
#define ARASAN_NAND_CMD_CMD2_SHIFT		8
#define ARASAN_NAND_CMD_ADDR_CYCL_MASK		0x70000000
#define ARASAN_NAND_CMD_ADDR_CYCL_SHIFT		28

#define ARASAN_NAND_MEM_ADDR1_PAGE_MASK		0xFFFF0000
#define ARASAN_NAND_MEM_ADDR1_COL_MASK		0xFFFF
#define ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT	16
#define ARASAN_NAND_MEM_ADDR2_PAGE_MASK		0xFF
#define ARASAN_NAND_MEM_ADDR2_CS_MASK		0xC0000000
#define ARASAN_NAND_MEM_ADDR2_BCH_MASK		0xE000000
#define ARASAN_NAND_MEM_ADDR2_BCH_SHIFT		25

#define ARASAN_NAND_INT_STS_ERR_EN_MASK		0x10
#define ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK	0x08
#define ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK	0x02
#define ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK	0x01
#define ARASAN_NAND_INT_STS_XFR_CMPLT_MASK	0x04

#define ARASAN_NAND_PKT_REG_PKT_CNT_MASK	0xFFF000
#define ARASAN_NAND_PKT_REG_PKT_SIZE_MASK	0x7FF
#define ARASAN_NAND_PKT_REG_PKT_CNT_SHFT	12

#define ARASAN_NAND_ROW_ADDR_CYCL_MASK		0x0F
#define ARASAN_NAND_COL_ADDR_CYCL_MASK		0xF0
#define ARASAN_NAND_COL_ADDR_CYCL_SHIFT		4

#define ARASAN_NAND_ECC_SIZE_SHIFT		16
#define ARASAN_NAND_ECC_BCH_SHIFT		27

#define ARASAN_NAND_PKTSIZE_1K			1024
#define ARASAN_NAND_PKTSIZE_512			512

#define ARASAN_NAND_POLL_TIMEOUT		1000000
#define ARASAN_NAND_INVALID_ADDR_CYCL		0xFF

#define ERR_ADDR_CYCLE				-1
#define READ_BUFF_SIZE				0x4000

static struct arasan_nand_command_format *curr_cmd;

enum addr_cycles {
	NAND_ADDR_CYCL_NONE,
	NAND_ADDR_CYCL_ONE,
	NAND_ADDR_CYCL_ROW,
	NAND_ADDR_CYCL_COL,
	NAND_ADDR_CYCL_BOTH,
};

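/*
 * Command lookup table: each entry maps a NAND opcode (cmd1, plus an
 * optional second-cycle opcode cmd2) to the number of address cycles it
 * needs and the program-register mask that starts the operation in the
 * controller. The table is terminated by the NAND_CMD_NONE entry.
 */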
static struct arasan_nand_command_format arasan_nand_commands[] = {
	{NAND_CMD_READ0, NAND_CMD_READSTART, NAND_ADDR_CYCL_BOTH,
	 ARASAN_PROG_RD_MASK},
	{NAND_CMD_RNDOUT, NAND_CMD_RNDOUTSTART, NAND_ADDR_CYCL_COL,
	 ARASAN_PROG_RD_MASK},
	{NAND_CMD_READID, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_RD_ID_MASK},
	{NAND_CMD_STATUS, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE,
	 ARASAN_PROG_RD_STS_MASK},
	{NAND_CMD_SEQIN, NAND_CMD_PAGEPROG, NAND_ADDR_CYCL_BOTH,
	 ARASAN_PROG_PG_PROG_MASK},
	{NAND_CMD_RNDIN, NAND_CMD_NONE, NAND_ADDR_CYCL_COL,
	 ARASAN_PROG_CHNG_ROWADR_END_MASK},
	{NAND_CMD_ERASE1, NAND_CMD_ERASE2, NAND_ADDR_CYCL_ROW,
	 ARASAN_PROG_BLK_ERS_MASK},
	{NAND_CMD_RESET, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE,
	 ARASAN_PROG_RST_MASK},
	{NAND_CMD_PARAM, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_RD_PARAM_PG_MASK},
	{NAND_CMD_GET_FEATURES, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_GET_FTRS_MASK},
	{NAND_CMD_SET_FEATURES, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_SET_FTRS_MASK},
	{NAND_CMD_NONE, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE, 0},
};

struct arasan_ecc_matrix {
	u32 pagesize;
	u32 ecc_codeword_size;
	u8 eccbits;
	u8 bch;
	u8 bchval;
	u16 eccaddr;
	u16 eccsize;
};

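/*
 * Supported ECC configurations, grouped by page size and codeword size.
 * Each entry gives the correction strength (eccbits), whether BCH is
 * used and with which bchval, plus the offset (eccaddr) and length
 * (eccsize) of the ECC bytes counted from the start of the page data,
 * i.e. the ECC sits at the end of the spare area.
 */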
static const struct arasan_ecc_matrix ecc_matrix[] = {
	{512, 512, 1, 0, 0, 0x20D, 0x3},
	{512, 512, 4, 1, 3, 0x209, 0x7},
	{512, 512, 8, 1, 2, 0x203, 0xD},
	/*
	 * 2K byte page
	 */
	{2048, 512, 1, 0, 0, 0x834, 0xC},
	{2048, 512, 4, 1, 3, 0x826, 0x1A},
	{2048, 512, 8, 1, 2, 0x80c, 0x34},
	{2048, 512, 12, 1, 1, 0x822, 0x4E},
	{2048, 512, 16, 1, 0, 0x808, 0x68},
	{2048, 1024, 24, 1, 4, 0x81c, 0x54},
	/*
	 * 4K byte page
	 */
	{4096, 512, 1, 0, 0, 0x1068, 0x18},
	{4096, 512, 4, 1, 3, 0x104c, 0x34},
	{4096, 512, 8, 1, 2, 0x1018, 0x68},
	{4096, 512, 12, 1, 1, 0x1044, 0x9C},
	{4096, 512, 16, 1, 0, 0x1010, 0xD0},
	{4096, 1024, 24, 1, 4, 0x1038, 0xA8},
	/*
	 * 8K byte page
	 */
	{8192, 512, 1, 0, 0, 0x20d0, 0x30},
	{8192, 512, 4, 1, 3, 0x2098, 0x68},
	{8192, 512, 8, 1, 2, 0x2030, 0xD0},
	{8192, 512, 12, 1, 1, 0x2088, 0x138},
	{8192, 512, 16, 1, 0, 0x2020, 0x1A0},
	{8192, 1024, 24, 1, 4, 0x2070, 0x150},
	/*
	 * 16K byte page
	 */
	{16384, 512, 1, 0, 0, 0x4460, 0x60},
	{16384, 512, 4, 1, 3, 0x43f0, 0xD0},
	{16384, 512, 8, 1, 2, 0x4320, 0x1A0},
	{16384, 512, 12, 1, 1, 0x4250, 0x270},
	{16384, 512, 16, 1, 0, 0x4180, 0x340},
	{16384, 1024, 24, 1, 4, 0x4220, 0x2A0}
};

static u8 buf_data[READ_BUFF_SIZE];
static u32 buf_index;

static struct nand_ecclayout nand_oob;

static struct nand_chip nand_chip[CONFIG_SYS_MAX_NAND_DEVICE];

static void arasan_nand_select_chip(struct mtd_info *mtd, int chip)
{
}

static void arasan_nand_enable_ecc(void)
{
	u32 reg_val;

	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val |= ARASAN_NAND_CMD_ECC_ON_MASK;

	writel(reg_val, &arasan_nand_base->cmd_reg);
}

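/*
 * Derive the number of address cycles for the current command from the
 * ONFI parameter page: row cycles sit in the low nibble of
 * onfi_params.addr_cycles and column cycles in the high nibble.
 * Returns ARASAN_NAND_INVALID_ADDR_CYCL for an unknown cycle type.
 */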
static u8 arasan_nand_get_addrcycle(struct mtd_info *mtd)
{
	u8 addrcycles;
	struct nand_chip *chip = mtd_to_nand(mtd);

	switch (curr_cmd->addr_cycles) {
	case NAND_ADDR_CYCL_NONE:
		addrcycles = 0;
		break;
	case NAND_ADDR_CYCL_ONE:
		addrcycles = 1;
		break;
	case NAND_ADDR_CYCL_ROW:
		addrcycles = chip->onfi_params.addr_cycles &
			     ARASAN_NAND_ROW_ADDR_CYCL_MASK;
		break;
	case NAND_ADDR_CYCL_COL:
		addrcycles = (chip->onfi_params.addr_cycles &
			      ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
			      ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
		break;
	case NAND_ADDR_CYCL_BOTH:
		addrcycles = chip->onfi_params.addr_cycles &
			     ARASAN_NAND_ROW_ADDR_CYCL_MASK;
		addrcycles += (chip->onfi_params.addr_cycles &
			       ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
			       ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
		break;
	default:
		addrcycles = ARASAN_NAND_INVALID_ADDR_CYCL;
		break;
	}
	return addrcycles;
}

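/*
 * Read one page through the controller FIFO with hardware ECC enabled.
 * The transfer is split into 512- or 1024-byte packets depending on the
 * ECC step size; each packet is drained from the data port once the
 * buffer-read-ready status bit is set, and the routine finally waits
 * for transfer-complete and checks the ECC error status bits.
 */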
static int arasan_nand_read_page(struct mtd_info *mtd, u8 *buf, u32 size)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 reg_val, i, pktsize, pktnum;
	u32 *bufptr = (u32 *)buf;
	u32 timeout;
	u32 rdcount = 0;
	u8 addr_cycles;

	if (chip->ecc_step_ds >= ARASAN_NAND_PKTSIZE_1K)
		pktsize = ARASAN_NAND_PKTSIZE_1K;
	else
		pktsize = ARASAN_NAND_PKTSIZE_512;

	if (size % pktsize)
		pktnum = size/pktsize + 1;
	else
		pktnum = size/pktsize;

	reg_val = readl(&arasan_nand_base->intsts_enr);
	reg_val |= ARASAN_NAND_INT_STS_ERR_EN_MASK |
		   ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK;
	writel(reg_val, &arasan_nand_base->intsts_enr);

	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
	reg_val |= (pktnum << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) |
		   pktsize;
	writel(reg_val, &arasan_nand_base->pkt_reg);

	arasan_nand_enable_ecc();
	addr_cycles = arasan_nand_get_addrcycle(mtd);
	if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	writel((NAND_CMD_RNDOUTSTART << ARASAN_NAND_CMD_CMD2_SHIFT) |
	       NAND_CMD_RNDOUT | (addr_cycles <<
	       ARASAN_NAND_CMD_ADDR_CYCL_SHIFT),
	       &arasan_nand_base->ecc_sprcmd_reg);
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (rdcount < pktnum) {
		timeout = ARASAN_NAND_POLL_TIMEOUT;
		while (!(readl(&arasan_nand_base->intsts_reg) &
			ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK) && timeout) {
			udelay(1);
			timeout--;
		}
		if (!timeout) {
			puts("arasan_read_page: timedout:Buff RDY\n");
			return -ETIMEDOUT;
		}

		rdcount++;

		if (pktnum == rdcount) {
			reg_val = readl(&arasan_nand_base->intsts_enr);
			reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
			writel(reg_val, &arasan_nand_base->intsts_enr);
		} else {
			reg_val = readl(&arasan_nand_base->intsts_enr);
			writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
			       &arasan_nand_base->intsts_enr);
		}
		reg_val = readl(&arasan_nand_base->intsts_reg);
		writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
		       &arasan_nand_base->intsts_reg);

		for (i = 0; i < pktsize/4; i++)
			bufptr[i] = readl(&arasan_nand_base->buf_dataport);

		bufptr += pktsize/4;

		if (rdcount >= pktnum)
			break;

		writel(ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
		       &arasan_nand_base->intsts_enr);
	}

	timeout = ARASAN_NAND_POLL_TIMEOUT;

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout) {
		puts("arasan rd_page timedout:Xfer CMPLT\n");
		return -ETIMEDOUT;
	}

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	if (readl(&arasan_nand_base->intsts_reg) &
	    ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK) {
		printf("arasan rd_page:sbiterror\n");
		return -1;
	}

	if (readl(&arasan_nand_base->intsts_reg) &
	    ARASAN_NAND_INT_STS_ERR_EN_MASK) {
		mtd->ecc_stats.failed++;
		printf("arasan rd_page:multibiterror\n");
		return -1;
	}

	return 0;
}

static int arasan_nand_read_page_hwecc(struct mtd_info *mtd,
		struct nand_chip *chip, u8 *buf, int oob_required, int page)
{
	int status;

	status = arasan_nand_read_page(mtd, buf, (mtd->writesize));

	if (oob_required)
		chip->ecc.read_oob(mtd, chip, page);

	return status;
}

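/*
 * Push a buffer to the controller data port, using byte and half-word
 * writes to handle an unaligned start address or trailing bytes, and
 * 32-bit writes for the aligned bulk of the transfer.
 */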
static void arasan_nand_fill_tx(const u8 *buf, int len)
{
	u32 __iomem *nand = &arasan_nand_base->buf_dataport;

	if (((unsigned long)buf & 0x3) != 0) {
		if (((unsigned long)buf & 0x1) != 0) {
			if (len) {
				writeb(*buf, nand);
				buf += 1;
				len--;
			}
		}

		if (((unsigned long)buf & 0x3) != 0) {
			if (len >= 2) {
				writew(*(u16 *)buf, nand);
				buf += 2;
				len -= 2;
			}
		}
	}

	while (len >= 4) {
		writel(*(u32 *)buf, nand);
		buf += 4;
		len -= 4;
	}

	if (len) {
		if (len >= 2) {
			writew(*(u16 *)buf, nand);
			buf += 2;
			len -= 2;
		}

		if (len)
			writeb(*buf, nand);
	}
}

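/*
 * Program one page with hardware ECC: the mirror of the read path,
 * feeding the data port packet by packet whenever the buffer-write-ready
 * status bit is set, then waiting for transfer-complete. When
 * oob_required is set, the spare area is written separately through
 * chip->ecc.write_oob for the page remembered at SEQIN time.
 */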
static int arasan_nand_write_page_hwecc(struct mtd_info *mtd,
		struct nand_chip *chip, const u8 *buf, int oob_required,
		int page)
{
	u32 reg_val, i, pktsize, pktnum;
	const u32 *bufptr = (const u32 *)buf;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u32 size = mtd->writesize;
	u32 rdcount = 0;
	u8 column_addr_cycles;
	struct arasan_nand_info *nand = nand_get_controller_data(chip);

	if (chip->ecc_step_ds >= ARASAN_NAND_PKTSIZE_1K)
		pktsize = ARASAN_NAND_PKTSIZE_1K;
	else
		pktsize = ARASAN_NAND_PKTSIZE_512;

	if (size % pktsize)
		pktnum = size/pktsize + 1;
	else
		pktnum = size/pktsize;

	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
	reg_val |= (pktnum << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | pktsize;
	writel(reg_val, &arasan_nand_base->pkt_reg);

	arasan_nand_enable_ecc();
	column_addr_cycles = (chip->onfi_params.addr_cycles &
			      ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
			      ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
	writel((NAND_CMD_RNDIN | (column_addr_cycles << 28)),
	       &arasan_nand_base->ecc_sprcmd_reg);
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (rdcount < pktnum) {
		timeout = ARASAN_NAND_POLL_TIMEOUT;
		while (!(readl(&arasan_nand_base->intsts_reg) &
			ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK) && timeout) {
			udelay(1);
			timeout--;
		}

		if (!timeout) {
			puts("arasan_write_page: timedout:Buff RDY\n");
			return -ETIMEDOUT;
		}

		rdcount++;

		if (pktnum == rdcount) {
			reg_val = readl(&arasan_nand_base->intsts_enr);
			reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
			writel(reg_val, &arasan_nand_base->intsts_enr);
		} else {
			reg_val = readl(&arasan_nand_base->intsts_enr);
			writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
			       &arasan_nand_base->intsts_enr);
		}

		reg_val = readl(&arasan_nand_base->intsts_reg);
		writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
		       &arasan_nand_base->intsts_reg);

		for (i = 0; i < pktsize/4; i++)
			writel(bufptr[i], &arasan_nand_base->buf_dataport);

		bufptr += pktsize/4;

		if (rdcount >= pktnum)
			break;

		writel(ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
		       &arasan_nand_base->intsts_enr);
	}

	timeout = ARASAN_NAND_POLL_TIMEOUT;

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout) {
		puts("arasan write_page timedout:Xfer CMPLT\n");
		return -ETIMEDOUT;
	}

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	if (oob_required)
		chip->ecc.write_oob(mtd, chip, nand->page);

	return 0;
}

static int arasan_nand_read_oob(struct mtd_info *mtd, struct nand_chip *chip,
				int page)
{
	chip->cmdfunc(mtd, NAND_CMD_READOOB, 0, page);
	chip->read_buf(mtd, chip->oob_poi, (mtd->oobsize));

	return 0;
}

static int arasan_nand_write_oob(struct mtd_info *mtd, struct nand_chip *chip,
				 int page)
{
	int status = 0;
	const u8 *buf = chip->oob_poi;

	chip->cmdfunc(mtd, NAND_CMD_SEQIN, mtd->writesize, page);
	chip->write_buf(mtd, buf, mtd->oobsize);

	return status;
}

static int arasan_nand_reset(struct arasan_nand_command_format *curr_cmd)
{
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u32 cmd_reg = 0;

	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	cmd_reg = readl(&arasan_nand_base->cmd_reg);
	cmd_reg &= ~ARASAN_NAND_CMD_CMD12_MASK;

	cmd_reg |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	writel(cmd_reg, &arasan_nand_base->cmd_reg);
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout) {
		printf("ERROR:%s timedout\n", __func__);
		return -ETIMEDOUT;
	}

	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);

	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	return 0;
}

static u8 arasan_nand_page(struct mtd_info *mtd)
{
	u8 page_val = 0;

	switch (mtd->writesize) {
	case 512:
		page_val = 0;
		break;
	case 2048:
		page_val = 1;
		break;
	case 4096:
		page_val = 2;
		break;
	case 8192:
		page_val = 3;
		break;
	case 16384:
		page_val = 4;
		break;
	case 1024:
		page_val = 5;
		break;
	default:
		printf("%s:Pagesize>16K\n", __func__);
		break;
	}

	return page_val;
}

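/*
 * Set up the command, page-size, address-cycle and memory-address
 * registers for a write-type command (SET_FEATURES / SEQIN). The data
 * transfer itself happens later, via arasan_nand_write_buf() or the
 * write_page path.
 */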
static int arasan_nand_send_wrcmd(struct arasan_nand_command_format *curr_cmd,
			int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val, page;
	u8 page_val, addr_cycles;

	writel(ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	if (curr_cmd->cmd1 == NAND_CMD_SEQIN) {
		reg_val &= ~ARASAN_NAND_CMD_PG_SIZE_MASK;
		page_val = arasan_nand_page(mtd);
		reg_val |= (page_val << ARASAN_NAND_CMD_PG_SIZE_SHIFT);
	}

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
	addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val |= (addr_cycles <<
		    ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);
	writel(reg_val, &arasan_nand_base->cmd_reg);

	if (page_addr == -1)
		page_addr = 0;

	page = (page_addr << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
	       ARASAN_NAND_MEM_ADDR1_PAGE_MASK;
	column &= ARASAN_NAND_MEM_ADDR1_COL_MASK;
	writel(page|column, &arasan_nand_base->memadr_reg1);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
	reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);

	return 0;
}

static void arasan_nand_write_buf(struct mtd_info *mtd, const u8 *buf, int len)
{
	u32 reg_val;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;

	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);

	reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | len;
	writel(reg_val, &arasan_nand_base->pkt_reg);
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK) && timeout) {
		udelay(1);
		timeout--;
	}

	if (!timeout)
		puts("ERROR:arasan_nand_write_buf timedout:Buff RDY\n");

	reg_val = readl(&arasan_nand_base->intsts_enr);
	reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
	writel(reg_val, &arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
	       &arasan_nand_base->intsts_reg);

	arasan_nand_fill_tx(buf, len);

	timeout = ARASAN_NAND_POLL_TIMEOUT;
	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout)
		puts("ERROR:arasan_nand_write_buf timedout:Xfer CMPLT\n");

	writel(readl(&arasan_nand_base->intsts_enr) |
	       ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	writel(readl(&arasan_nand_base->intsts_reg) |
	       ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);
}

static int arasan_nand_erase(struct arasan_nand_command_format *curr_cmd,
			int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val, page;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u8 row_addr_cycles;

	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	row_addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (row_addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
	reg_val |= (row_addr_cycles <<
		    ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);

	writel(reg_val, &arasan_nand_base->cmd_reg);

	page = (page_addr << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
	       ARASAN_NAND_MEM_ADDR1_PAGE_MASK;
	column = page_addr & ARASAN_NAND_MEM_ADDR1_COL_MASK;
	writel(page | column, &arasan_nand_base->memadr_reg1);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
	reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout) {
		printf("ERROR:%s timedout:Xfer CMPLT\n", __func__);
		return -ETIMEDOUT;
	}

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	return 0;
}

static int arasan_nand_read_status(struct arasan_nand_command_format *curr_cmd,
				int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u8 addr_cycles;

	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
	reg_val |= (addr_cycles <<
		    ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);

	writel(reg_val, &arasan_nand_base->cmd_reg);

	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
	reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | 1;
	writel(reg_val, &arasan_nand_base->pkt_reg);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);

	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}

	if (!timeout) {
		printf("ERROR:%s: timedout:Xfer CMPLT\n", __func__);
		return -ETIMEDOUT;
	}

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	return 0;
}

static int arasan_nand_send_rdcmd(struct arasan_nand_command_format *curr_cmd,
			int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val, addr_cycles, page;
	u8 page_val;

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
	       &arasan_nand_base->intsts_enr);

	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);

	if (curr_cmd->cmd1 == NAND_CMD_RNDOUT ||
	    curr_cmd->cmd1 == NAND_CMD_READ0) {
		reg_val &= ~ARASAN_NAND_CMD_PG_SIZE_MASK;
		page_val = arasan_nand_page(mtd);
		reg_val |= (page_val << ARASAN_NAND_CMD_PG_SIZE_SHIFT);
	}

	reg_val &= ~ARASAN_NAND_CMD_ECC_ON_MASK;

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;

	addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val |= (addr_cycles << 28);
	writel(reg_val, &arasan_nand_base->cmd_reg);

	if (page_addr == -1)
		page_addr = 0;

	page = (page_addr << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
	       ARASAN_NAND_MEM_ADDR1_PAGE_MASK;
	column &= ARASAN_NAND_MEM_ADDR1_COL_MASK;
	writel(page | column, &arasan_nand_base->memadr_reg1);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
	reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
	writel(reg_val, &arasan_nand_base->memadr_reg2);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	buf_index = 0;

	return 0;
}

static void arasan_nand_read_buf(struct mtd_info *mtd, u8 *buf, int size)
{
	u32 reg_val, i;
	u32 *bufptr = (u32 *)buf;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;

	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
	reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | size;
	writel(reg_val, &arasan_nand_base->pkt_reg);

	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK) && timeout) {
		udelay(1);
		timeout--;
	}

	if (!timeout)
		puts("ERROR:arasan_nand_read_buf timedout:Buff RDY\n");

	reg_val = readl(&arasan_nand_base->intsts_enr);
	reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
	writel(reg_val, &arasan_nand_base->intsts_enr);

	writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
	       &arasan_nand_base->intsts_reg);

	buf_index = 0;
	for (i = 0; i < size / 4; i++)
		bufptr[i] = readl(&arasan_nand_base->buf_dataport);

	if (size & 0x03)
		bufptr[i] = readl(&arasan_nand_base->buf_dataport);

	timeout = ARASAN_NAND_POLL_TIMEOUT;

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}

	if (!timeout)
		puts("ERROR:arasan_nand_read_buf timedout:Xfer CMPLT\n");

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);
}

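/*
 * Byte-wise reads are served from a local buffer: on the first call for
 * a command, the expected payload (ID, ONFI parameter page, feature
 * data, ...) is fetched via read_buf(); NAND_CMD_STATUS is answered
 * directly from the controller's flash status register.
 */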
static u8 arasan_nand_read_byte(struct mtd_info *mtd)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 size;
	u8 val;
	struct nand_onfi_params *p;

	if (buf_index == 0) {
		p = &chip->onfi_params;
		if (curr_cmd->cmd1 == NAND_CMD_READID)
			size = 4;
		else if (curr_cmd->cmd1 == NAND_CMD_PARAM)
			size = sizeof(struct nand_onfi_params);
		else if (curr_cmd->cmd1 == NAND_CMD_RNDOUT)
			size = le16_to_cpu(p->ext_param_page_length) * 16;
		else if (curr_cmd->cmd1 == NAND_CMD_GET_FEATURES)
			size = 4;
		else if (curr_cmd->cmd1 == NAND_CMD_STATUS)
			return readb(&arasan_nand_base->flash_sts_reg);
		else
			size = 8;
		chip->read_buf(mtd, &buf_data[0], size);
	}

	val = *(&buf_data[0] + buf_index);
	buf_index++;

	return val;
}

static void arasan_nand_cmd_function(struct mtd_info *mtd, unsigned int command,
				     int column, int page_addr)
{
	u32 i, ret = 0;
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct arasan_nand_info *nand = nand_get_controller_data(chip);

	curr_cmd = NULL;
	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);

	if ((command == NAND_CMD_READOOB) &&
	    (mtd->writesize > 512)) {
		column += mtd->writesize;
		command = NAND_CMD_READ0;
	}

	/* Get the command format */
	for (i = 0; (arasan_nand_commands[i].cmd1 != NAND_CMD_NONE ||
		     arasan_nand_commands[i].cmd2 != NAND_CMD_NONE); i++) {
		if (command == arasan_nand_commands[i].cmd1) {
			curr_cmd = &arasan_nand_commands[i];
			break;
		}
	}

	if (curr_cmd == NULL) {
		printf("Unsupported Command; 0x%x\n", command);
		return;
	}

	if (curr_cmd->cmd1 == NAND_CMD_RESET)
		ret = arasan_nand_reset(curr_cmd);

	if ((curr_cmd->cmd1 == NAND_CMD_READID) ||
	    (curr_cmd->cmd1 == NAND_CMD_PARAM) ||
	    (curr_cmd->cmd1 == NAND_CMD_RNDOUT) ||
	    (curr_cmd->cmd1 == NAND_CMD_GET_FEATURES) ||
	    (curr_cmd->cmd1 == NAND_CMD_READ0))
		ret = arasan_nand_send_rdcmd(curr_cmd, column, page_addr, mtd);

	if ((curr_cmd->cmd1 == NAND_CMD_SET_FEATURES) ||
	    (curr_cmd->cmd1 == NAND_CMD_SEQIN)) {
		nand->page = page_addr;
		ret = arasan_nand_send_wrcmd(curr_cmd, column, page_addr, mtd);
	}

	if (curr_cmd->cmd1 == NAND_CMD_ERASE1)
		ret = arasan_nand_erase(curr_cmd, column, page_addr, mtd);

	if (curr_cmd->cmd1 == NAND_CMD_STATUS)
		ret = arasan_nand_read_status(curr_cmd, column, page_addr, mtd);

	if (ret != 0)
		printf("ERROR:%s:command:0x%x\n", __func__, curr_cmd->cmd1);
}

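/*
 * Pick the best matching ecc_matrix entry for the detected page size and
 * the chip's ONFI ECC requirements, program the controller ECC and BCH
 * registers accordingly, and build an OOB layout with the ECC bytes
 * packed at the end of the spare area.
 */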
static int arasan_nand_ecc_init(struct mtd_info *mtd)
{
	int found = -1;
	u32 regval, eccpos_start, i;
	struct nand_chip *nand_chip = mtd_to_nand(mtd);

	nand_chip->ecc.mode = NAND_ECC_HW;
	nand_chip->ecc.hwctl = NULL;
	nand_chip->ecc.read_page = arasan_nand_read_page_hwecc;
	nand_chip->ecc.write_page = arasan_nand_write_page_hwecc;
	nand_chip->ecc.read_oob = arasan_nand_read_oob;
	nand_chip->ecc.write_oob = arasan_nand_write_oob;

	for (i = 0; i < ARRAY_SIZE(ecc_matrix); i++) {
		if ((ecc_matrix[i].pagesize == mtd->writesize) &&
		    (ecc_matrix[i].ecc_codeword_size >=
		     nand_chip->ecc_step_ds)) {
			if (ecc_matrix[i].eccbits >=
			    nand_chip->ecc_strength_ds) {
				found = i;
				break;
			}
			found = i;
		}
	}

	if (found < 0)
		return 1;

	regval = ecc_matrix[found].eccaddr |
		 (ecc_matrix[found].eccsize << ARASAN_NAND_ECC_SIZE_SHIFT) |
		 (ecc_matrix[found].bch << ARASAN_NAND_ECC_BCH_SHIFT);
	writel(regval, &arasan_nand_base->ecc_reg);

	if (ecc_matrix[found].bch) {
		regval = readl(&arasan_nand_base->memadr_reg2);
		regval &= ~ARASAN_NAND_MEM_ADDR2_BCH_MASK;
		regval |= (ecc_matrix[found].bchval <<
			   ARASAN_NAND_MEM_ADDR2_BCH_SHIFT);
		writel(regval, &arasan_nand_base->memadr_reg2);
	}

	nand_oob.eccbytes = ecc_matrix[found].eccsize;
	eccpos_start = mtd->oobsize - nand_oob.eccbytes;

	for (i = 0; i < nand_oob.eccbytes; i++)
		nand_oob.eccpos[i] = eccpos_start + i;

	nand_oob.oobfree[0].offset = 2;
	nand_oob.oobfree[0].length = eccpos_start - 2;

	nand_chip->ecc.size = ecc_matrix[found].ecc_codeword_size;
	nand_chip->ecc.strength = ecc_matrix[found].eccbits;
	nand_chip->ecc.bytes = ecc_matrix[found].eccsize;
	nand_chip->ecc.layout = &nand_oob;

	return 0;
}

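/*
 * Per-device initialisation: allocate the driver state, hook the
 * controller-specific callbacks into the nand_chip, then run the usual
 * nand_scan_ident() / ECC setup / nand_scan_tail() / nand_register()
 * sequence.
 */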
static int arasan_nand_init(struct nand_chip *nand_chip, int devnum)
{
	struct arasan_nand_info *nand;
	struct mtd_info *mtd;
	int err = -1;

	nand = calloc(1, sizeof(struct arasan_nand_info));
	if (!nand) {
		printf("%s: failed to allocate\n", __func__);
		return err;
	}

	nand->nand_base = arasan_nand_base;
	mtd = nand_to_mtd(nand_chip);
	nand_set_controller_data(nand_chip, nand);

	/* Set the driver entry points for MTD */
	nand_chip->cmdfunc = arasan_nand_cmd_function;
	nand_chip->select_chip = arasan_nand_select_chip;
	nand_chip->read_byte = arasan_nand_read_byte;

	/* Buffer read/write routines */
	nand_chip->read_buf = arasan_nand_read_buf;
	nand_chip->write_buf = arasan_nand_write_buf;
	nand_chip->bbt_options = NAND_BBT_USE_FLASH;

	writel(0x0, &arasan_nand_base->cmd_reg);
	writel(0x0, &arasan_nand_base->pgm_reg);

	/* first scan to find the device and get the page size */
	if (nand_scan_ident(mtd, 1, NULL)) {
		printf("%s: nand_scan_ident failed\n", __func__);
		goto fail;
	}

	if (arasan_nand_ecc_init(mtd)) {
		printf("%s: nand_ecc_init failed\n", __func__);
		goto fail;
	}

	if (nand_scan_tail(mtd)) {
		printf("%s: nand_scan_tail failed\n", __func__);
		goto fail;
	}

	if (nand_register(devnum, mtd)) {
		printf("Nand Register Fail\n");
		goto fail;
	}

	return 0;
fail:
	free(nand);
	return err;
}

void board_nand_init(void)
{
	struct nand_chip *nand = &nand_chip[0];

	if (arasan_nand_init(nand, 0))
		puts("NAND init failed\n");
}