blob: 3c9a0215c53e6fc5e310a22dee17d01909ecc271 [file] [log] [blame]
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301/*
2 * Arasan NAND Flash Controller Driver
3 *
4 * Copyright (C) 2014 - 2015 Xilinx, Inc.
5 *
6 * SPDX-License-Identifier: GPL-2.0+
7 */
8
9#include <common.h>
10#include <malloc.h>
11#include <asm/io.h>
Masahiro Yamada56a931c2016-09-21 11:28:55 +090012#include <linux/errno.h>
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +053013#include <linux/mtd/mtd.h>
Masahiro Yamada2b7a8732017-11-30 13:45:24 +090014#include <linux/mtd/rawnand.h>
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +053015#include <linux/mtd/partitions.h>
16#include <linux/mtd/nand_ecc.h>
17#include <asm/arch/hardware.h>
18#include <asm/arch/sys_proto.h>
19#include <nand.h>
20
/* Per-controller driver state, attached via nand_set_controller_data() */
struct arasan_nand_info {
	void __iomem *nand_base;	/* controller register base */
	u32 page;			/* page cached at SEQIN for the later OOB write */
	bool on_die_ecc_enabled;	/* true when the flash's internal ECC is in use */
};
26
/*
 * Arasan NAND controller register map; accessed exclusively through
 * readl()/writel() on the fixed base address below.
 */
struct nand_regs {
	u32 pkt_reg;		/* packet size and packet count */
	u32 memadr_reg1;	/* column + low 16 bits of page address */
	u32 memadr_reg2;	/* high page bits, chip select, BCH fields */
	u32 cmd_reg;		/* cmd1/cmd2, page size, addr cycles, ECC enable */
	u32 pgm_reg;		/* program-operation trigger */
	u32 intsts_enr;		/* interrupt status enable */
	u32 intsig_enr;		/* interrupt signal enable */
	u32 intsts_reg;		/* interrupt status */
	u32 rdy_busy;
	u32 cms_sysadr_reg;
	u32 flash_sts_reg;	/* flash status byte (READ STATUS result) */
	u32 tmg_reg;
	u32 buf_dataport;	/* data FIFO port */
	u32 ecc_reg;
	u32 ecc_errcnt_reg;
	u32 ecc_sprcmd_reg;	/* spare-area command issued during HW ECC */
	u32 errcnt_1bitreg;
	u32 errcnt_2bitreg;
	u32 errcnt_3bitreg;
	u32 errcnt_4bitreg;
	u32 dma_sysadr0_reg;
	u32 dma_bufbdry_reg;
	u32 cpu_rls_reg;
	u32 errcnt_5bitreg;
	u32 errcnt_6bitreg;
	u32 errcnt_7bitreg;
	u32 errcnt_8bitreg;
	u32 data_if_reg;
};

/* Registers live at a fixed platform base address */
#define arasan_nand_base ((struct nand_regs __iomem *)ARASAN_NAND_BASEADDR)
59
/* Opcode pair, address-cycle class and pgm_reg trigger bit for one command */
struct arasan_nand_command_format {
	u8 cmd1;		/* first opcode (lookup key) */
	u8 cmd2;		/* second opcode, or NAND_CMD_NONE */
	u8 addr_cycles;		/* enum addr_cycles class */
	u32 pgm;		/* ARASAN_PROG_* bit written to pgm_reg */
};
66
/* ONFI SET/GET_FEATURES: feature address and value that enable on-die ECC */
#define ONDIE_ECC_FEATURE_ADDR			0x90
#define ENABLE_ONDIE_ECC			0x08

/* pgm_reg: program-operation select bits */
#define ARASAN_PROG_RD_MASK			0x00000001
#define ARASAN_PROG_BLK_ERS_MASK		0x00000004
#define ARASAN_PROG_RD_ID_MASK			0x00000040
#define ARASAN_PROG_RD_STS_MASK			0x00000008
#define ARASAN_PROG_PG_PROG_MASK		0x00000010
#define ARASAN_PROG_RD_PARAM_PG_MASK		0x00000080
#define ARASAN_PROG_RST_MASK			0x00000100
#define ARASAN_PROG_GET_FTRS_MASK		0x00000200
#define ARASAN_PROG_SET_FTRS_MASK		0x00000400
#define ARASAN_PROG_CHNG_ROWADR_END_MASK	0x00400000

/* cmd_reg field layout */
#define ARASAN_NAND_CMD_ECC_ON_MASK		0x80000000
#define ARASAN_NAND_CMD_CMD12_MASK		0xFFFF
#define ARASAN_NAND_CMD_PG_SIZE_MASK		0x3800000
#define ARASAN_NAND_CMD_PG_SIZE_SHIFT		23
#define ARASAN_NAND_CMD_CMD2_SHIFT		8
#define ARASAN_NAND_CMD_ADDR_CYCL_MASK		0x70000000
#define ARASAN_NAND_CMD_ADDR_CYCL_SHIFT		28

/* memadr_reg1/memadr_reg2 field layout */
#define ARASAN_NAND_MEM_ADDR1_PAGE_MASK		0xFFFF0000
#define ARASAN_NAND_MEM_ADDR1_COL_MASK		0xFFFF
#define ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT	16
#define ARASAN_NAND_MEM_ADDR2_PAGE_MASK		0xFF
#define ARASAN_NAND_MEM_ADDR2_CS_MASK		0xC0000000
#define ARASAN_NAND_MEM_ADDR2_BCH_MASK		0xE000000
#define ARASAN_NAND_MEM_ADDR2_BCH_SHIFT		25

/* intsts_reg / intsts_enr bits */
#define ARASAN_NAND_INT_STS_ERR_EN_MASK		0x10
#define ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK	0x08
#define ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK	0x02
#define ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK	0x01
#define ARASAN_NAND_INT_STS_XFR_CMPLT_MASK	0x04

/* pkt_reg: packet count (bits 23:12) and packet size (bits 10:0) */
#define ARASAN_NAND_PKT_REG_PKT_CNT_MASK	0xFFF000
#define ARASAN_NAND_PKT_REG_PKT_SIZE_MASK	0x7FF
#define ARASAN_NAND_PKT_REG_PKT_CNT_SHFT	12

/* ONFI addr_cycles byte: row cycles in low nibble, column in high nibble */
#define ARASAN_NAND_ROW_ADDR_CYCL_MASK		0x0F
#define ARASAN_NAND_COL_ADDR_CYCL_MASK		0xF0
#define ARASAN_NAND_COL_ADDR_CYCL_SHIFT		4

#define ARASAN_NAND_ECC_SIZE_SHIFT		16
#define ARASAN_NAND_ECC_BCH_SHIFT		27

/* Controller packet (FIFO chunk) sizes, bytes */
#define ARASAN_NAND_PKTSIZE_1K			1024
#define ARASAN_NAND_PKTSIZE_512			512

/* Busy-wait limit (iterations of udelay(1)) and invalid-cycle sentinel */
#define ARASAN_NAND_POLL_TIMEOUT		1000000
#define ARASAN_NAND_INVALID_ADDR_CYCL		0xFF

#define ERR_ADDR_CYCLE				-1
#define READ_BUFF_SIZE				0x4000

/* Descriptor of the command currently in flight (set by cmd_function) */
static struct arasan_nand_command_format *curr_cmd;
124
/* Address-cycle class of a command; resolved by arasan_nand_get_addrcycle() */
enum addr_cycles {
	NAND_ADDR_CYCL_NONE,	/* no address phase */
	NAND_ADDR_CYCL_ONE,	/* single address byte */
	NAND_ADDR_CYCL_ROW,	/* row cycles only (per ONFI params) */
	NAND_ADDR_CYCL_COL,	/* column cycles only */
	NAND_ADDR_CYCL_BOTH,	/* row + column cycles */
};
132
/*
 * Supported command set, looked up by cmd1; terminated by the
 * NAND_CMD_NONE/NAND_CMD_NONE sentinel entry.
 */
static struct arasan_nand_command_format arasan_nand_commands[] = {
	{NAND_CMD_READ0, NAND_CMD_READSTART, NAND_ADDR_CYCL_BOTH,
	 ARASAN_PROG_RD_MASK},
	{NAND_CMD_RNDOUT, NAND_CMD_RNDOUTSTART, NAND_ADDR_CYCL_COL,
	 ARASAN_PROG_RD_MASK},
	{NAND_CMD_READID, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_RD_ID_MASK},
	{NAND_CMD_STATUS, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE,
	 ARASAN_PROG_RD_STS_MASK},
	{NAND_CMD_SEQIN, NAND_CMD_PAGEPROG, NAND_ADDR_CYCL_BOTH,
	 ARASAN_PROG_PG_PROG_MASK},
	{NAND_CMD_RNDIN, NAND_CMD_NONE, NAND_ADDR_CYCL_COL,
	 ARASAN_PROG_CHNG_ROWADR_END_MASK},
	{NAND_CMD_ERASE1, NAND_CMD_ERASE2, NAND_ADDR_CYCL_ROW,
	 ARASAN_PROG_BLK_ERS_MASK},
	{NAND_CMD_RESET, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE,
	 ARASAN_PROG_RST_MASK},
	{NAND_CMD_PARAM, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_RD_PARAM_PG_MASK},
	{NAND_CMD_GET_FEATURES, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_GET_FTRS_MASK},
	{NAND_CMD_SET_FEATURES, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_SET_FTRS_MASK},
	{NAND_CMD_NONE, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE, 0},	/* sentinel */
};
158
/*
 * One hardware-ECC configuration: for a (pagesize, codeword size, strength)
 * triple, whether BCH is used, the BCH mode value, and where the ECC bytes
 * start (eccaddr) plus how many there are (eccsize).
 */
struct arasan_ecc_matrix {
	u32 pagesize;
	u32 ecc_codeword_size;
	u8 eccbits;		/* correctable bits per codeword */
	u8 bch;			/* 1 = BCH engine, 0 = Hamming */
	u8 bchval;		/* BCH mode field value */
	u16 eccaddr;		/* byte offset of ECC data within the page+OOB */
	u16 eccsize;		/* total ECC bytes */
};
168
/*
 * Supported HW ECC configurations, grouped by page size.
 * Columns: pagesize, codeword, eccbits, bch, bchval, eccaddr, eccsize.
 */
static const struct arasan_ecc_matrix ecc_matrix[] = {
	{512, 512, 1, 0, 0, 0x20D, 0x3},
	{512, 512, 4, 1, 3, 0x209, 0x7},
	{512, 512, 8, 1, 2, 0x203, 0xD},
	/*
	 * 2K byte page
	 */
	{2048, 512, 1, 0, 0, 0x834, 0xC},
	{2048, 512, 4, 1, 3, 0x826, 0x1A},
	{2048, 512, 8, 1, 2, 0x80c, 0x34},
	{2048, 512, 12, 1, 1, 0x822, 0x4E},
	{2048, 512, 16, 1, 0, 0x808, 0x68},
	{2048, 1024, 24, 1, 4, 0x81c, 0x54},
	/*
	 * 4K byte page
	 */
	{4096, 512, 1, 0, 0, 0x1068, 0x18},
	{4096, 512, 4, 1, 3, 0x104c, 0x34},
	{4096, 512, 8, 1, 2, 0x1018, 0x68},
	{4096, 512, 12, 1, 1, 0x1044, 0x9C},
	{4096, 512, 16, 1, 0, 0x1010, 0xD0},
	{4096, 1024, 24, 1, 4, 0x1038, 0xA8},
	/*
	 * 8K byte page
	 */
	{8192, 512, 1, 0, 0, 0x20d0, 0x30},
	{8192, 512, 4, 1, 3, 0x2098, 0x68},
	{8192, 512, 8, 1, 2, 0x2030, 0xD0},
	{8192, 512, 12, 1, 1, 0x2088, 0x138},
	{8192, 512, 16, 1, 0, 0x2020, 0x1A0},
	{8192, 1024, 24, 1, 4, 0x2070, 0x150},
	/*
	 * 16K byte page
	 */
	{16384, 512, 1, 0, 0, 0x4460, 0x60},
	{16384, 512, 4, 1, 3, 0x43f0, 0xD0},
	{16384, 512, 8, 1, 2, 0x4320, 0x1A0},
	{16384, 512, 12, 1, 1, 0x4250, 0x270},
	{16384, 512, 16, 1, 0, 0x4180, 0x340},
	{16384, 1024, 24, 1, 4, 0x4220, 0x2A0}
};
210
/* OOB layout used when the flash's on-die ECC is enabled (64-byte spare) */
static struct nand_ecclayout ondie_nand_oob_64 = {
	.eccbytes = 32,

	.eccpos = {
		8, 9, 10, 11, 12, 13, 14, 15,
		24, 25, 26, 27, 28, 29, 30, 31,
		40, 41, 42, 43, 44, 45, 46, 47,
		56, 57, 58, 59, 60, 61, 62, 63
	},

	.oobfree = {
		{ .offset = 4, .length = 4 },
		{ .offset = 20, .length = 4 },
		{ .offset = 36, .length = 4 },
		{ .offset = 52, .length = 4 }
	}
};
228
/*
 * Bad-block-table descriptors for chips with on-die ECC and
 * chips with 64-byte OOB
 */
static u8 bbt_pattern[] = {'B', 'b', 't', '0' };
static u8 mirror_pattern[] = {'1', 't', 'b', 'B' };

/* Primary BBT: kept in the device's last blocks, versioned, per chip */
static struct nand_bbt_descr bbt_main_descr = {
	.options = NAND_BBT_LASTBLOCK | NAND_BBT_CREATE | NAND_BBT_WRITE |
		   NAND_BBT_2BIT | NAND_BBT_VERSION | NAND_BBT_PERCHIP,
	.offs = 4,
	.len = 4,
	.veroffs = 20,
	.maxblocks = 4,
	.pattern = bbt_pattern
};

/* Mirror copy of the BBT with the reversed marker pattern */
static struct nand_bbt_descr bbt_mirror_descr = {
	.options = NAND_BBT_LASTBLOCK | NAND_BBT_CREATE | NAND_BBT_WRITE |
		   NAND_BBT_2BIT | NAND_BBT_VERSION | NAND_BBT_PERCHIP,
	.offs = 4,
	.len = 4,
	.veroffs = 20,
	.maxblocks = 4,
	.pattern = mirror_pattern
};
255
/* Bounce buffer and read cursor shared by read_byte()/read_buf() */
static u8 buf_data[READ_BUFF_SIZE];
static u32 buf_index;

/* ECC layout description (populated outside this chunk) */
static struct nand_ecclayout nand_oob;

static struct nand_chip nand_chip[CONFIG_SYS_MAX_NAND_DEVICE];
262
/*
 * Chip-select hook required by the NAND core; intentionally empty —
 * chip selection is handled via the memadr_reg2 CS field instead.
 */
static void arasan_nand_select_chip(struct mtd_info *mtd, int chip)
{
}
266
267static void arasan_nand_enable_ecc(void)
268{
269 u32 reg_val;
270
271 reg_val = readl(&arasan_nand_base->cmd_reg);
272 reg_val |= ARASAN_NAND_CMD_ECC_ON_MASK;
273
274 writel(reg_val, &arasan_nand_base->cmd_reg);
275}
276
277static u8 arasan_nand_get_addrcycle(struct mtd_info *mtd)
278{
279 u8 addrcycles;
Scott Wood17fed142016-05-30 13:57:56 -0500280 struct nand_chip *chip = mtd_to_nand(mtd);
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +0530281
282 switch (curr_cmd->addr_cycles) {
283 case NAND_ADDR_CYCL_NONE:
284 addrcycles = 0;
285 break;
286 case NAND_ADDR_CYCL_ONE:
287 addrcycles = 1;
288 break;
289 case NAND_ADDR_CYCL_ROW:
290 addrcycles = chip->onfi_params.addr_cycles &
291 ARASAN_NAND_ROW_ADDR_CYCL_MASK;
292 break;
293 case NAND_ADDR_CYCL_COL:
294 addrcycles = (chip->onfi_params.addr_cycles &
295 ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
296 ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
297 break;
298 case NAND_ADDR_CYCL_BOTH:
299 addrcycles = chip->onfi_params.addr_cycles &
300 ARASAN_NAND_ROW_ADDR_CYCL_MASK;
301 addrcycles += (chip->onfi_params.addr_cycles &
302 ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
303 ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
304 break;
305 default:
306 addrcycles = ARASAN_NAND_INVALID_ADDR_CYCL;
307 break;
308 }
309 return addrcycles;
310}
311
312static int arasan_nand_read_page(struct mtd_info *mtd, u8 *buf, u32 size)
313{
Scott Wood17fed142016-05-30 13:57:56 -0500314 struct nand_chip *chip = mtd_to_nand(mtd);
Siva Durga Prasad Paladugu45e512c2018-01-04 16:04:21 +0530315 struct arasan_nand_info *nand = nand_get_controller_data(chip);
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +0530316 u32 reg_val, i, pktsize, pktnum;
317 u32 *bufptr = (u32 *)buf;
318 u32 timeout;
319 u32 rdcount = 0;
320 u8 addr_cycles;
321
322 if (chip->ecc_step_ds >= ARASAN_NAND_PKTSIZE_1K)
323 pktsize = ARASAN_NAND_PKTSIZE_1K;
324 else
325 pktsize = ARASAN_NAND_PKTSIZE_512;
326
327 if (size % pktsize)
328 pktnum = size/pktsize + 1;
329 else
330 pktnum = size/pktsize;
331
332 reg_val = readl(&arasan_nand_base->intsts_enr);
333 reg_val |= ARASAN_NAND_INT_STS_ERR_EN_MASK |
334 ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK;
335 writel(reg_val, &arasan_nand_base->intsts_enr);
336
337 reg_val = readl(&arasan_nand_base->pkt_reg);
338 reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
339 ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
340 reg_val |= (pktnum << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) |
341 pktsize;
342 writel(reg_val, &arasan_nand_base->pkt_reg);
343
Siva Durga Prasad Paladugu45e512c2018-01-04 16:04:21 +0530344 if (!nand->on_die_ecc_enabled) {
345 arasan_nand_enable_ecc();
346 addr_cycles = arasan_nand_get_addrcycle(mtd);
347 if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
348 return ERR_ADDR_CYCLE;
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +0530349
Siva Durga Prasad Paladugu45e512c2018-01-04 16:04:21 +0530350 writel((NAND_CMD_RNDOUTSTART << ARASAN_NAND_CMD_CMD2_SHIFT) |
351 NAND_CMD_RNDOUT | (addr_cycles <<
352 ARASAN_NAND_CMD_ADDR_CYCL_SHIFT),
353 &arasan_nand_base->ecc_sprcmd_reg);
354 }
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +0530355 writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
356
357 while (rdcount < pktnum) {
358 timeout = ARASAN_NAND_POLL_TIMEOUT;
359 while (!(readl(&arasan_nand_base->intsts_reg) &
360 ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK) && timeout) {
361 udelay(1);
362 timeout--;
363 }
364 if (!timeout) {
365 puts("arasan_read_page: timedout:Buff RDY\n");
366 return -ETIMEDOUT;
367 }
368
369 rdcount++;
370
371 if (pktnum == rdcount) {
372 reg_val = readl(&arasan_nand_base->intsts_enr);
373 reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
374 writel(reg_val, &arasan_nand_base->intsts_enr);
375 } else {
376 reg_val = readl(&arasan_nand_base->intsts_enr);
377 writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
378 &arasan_nand_base->intsts_enr);
379 }
380 reg_val = readl(&arasan_nand_base->intsts_reg);
381 writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
382 &arasan_nand_base->intsts_reg);
383
384 for (i = 0; i < pktsize/4; i++)
385 bufptr[i] = readl(&arasan_nand_base->buf_dataport);
386
387
388 bufptr += pktsize/4;
389
390 if (rdcount >= pktnum)
391 break;
392
393 writel(ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
394 &arasan_nand_base->intsts_enr);
395 }
396
397 timeout = ARASAN_NAND_POLL_TIMEOUT;
398
399 while (!(readl(&arasan_nand_base->intsts_reg) &
400 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
401 udelay(1);
402 timeout--;
403 }
404 if (!timeout) {
405 puts("arasan rd_page timedout:Xfer CMPLT\n");
406 return -ETIMEDOUT;
407 }
408
409 reg_val = readl(&arasan_nand_base->intsts_enr);
410 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
411 &arasan_nand_base->intsts_enr);
412 reg_val = readl(&arasan_nand_base->intsts_reg);
413 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
414 &arasan_nand_base->intsts_reg);
415
Siva Durga Prasad Paladugu45e512c2018-01-04 16:04:21 +0530416 if (!nand->on_die_ecc_enabled) {
417 if (readl(&arasan_nand_base->intsts_reg) &
418 ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK) {
419 printf("arasan rd_page:sbiterror\n");
420 return -1;
421 }
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +0530422
Siva Durga Prasad Paladugu45e512c2018-01-04 16:04:21 +0530423 if (readl(&arasan_nand_base->intsts_reg) &
424 ARASAN_NAND_INT_STS_ERR_EN_MASK) {
425 mtd->ecc_stats.failed++;
426 printf("arasan rd_page:multibiterror\n");
427 return -1;
428 }
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +0530429 }
430
431 return 0;
432}
433
434static int arasan_nand_read_page_hwecc(struct mtd_info *mtd,
435 struct nand_chip *chip, u8 *buf, int oob_required, int page)
436{
437 int status;
438
439 status = arasan_nand_read_page(mtd, buf, (mtd->writesize));
440
441 if (oob_required)
442 chip->ecc.read_oob(mtd, chip, page);
443
444 return status;
445}
446
447static void arasan_nand_fill_tx(const u8 *buf, int len)
448{
449 u32 __iomem *nand = &arasan_nand_base->buf_dataport;
450
451 if (((unsigned long)buf & 0x3) != 0) {
452 if (((unsigned long)buf & 0x1) != 0) {
453 if (len) {
454 writeb(*buf, nand);
455 buf += 1;
456 len--;
457 }
458 }
459
460 if (((unsigned long)buf & 0x3) != 0) {
461 if (len >= 2) {
462 writew(*(u16 *)buf, nand);
463 buf += 2;
464 len -= 2;
465 }
466 }
467 }
468
469 while (len >= 4) {
470 writel(*(u32 *)buf, nand);
471 buf += 4;
472 len -= 4;
473 }
474
475 if (len) {
476 if (len >= 2) {
477 writew(*(u16 *)buf, nand);
478 buf += 2;
479 len -= 2;
480 }
481
482 if (len)
483 writeb(*buf, nand);
484 }
485}
486
/*
 * NAND core ->write_page hook: stream mtd->writesize bytes to the flash
 * in FIFO-sized packets, with hardware ECC unless on-die ECC is active.
 *
 * Uses nand->page (cached when SEQIN was issued) for the trailing OOB
 * write. Returns 0 or -ETIMEDOUT.
 */
static int arasan_nand_write_page_hwecc(struct mtd_info *mtd,
		struct nand_chip *chip, const u8 *buf, int oob_required,
		int page)
{
	u32 reg_val, i, pktsize, pktnum;
	const u32 *bufptr = (const u32 *)buf;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u32 size = mtd->writesize;
	u32 rdcount = 0;
	u8 column_addr_cycles;
	struct arasan_nand_info *nand = nand_get_controller_data(chip);

	/* Packet size follows the ECC step size (512 or 1024 bytes) */
	if (chip->ecc_step_ds >= ARASAN_NAND_PKTSIZE_1K)
		pktsize = ARASAN_NAND_PKTSIZE_1K;
	else
		pktsize = ARASAN_NAND_PKTSIZE_512;

	if (size % pktsize)
		pktnum = size/pktsize + 1;
	else
		pktnum = size/pktsize;

	/* Program packet geometry */
	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
	reg_val |= (pktnum << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | pktsize;
	writel(reg_val, &arasan_nand_base->pkt_reg);

	/* HW ECC path: program the spare-area write command as well */
	if (!nand->on_die_ecc_enabled) {
		arasan_nand_enable_ecc();
		column_addr_cycles = (chip->onfi_params.addr_cycles &
				      ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
				      ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
		writel((NAND_CMD_RNDIN | (column_addr_cycles << 28)),
		       &arasan_nand_base->ecc_sprcmd_reg);
	}
	/* Kick off the transfer */
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (rdcount < pktnum) {
		/* Wait until the FIFO can accept another packet */
		timeout = ARASAN_NAND_POLL_TIMEOUT;
		while (!(readl(&arasan_nand_base->intsts_reg) &
			ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK) && timeout) {
			udelay(1);
			timeout--;
		}

		if (!timeout) {
			puts("arasan_write_page: timedout:Buff RDY\n");
			return -ETIMEDOUT;
		}

		rdcount++;

		/* Last packet: switch to watching for transfer-complete */
		if (pktnum == rdcount) {
			reg_val = readl(&arasan_nand_base->intsts_enr);
			reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
			writel(reg_val, &arasan_nand_base->intsts_enr);
		} else {
			reg_val = readl(&arasan_nand_base->intsts_enr);
			writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
			       &arasan_nand_base->intsts_enr);
		}

		/* Acknowledge the buffer-ready status */
		reg_val = readl(&arasan_nand_base->intsts_reg);
		writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
		       &arasan_nand_base->intsts_reg);

		/* Push one packet into the data port */
		for (i = 0; i < pktsize/4; i++)
			writel(bufptr[i], &arasan_nand_base->buf_dataport);

		bufptr += pktsize/4;

		if (rdcount >= pktnum)
			break;

		writel(ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
		       &arasan_nand_base->intsts_enr);
	}

	/* Wait for the controller to flag the whole transfer complete */
	timeout = ARASAN_NAND_POLL_TIMEOUT;

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout) {
		puts("arasan write_page timedout:Xfer CMPLT\n");
		return -ETIMEDOUT;
	}

	/* Acknowledge transfer-complete */
	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	/* Write the OOB using the page number cached at SEQIN time */
	if (oob_required)
		chip->ecc.write_oob(mtd, chip, nand->page);

	return 0;
}
590
591static int arasan_nand_read_oob(struct mtd_info *mtd, struct nand_chip *chip,
592 int page)
593{
594 chip->cmdfunc(mtd, NAND_CMD_READOOB, 0, page);
595 chip->read_buf(mtd, chip->oob_poi, (mtd->oobsize));
596
597 return 0;
598}
599
600static int arasan_nand_write_oob(struct mtd_info *mtd, struct nand_chip *chip,
601 int page)
602{
603 int status = 0;
604 const u8 *buf = chip->oob_poi;
605
606 chip->cmdfunc(mtd, NAND_CMD_SEQIN, mtd->writesize, page);
607 chip->write_buf(mtd, buf, mtd->oobsize);
608
609 return status;
610}
611
612static int arasan_nand_reset(struct arasan_nand_command_format *curr_cmd)
613{
614 u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
615 u32 cmd_reg = 0;
616
617 writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
618 &arasan_nand_base->intsts_enr);
619 cmd_reg = readl(&arasan_nand_base->cmd_reg);
620 cmd_reg &= ~ARASAN_NAND_CMD_CMD12_MASK;
621
622 cmd_reg |= curr_cmd->cmd1 |
623 (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
624 writel(cmd_reg, &arasan_nand_base->cmd_reg);
625 writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
626
627 while (!(readl(&arasan_nand_base->intsts_reg) &
628 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
629 udelay(1);
630 timeout--;
631 }
632 if (!timeout) {
633 printf("ERROR:%s timedout\n", __func__);
634 return -ETIMEDOUT;
635 }
636
637 writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
638 &arasan_nand_base->intsts_enr);
639
640 writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
641 &arasan_nand_base->intsts_reg);
642
643 return 0;
644}
645
646static u8 arasan_nand_page(struct mtd_info *mtd)
647{
648 u8 page_val = 0;
649
650 switch (mtd->writesize) {
651 case 512:
652 page_val = 0;
653 break;
654 case 2048:
655 page_val = 1;
656 break;
657 case 4096:
658 page_val = 2;
659 break;
660 case 8192:
661 page_val = 3;
662 break;
663 case 16384:
664 page_val = 4;
665 break;
666 case 1024:
667 page_val = 5;
668 break;
669 default:
670 printf("%s:Pagesize>16K\n", __func__);
671 break;
672 }
673
674 return page_val;
675}
676
/*
 * Program the command and address registers for a write-type command
 * (SEQIN or SET_FEATURES). The actual data phase happens later via
 * write_buf()/write_page. Returns 0 or ERR_ADDR_CYCLE.
 */
static int arasan_nand_send_wrcmd(struct arasan_nand_command_format *curr_cmd,
		int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val, page;
	u8 page_val, addr_cycles;

	/* Arm the buffer-write-ready status bit */
	writel(ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
	       &arasan_nand_base->intsts_enr);

	/* Opcode pair, plus the page-size field for page program */
	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	if (curr_cmd->cmd1 == NAND_CMD_SEQIN) {
		reg_val &= ~ARASAN_NAND_CMD_PG_SIZE_MASK;
		page_val = arasan_nand_page(mtd);
		reg_val |= (page_val << ARASAN_NAND_CMD_PG_SIZE_SHIFT);
	}

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
	addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val |= (addr_cycles <<
		   ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);
	writel(reg_val, &arasan_nand_base->cmd_reg);

	if (page_addr == -1)
		page_addr = 0;

	/* Low 16 page bits and the column go into memadr_reg1 */
	page = (page_addr << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
	       ARASAN_NAND_MEM_ADDR1_PAGE_MASK;
	column &= ARASAN_NAND_MEM_ADDR1_COL_MASK;
	writel(page|column, &arasan_nand_base->memadr_reg1);

	/* Remaining page bits into memadr_reg2; clear the chip-select field */
	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
	reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);

	return 0;
}
723
/*
 * ->write_buf hook: send 'len' bytes as a single packet, triggering the
 * program operation for the command prepared by send_wrcmd(). Timeouts
 * are reported on the console but not propagated (void interface).
 */
static void arasan_nand_write_buf(struct mtd_info *mtd, const u8 *buf, int len)
{
	u32 reg_val;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;

	/* One packet of exactly 'len' bytes */
	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);

	reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | len;
	writel(reg_val, &arasan_nand_base->pkt_reg);
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	/* Wait until the FIFO can accept the packet */
	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK) && timeout) {
		udelay(1);
		timeout--;
	}

	if (!timeout)
		puts("ERROR:arasan_nand_write_buf timedout:Buff RDY\n");

	/* Switch to watching transfer-complete; ack buffer-ready */
	reg_val = readl(&arasan_nand_base->intsts_enr);
	reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
	writel(reg_val, &arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
	       &arasan_nand_base->intsts_reg);

	/* Push the payload through the data port */
	arasan_nand_fill_tx(buf, len);

	timeout = ARASAN_NAND_POLL_TIMEOUT;
	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout)
		puts("ERROR:arasan_nand_write_buf timedout:Xfer CMPLT\n");

	/* Acknowledge transfer-complete */
	writel(readl(&arasan_nand_base->intsts_enr) |
	       ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	writel(readl(&arasan_nand_base->intsts_reg) |
	       ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);
}
773
/*
 * Execute a block erase (ERASE1/ERASE2 pair) for the block containing
 * page_addr and wait for completion. The incoming 'column' parameter is
 * ignored — both memadr_reg1 fields are loaded from the row address.
 * Returns 0, ERR_ADDR_CYCLE, or -ETIMEDOUT.
 */
static int arasan_nand_erase(struct arasan_nand_command_format *curr_cmd,
		int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val, page;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u8 row_addr_cycles;

	/* Arm transfer-complete */
	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);

	/* Opcode pair + row address cycle count */
	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	row_addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (row_addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
	reg_val |= (row_addr_cycles <<
		   ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);

	writel(reg_val, &arasan_nand_base->cmd_reg);

	/* Erase is row-addressed: the column field also carries row bits */
	page = (page_addr << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
	       ARASAN_NAND_MEM_ADDR1_PAGE_MASK;
	column = page_addr & ARASAN_NAND_MEM_ADDR1_COL_MASK;
	writel(page | column, &arasan_nand_base->memadr_reg1);

	/* High row bits; clear the chip-select field */
	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
	reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout) {
		printf("ERROR:%s timedout:Xfer CMPLT\n", __func__);
		return -ETIMEDOUT;
	}

	/* Acknowledge transfer-complete */
	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	return 0;
}
831
/*
 * Execute READ STATUS: one 1-byte packet; the result is later fetched by
 * read_byte() from flash_sts_reg. Returns 0, ERR_ADDR_CYCLE, or -ETIMEDOUT.
 */
static int arasan_nand_read_status(struct arasan_nand_command_format *curr_cmd,
		int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u8 addr_cycles;

	/* Arm transfer-complete */
	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);

	/* Opcode pair + address cycles (none for STATUS) */
	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
	reg_val |= (addr_cycles <<
		   ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);

	writel(reg_val, &arasan_nand_base->cmd_reg);

	/* A single 1-byte packet */
	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
	reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | 1;
	writel(reg_val, &arasan_nand_base->pkt_reg);

	/* Clear the chip-select field */
	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);

	/* Trigger and wait for completion */
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}

	if (!timeout) {
		printf("ERROR:%s: timedout:Xfer CMPLT\n", __func__);
		return -ETIMEDOUT;
	}

	/* Acknowledge transfer-complete */
	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	return 0;
}
887
/*
 * Program the command and address registers for a read-type command
 * (READ0, READID, PARAM, RNDOUT, GET_FEATURES). ECC is explicitly turned
 * off here; the read path re-enables it when appropriate. Also resets the
 * read_byte() cursor. Returns 0 or ERR_ADDR_CYCLE.
 */
static int arasan_nand_send_rdcmd(struct arasan_nand_command_format *curr_cmd,
		int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val, addr_cycles, page;
	u8 page_val;

	/* Arm the buffer-read-ready status bit */
	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
	       &arasan_nand_base->intsts_enr);

	/* Opcode pair, plus the page-size field for page reads */
	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);

	if (curr_cmd->cmd1 == NAND_CMD_RNDOUT ||
	    curr_cmd->cmd1 == NAND_CMD_READ0) {
		reg_val &= ~ARASAN_NAND_CMD_PG_SIZE_MASK;
		page_val = arasan_nand_page(mtd);
		reg_val |= (page_val << ARASAN_NAND_CMD_PG_SIZE_SHIFT);
	}

	/* ECC off for the raw command phase */
	reg_val &= ~ARASAN_NAND_CMD_ECC_ON_MASK;

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;

	addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val |= (addr_cycles << 28);
	writel(reg_val, &arasan_nand_base->cmd_reg);

	if (page_addr == -1)
		page_addr = 0;

	/* Low 16 page bits and the column go into memadr_reg1 */
	page = (page_addr << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
	       ARASAN_NAND_MEM_ADDR1_PAGE_MASK;
	column &= ARASAN_NAND_MEM_ADDR1_COL_MASK;
	writel(page | column, &arasan_nand_base->memadr_reg1);

	/* Remaining page bits into memadr_reg2; clear the chip-select field */
	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
	reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
	writel(reg_val, &arasan_nand_base->memadr_reg2);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);

	/* Restart the read_byte() cursor for the upcoming data */
	buf_index = 0;

	return 0;
}
942
943static void arasan_nand_read_buf(struct mtd_info *mtd, u8 *buf, int size)
944{
945 u32 reg_val, i;
946 u32 *bufptr = (u32 *)buf;
947 u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
948
949 reg_val = readl(&arasan_nand_base->pkt_reg);
950 reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
951 ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
952 reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | size;
953 writel(reg_val, &arasan_nand_base->pkt_reg);
954
955 writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
956
957 while (!(readl(&arasan_nand_base->intsts_reg) &
958 ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK) && timeout) {
959 udelay(1);
960 timeout--;
961 }
962
963 if (!timeout)
964 puts("ERROR:arasan_nand_read_buf timedout:Buff RDY\n");
965
966 reg_val = readl(&arasan_nand_base->intsts_enr);
967 reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
968 writel(reg_val, &arasan_nand_base->intsts_enr);
969
970 writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
971 &arasan_nand_base->intsts_enr);
972 reg_val = readl(&arasan_nand_base->intsts_reg);
973 writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
974 &arasan_nand_base->intsts_reg);
975
976 buf_index = 0;
977 for (i = 0; i < size / 4; i++)
978 bufptr[i] = readl(&arasan_nand_base->buf_dataport);
979
980 if (size & 0x03)
981 bufptr[i] = readl(&arasan_nand_base->buf_dataport);
982
983 timeout = ARASAN_NAND_POLL_TIMEOUT;
984
985 while (!(readl(&arasan_nand_base->intsts_reg) &
986 ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
987 udelay(1);
988 timeout--;
989 }
990
991 if (!timeout)
992 puts("ERROR:arasan_nand_read_buf timedout:Xfer CMPLT\n");
993
994 reg_val = readl(&arasan_nand_base->intsts_enr);
995 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
996 &arasan_nand_base->intsts_enr);
997 reg_val = readl(&arasan_nand_base->intsts_reg);
998 writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
999 &arasan_nand_base->intsts_reg);
1000}
1001
1002static u8 arasan_nand_read_byte(struct mtd_info *mtd)
1003{
Scott Wood17fed142016-05-30 13:57:56 -05001004 struct nand_chip *chip = mtd_to_nand(mtd);
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301005 u32 size;
1006 u8 val;
1007 struct nand_onfi_params *p;
1008
1009 if (buf_index == 0) {
1010 p = &chip->onfi_params;
1011 if (curr_cmd->cmd1 == NAND_CMD_READID)
1012 size = 4;
1013 else if (curr_cmd->cmd1 == NAND_CMD_PARAM)
1014 size = sizeof(struct nand_onfi_params);
1015 else if (curr_cmd->cmd1 == NAND_CMD_RNDOUT)
1016 size = le16_to_cpu(p->ext_param_page_length) * 16;
1017 else if (curr_cmd->cmd1 == NAND_CMD_GET_FEATURES)
1018 size = 4;
1019 else if (curr_cmd->cmd1 == NAND_CMD_STATUS)
1020 return readb(&arasan_nand_base->flash_sts_reg);
1021 else
1022 size = 8;
1023 chip->read_buf(mtd, &buf_data[0], size);
1024 }
1025
1026 val = *(&buf_data[0] + buf_index);
1027 buf_index++;
1028
1029 return val;
1030}
1031
1032static void arasan_nand_cmd_function(struct mtd_info *mtd, unsigned int command,
1033 int column, int page_addr)
1034{
1035 u32 i, ret = 0;
Scott Wood17fed142016-05-30 13:57:56 -05001036 struct nand_chip *chip = mtd_to_nand(mtd);
1037 struct arasan_nand_info *nand = nand_get_controller_data(chip);
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301038
1039 curr_cmd = NULL;
1040 writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
1041 &arasan_nand_base->intsts_enr);
1042
1043 if ((command == NAND_CMD_READOOB) &&
1044 (mtd->writesize > 512)) {
1045 column += mtd->writesize;
1046 command = NAND_CMD_READ0;
1047 }
1048
1049 /* Get the command format */
1050 for (i = 0; (arasan_nand_commands[i].cmd1 != NAND_CMD_NONE ||
1051 arasan_nand_commands[i].cmd2 != NAND_CMD_NONE); i++) {
1052 if (command == arasan_nand_commands[i].cmd1) {
1053 curr_cmd = &arasan_nand_commands[i];
1054 break;
1055 }
1056 }
1057
1058 if (curr_cmd == NULL) {
1059 printf("Unsupported Command; 0x%x\n", command);
1060 return;
1061 }
1062
1063 if (curr_cmd->cmd1 == NAND_CMD_RESET)
1064 ret = arasan_nand_reset(curr_cmd);
1065
1066 if ((curr_cmd->cmd1 == NAND_CMD_READID) ||
1067 (curr_cmd->cmd1 == NAND_CMD_PARAM) ||
1068 (curr_cmd->cmd1 == NAND_CMD_RNDOUT) ||
1069 (curr_cmd->cmd1 == NAND_CMD_GET_FEATURES) ||
1070 (curr_cmd->cmd1 == NAND_CMD_READ0))
1071 ret = arasan_nand_send_rdcmd(curr_cmd, column, page_addr, mtd);
1072
1073 if ((curr_cmd->cmd1 == NAND_CMD_SET_FEATURES) ||
1074 (curr_cmd->cmd1 == NAND_CMD_SEQIN)) {
1075 nand->page = page_addr;
1076 ret = arasan_nand_send_wrcmd(curr_cmd, column, page_addr, mtd);
1077 }
1078
1079 if (curr_cmd->cmd1 == NAND_CMD_ERASE1)
1080 ret = arasan_nand_erase(curr_cmd, column, page_addr, mtd);
1081
1082 if (curr_cmd->cmd1 == NAND_CMD_STATUS)
1083 ret = arasan_nand_read_status(curr_cmd, column, page_addr, mtd);
1084
1085 if (ret != 0)
1086 printf("ERROR:%s:command:0x%x\n", __func__, curr_cmd->cmd1);
1087}
1088
Siva Durga Prasad Paladugu45e512c2018-01-04 16:04:21 +05301089static void arasan_check_ondie(struct mtd_info *mtd)
1090{
1091 struct nand_chip *nand_chip = mtd_to_nand(mtd);
1092 struct arasan_nand_info *nand = nand_get_controller_data(nand_chip);
1093 u8 maf_id, dev_id;
1094 u8 get_feature[4];
1095 u8 set_feature[4] = {ENABLE_ONDIE_ECC, 0x00, 0x00, 0x00};
1096 u32 i;
1097
1098 /* Send the command for reading device ID */
1099 nand_chip->cmdfunc(mtd, NAND_CMD_RESET, -1, -1);
1100 nand_chip->cmdfunc(mtd, NAND_CMD_READID, 0, -1);
1101
1102 /* Read manufacturer and device IDs */
1103 maf_id = nand_chip->read_byte(mtd);
1104 dev_id = nand_chip->read_byte(mtd);
1105
1106 if ((maf_id == NAND_MFR_MICRON) &&
1107 ((dev_id == 0xf1) || (dev_id == 0xa1) || (dev_id == 0xb1) ||
1108 (dev_id == 0xaa) || (dev_id == 0xba) || (dev_id == 0xda) ||
1109 (dev_id == 0xca) || (dev_id == 0xac) || (dev_id == 0xbc) ||
1110 (dev_id == 0xdc) || (dev_id == 0xcc) || (dev_id == 0xa3) ||
1111 (dev_id == 0xb3) || (dev_id == 0xd3) || (dev_id == 0xc3))) {
1112 nand_chip->cmdfunc(mtd, NAND_CMD_SET_FEATURES,
1113 ONDIE_ECC_FEATURE_ADDR, -1);
1114
1115 nand_chip->write_buf(mtd, &set_feature[0], 4);
1116 nand_chip->cmdfunc(mtd, NAND_CMD_GET_FEATURES,
1117 ONDIE_ECC_FEATURE_ADDR, -1);
1118
1119 for (i = 0; i < 4; i++)
1120 get_feature[i] = nand_chip->read_byte(mtd);
1121
1122 if (get_feature[0] & ENABLE_ONDIE_ECC)
1123 nand->on_die_ecc_enabled = true;
1124 else
1125 printf("%s: Unable to enable OnDie ECC\n", __func__);
1126
1127 /* Use the BBT pattern descriptors */
1128 nand_chip->bbt_td = &bbt_main_descr;
1129 nand_chip->bbt_md = &bbt_mirror_descr;
1130 }
1131}
1132
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301133static int arasan_nand_ecc_init(struct mtd_info *mtd)
1134{
1135 int found = -1;
Siva Durga Prasad Paladugub0a4f132018-01-04 16:04:22 +05301136 u32 regval, eccpos_start, i, eccaddr;
Scott Wood17fed142016-05-30 13:57:56 -05001137 struct nand_chip *nand_chip = mtd_to_nand(mtd);
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301138
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301139 for (i = 0; i < ARRAY_SIZE(ecc_matrix); i++) {
1140 if ((ecc_matrix[i].pagesize == mtd->writesize) &&
1141 (ecc_matrix[i].ecc_codeword_size >=
1142 nand_chip->ecc_step_ds)) {
1143 if (ecc_matrix[i].eccbits >=
1144 nand_chip->ecc_strength_ds) {
1145 found = i;
1146 break;
1147 }
1148 found = i;
1149 }
1150 }
1151
1152 if (found < 0)
1153 return 1;
1154
Siva Durga Prasad Paladugub0a4f132018-01-04 16:04:22 +05301155 eccaddr = mtd->writesize + mtd->oobsize -
1156 ecc_matrix[found].eccsize;
1157
1158 regval = eccaddr |
Siva Durga Prasad Paladugudb796632016-05-25 15:20:38 +05301159 (ecc_matrix[found].eccsize << ARASAN_NAND_ECC_SIZE_SHIFT) |
1160 (ecc_matrix[found].bch << ARASAN_NAND_ECC_BCH_SHIFT);
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301161 writel(regval, &arasan_nand_base->ecc_reg);
1162
Siva Durga Prasad Paladugudb796632016-05-25 15:20:38 +05301163 if (ecc_matrix[found].bch) {
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301164 regval = readl(&arasan_nand_base->memadr_reg2);
1165 regval &= ~ARASAN_NAND_MEM_ADDR2_BCH_MASK;
Siva Durga Prasad Paladugudb796632016-05-25 15:20:38 +05301166 regval |= (ecc_matrix[found].bchval <<
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301167 ARASAN_NAND_MEM_ADDR2_BCH_SHIFT);
1168 writel(regval, &arasan_nand_base->memadr_reg2);
1169 }
1170
Siva Durga Prasad Paladugudb796632016-05-25 15:20:38 +05301171 nand_oob.eccbytes = ecc_matrix[found].eccsize;
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301172 eccpos_start = mtd->oobsize - nand_oob.eccbytes;
1173
1174 for (i = 0; i < nand_oob.eccbytes; i++)
1175 nand_oob.eccpos[i] = eccpos_start + i;
1176
1177 nand_oob.oobfree[0].offset = 2;
1178 nand_oob.oobfree[0].length = eccpos_start - 2;
1179
Siva Durga Prasad Paladugudb796632016-05-25 15:20:38 +05301180 nand_chip->ecc.size = ecc_matrix[found].ecc_codeword_size;
1181 nand_chip->ecc.strength = ecc_matrix[found].eccbits;
1182 nand_chip->ecc.bytes = ecc_matrix[found].eccsize;
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301183 nand_chip->ecc.layout = &nand_oob;
1184
1185 return 0;
1186}
1187
1188static int arasan_nand_init(struct nand_chip *nand_chip, int devnum)
1189{
1190 struct arasan_nand_info *nand;
1191 struct mtd_info *mtd;
1192 int err = -1;
1193
1194 nand = calloc(1, sizeof(struct arasan_nand_info));
1195 if (!nand) {
1196 printf("%s: failed to allocate\n", __func__);
1197 return err;
1198 }
1199
1200 nand->nand_base = arasan_nand_base;
Scott Wood17fed142016-05-30 13:57:56 -05001201 mtd = nand_to_mtd(nand_chip);
1202 nand_set_controller_data(nand_chip, nand);
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301203
1204 /* Set the driver entry points for MTD */
1205 nand_chip->cmdfunc = arasan_nand_cmd_function;
1206 nand_chip->select_chip = arasan_nand_select_chip;
1207 nand_chip->read_byte = arasan_nand_read_byte;
1208
1209 /* Buffer read/write routines */
1210 nand_chip->read_buf = arasan_nand_read_buf;
1211 nand_chip->write_buf = arasan_nand_write_buf;
1212 nand_chip->bbt_options = NAND_BBT_USE_FLASH;
1213
1214 writel(0x0, &arasan_nand_base->cmd_reg);
1215 writel(0x0, &arasan_nand_base->pgm_reg);
1216
1217 /* first scan to find the device and get the page size */
1218 if (nand_scan_ident(mtd, 1, NULL)) {
1219 printf("%s: nand_scan_ident failed\n", __func__);
1220 goto fail;
1221 }
1222
Siva Durga Prasad Paladugu80c889c2018-01-04 16:04:20 +05301223 nand_chip->ecc.mode = NAND_ECC_HW;
1224 nand_chip->ecc.hwctl = NULL;
1225 nand_chip->ecc.read_page = arasan_nand_read_page_hwecc;
1226 nand_chip->ecc.write_page = arasan_nand_write_page_hwecc;
1227 nand_chip->ecc.read_oob = arasan_nand_read_oob;
1228 nand_chip->ecc.write_oob = arasan_nand_write_oob;
1229
Siva Durga Prasad Paladugu45e512c2018-01-04 16:04:21 +05301230 arasan_check_ondie(mtd);
1231
1232 /*
1233 * If on die supported, then give priority to on-die ecc and use
1234 * it instead of controller ecc.
1235 */
1236 if (nand->on_die_ecc_enabled) {
1237 nand_chip->ecc.strength = 1;
1238 nand_chip->ecc.size = mtd->writesize;
1239 nand_chip->ecc.bytes = 0;
1240 nand_chip->ecc.layout = &ondie_nand_oob_64;
1241 } else {
1242 if (arasan_nand_ecc_init(mtd)) {
1243 printf("%s: nand_ecc_init failed\n", __func__);
1244 goto fail;
1245 }
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301246 }
1247
1248 if (nand_scan_tail(mtd)) {
1249 printf("%s: nand_scan_tail failed\n", __func__);
1250 goto fail;
1251 }
1252
Scott Wood2c1b7e12016-05-30 13:57:55 -05001253 if (nand_register(devnum, mtd)) {
Siva Durga Prasad Paladuguc7b0bc72015-11-17 14:30:10 +05301254 printf("Nand Register Fail\n");
1255 goto fail;
1256 }
1257
1258 return 0;
1259fail:
1260 free(nand);
1261 return err;
1262}
1263
1264void board_nand_init(void)
1265{
1266 struct nand_chip *nand = &nand_chip[0];
1267
1268 if (arasan_nand_init(nand, 0))
1269 puts("NAND init failed\n");
1270}