/*
 * Arasan NAND Flash Controller Driver
 *
 * Copyright (C) 2014 - 2015 Xilinx, Inc.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <common.h>
#include <malloc.h>
#include <asm/io.h>
#include <linux/errno.h>
#include <linux/mtd/mtd.h>
#include <linux/mtd/rawnand.h>
#include <linux/mtd/partitions.h>
#include <linux/mtd/nand_ecc.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>
#include <nand.h>

struct arasan_nand_info {
	void __iomem *nand_base;
	u32 page;
	bool on_die_ecc_enabled;
};

struct nand_regs {
	u32 pkt_reg;
	u32 memadr_reg1;
	u32 memadr_reg2;
	u32 cmd_reg;
	u32 pgm_reg;
	u32 intsts_enr;
	u32 intsig_enr;
	u32 intsts_reg;
	u32 rdy_busy;
	u32 cms_sysadr_reg;
	u32 flash_sts_reg;
	u32 tmg_reg;
	u32 buf_dataport;
	u32 ecc_reg;
	u32 ecc_errcnt_reg;
	u32 ecc_sprcmd_reg;
	u32 errcnt_1bitreg;
	u32 errcnt_2bitreg;
	u32 errcnt_3bitreg;
	u32 errcnt_4bitreg;
	u32 dma_sysadr0_reg;
	u32 dma_bufbdry_reg;
	u32 cpu_rls_reg;
	u32 errcnt_5bitreg;
	u32 errcnt_6bitreg;
	u32 errcnt_7bitreg;
	u32 errcnt_8bitreg;
	u32 data_if_reg;
};

#define arasan_nand_base ((struct nand_regs __iomem *)ARASAN_NAND_BASEADDR)

struct arasan_nand_command_format {
	u8 cmd1;
	u8 cmd2;
	u8 addr_cycles;
	u32 pgm;
};

#define ONDIE_ECC_FEATURE_ADDR			0x90
#define ENABLE_ONDIE_ECC			0x08

#define ARASAN_PROG_RD_MASK			0x00000001
#define ARASAN_PROG_BLK_ERS_MASK		0x00000004
#define ARASAN_PROG_RD_ID_MASK			0x00000040
#define ARASAN_PROG_RD_STS_MASK			0x00000008
#define ARASAN_PROG_PG_PROG_MASK		0x00000010
#define ARASAN_PROG_RD_PARAM_PG_MASK		0x00000080
#define ARASAN_PROG_RST_MASK			0x00000100
#define ARASAN_PROG_GET_FTRS_MASK		0x00000200
#define ARASAN_PROG_SET_FTRS_MASK		0x00000400
#define ARASAN_PROG_CHNG_ROWADR_END_MASK	0x00400000

#define ARASAN_NAND_CMD_ECC_ON_MASK		0x80000000
#define ARASAN_NAND_CMD_CMD12_MASK		0xFFFF
#define ARASAN_NAND_CMD_PG_SIZE_MASK		0x3800000
#define ARASAN_NAND_CMD_PG_SIZE_SHIFT		23
#define ARASAN_NAND_CMD_CMD2_SHIFT		8
#define ARASAN_NAND_CMD_ADDR_CYCL_MASK		0x70000000
#define ARASAN_NAND_CMD_ADDR_CYCL_SHIFT		28

#define ARASAN_NAND_MEM_ADDR1_PAGE_MASK		0xFFFF0000
#define ARASAN_NAND_MEM_ADDR1_COL_MASK		0xFFFF
#define ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT	16
#define ARASAN_NAND_MEM_ADDR2_PAGE_MASK		0xFF
#define ARASAN_NAND_MEM_ADDR2_CS_MASK		0xC0000000
#define ARASAN_NAND_MEM_ADDR2_BCH_MASK		0xE000000
#define ARASAN_NAND_MEM_ADDR2_BCH_SHIFT		25

#define ARASAN_NAND_INT_STS_ERR_EN_MASK		0x10
#define ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK	0x08
#define ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK	0x02
#define ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK	0x01
#define ARASAN_NAND_INT_STS_XFR_CMPLT_MASK	0x04

#define ARASAN_NAND_PKT_REG_PKT_CNT_MASK	0xFFF000
#define ARASAN_NAND_PKT_REG_PKT_SIZE_MASK	0x7FF
#define ARASAN_NAND_PKT_REG_PKT_CNT_SHFT	12

#define ARASAN_NAND_ROW_ADDR_CYCL_MASK		0x0F
#define ARASAN_NAND_COL_ADDR_CYCL_MASK		0xF0
#define ARASAN_NAND_COL_ADDR_CYCL_SHIFT		4

#define ARASAN_NAND_ECC_SIZE_SHIFT		16
#define ARASAN_NAND_ECC_BCH_SHIFT		27

#define ARASAN_NAND_PKTSIZE_1K			1024
#define ARASAN_NAND_PKTSIZE_512			512

#define ARASAN_NAND_POLL_TIMEOUT		1000000
#define ARASAN_NAND_INVALID_ADDR_CYCL		0xFF

#define ERR_ADDR_CYCLE				-1
#define READ_BUFF_SIZE				0x4000

static struct arasan_nand_command_format *curr_cmd;

enum addr_cycles {
	NAND_ADDR_CYCL_NONE,
	NAND_ADDR_CYCL_ONE,
	NAND_ADDR_CYCL_ROW,
	NAND_ADDR_CYCL_COL,
	NAND_ADDR_CYCL_BOTH,
};

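/*
 * Command lookup table: maps each supported NAND opcode pair to the type
 * of address cycles it needs and to the matching bit in the controller's
 * program register.
 */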
static struct arasan_nand_command_format arasan_nand_commands[] = {
	{NAND_CMD_READ0, NAND_CMD_READSTART, NAND_ADDR_CYCL_BOTH,
	 ARASAN_PROG_RD_MASK},
	{NAND_CMD_RNDOUT, NAND_CMD_RNDOUTSTART, NAND_ADDR_CYCL_COL,
	 ARASAN_PROG_RD_MASK},
	{NAND_CMD_READID, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_RD_ID_MASK},
	{NAND_CMD_STATUS, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE,
	 ARASAN_PROG_RD_STS_MASK},
	{NAND_CMD_SEQIN, NAND_CMD_PAGEPROG, NAND_ADDR_CYCL_BOTH,
	 ARASAN_PROG_PG_PROG_MASK},
	{NAND_CMD_RNDIN, NAND_CMD_NONE, NAND_ADDR_CYCL_COL,
	 ARASAN_PROG_CHNG_ROWADR_END_MASK},
	{NAND_CMD_ERASE1, NAND_CMD_ERASE2, NAND_ADDR_CYCL_ROW,
	 ARASAN_PROG_BLK_ERS_MASK},
	{NAND_CMD_RESET, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE,
	 ARASAN_PROG_RST_MASK},
	{NAND_CMD_PARAM, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_RD_PARAM_PG_MASK},
	{NAND_CMD_GET_FEATURES, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_GET_FTRS_MASK},
	{NAND_CMD_SET_FEATURES, NAND_CMD_NONE, NAND_ADDR_CYCL_ONE,
	 ARASAN_PROG_SET_FTRS_MASK},
	{NAND_CMD_NONE, NAND_CMD_NONE, NAND_ADDR_CYCL_NONE, 0},
};

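/*
 * Hardware ECC configuration table: for each page size and ECC codeword
 * size, the number of correctable bits, whether BCH mode is used (and its
 * register encoding), and the address/length of the ECC bytes placed at
 * the end of the spare area.
 */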
struct arasan_ecc_matrix {
	u32 pagesize;
	u32 ecc_codeword_size;
	u8 eccbits;
	u8 bch;
	u8 bchval;
	u16 eccaddr;
	u16 eccsize;
};

static const struct arasan_ecc_matrix ecc_matrix[] = {
	{512, 512, 1, 0, 0, 0x20D, 0x3},
	{512, 512, 4, 1, 3, 0x209, 0x7},
	{512, 512, 8, 1, 2, 0x203, 0xD},
	/*
	 * 2K byte page
	 */
	{2048, 512, 1, 0, 0, 0x834, 0xC},
	{2048, 512, 4, 1, 3, 0x826, 0x1A},
	{2048, 512, 8, 1, 2, 0x80c, 0x34},
	{2048, 512, 12, 1, 1, 0x822, 0x4E},
	{2048, 512, 16, 1, 0, 0x808, 0x68},
	{2048, 1024, 24, 1, 4, 0x81c, 0x54},
	/*
	 * 4K byte page
	 */
	{4096, 512, 1, 0, 0, 0x1068, 0x18},
	{4096, 512, 4, 1, 3, 0x104c, 0x34},
	{4096, 512, 8, 1, 2, 0x1018, 0x68},
	{4096, 512, 12, 1, 1, 0x1044, 0x9C},
	{4096, 512, 16, 1, 0, 0x1010, 0xD0},
	{4096, 1024, 24, 1, 4, 0x1038, 0xA8},
	/*
	 * 8K byte page
	 */
	{8192, 512, 1, 0, 0, 0x20d0, 0x30},
	{8192, 512, 4, 1, 3, 0x2098, 0x68},
	{8192, 512, 8, 1, 2, 0x2030, 0xD0},
	{8192, 512, 12, 1, 1, 0x2088, 0x138},
	{8192, 512, 16, 1, 0, 0x2020, 0x1A0},
	{8192, 1024, 24, 1, 4, 0x2070, 0x150},
	/*
	 * 16K byte page
	 */
	{16384, 512, 1, 0, 0, 0x4460, 0x60},
	{16384, 512, 4, 1, 3, 0x43f0, 0xD0},
	{16384, 512, 8, 1, 2, 0x4320, 0x1A0},
	{16384, 512, 12, 1, 1, 0x4250, 0x270},
	{16384, 512, 16, 1, 0, 0x4180, 0x340},
	{16384, 1024, 24, 1, 4, 0x4220, 0x2A0}
};

static struct nand_ecclayout ondie_nand_oob_64 = {
	.eccbytes = 32,

	.eccpos = {
		8, 9, 10, 11, 12, 13, 14, 15,
		24, 25, 26, 27, 28, 29, 30, 31,
		40, 41, 42, 43, 44, 45, 46, 47,
		56, 57, 58, 59, 60, 61, 62, 63
	},

	.oobfree = {
		{ .offset = 4, .length = 4 },
		{ .offset = 20, .length = 4 },
		{ .offset = 36, .length = 4 },
		{ .offset = 52, .length = 4 }
	}
};

/*
 * bbt descriptors for chips with on-die ECC and
 * chips with 64-byte OOB
 */
static u8 bbt_pattern[] = {'B', 'b', 't', '0' };
static u8 mirror_pattern[] = {'1', 't', 'b', 'B' };

static struct nand_bbt_descr bbt_main_descr = {
	.options = NAND_BBT_LASTBLOCK | NAND_BBT_CREATE | NAND_BBT_WRITE |
		NAND_BBT_2BIT | NAND_BBT_VERSION | NAND_BBT_PERCHIP,
	.offs = 4,
	.len = 4,
	.veroffs = 20,
	.maxblocks = 4,
	.pattern = bbt_pattern
};

static struct nand_bbt_descr bbt_mirror_descr = {
	.options = NAND_BBT_LASTBLOCK | NAND_BBT_CREATE | NAND_BBT_WRITE |
		NAND_BBT_2BIT | NAND_BBT_VERSION | NAND_BBT_PERCHIP,
	.offs = 4,
	.len = 4,
	.veroffs = 20,
	.maxblocks = 4,
	.pattern = mirror_pattern
};

static u8 buf_data[READ_BUFF_SIZE];
static u32 buf_index;

static struct nand_ecclayout nand_oob;

static struct nand_chip nand_chip[CONFIG_SYS_MAX_NAND_DEVICE];

static void arasan_nand_select_chip(struct mtd_info *mtd, int chip)
{
}

static void arasan_nand_enable_ecc(void)
{
	u32 reg_val;

	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val |= ARASAN_NAND_CMD_ECC_ON_MASK;

	writel(reg_val, &arasan_nand_base->cmd_reg);
}

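/*
 * Work out how many address cycles the current command needs, using the
 * row/column cycle counts reported in the chip's ONFI parameter page.
 * Returns ARASAN_NAND_INVALID_ADDR_CYCL for an unknown cycle type.
 */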
static u8 arasan_nand_get_addrcycle(struct mtd_info *mtd)
{
	u8 addrcycles;
	struct nand_chip *chip = mtd_to_nand(mtd);

	switch (curr_cmd->addr_cycles) {
	case NAND_ADDR_CYCL_NONE:
		addrcycles = 0;
		break;
	case NAND_ADDR_CYCL_ONE:
		addrcycles = 1;
		break;
	case NAND_ADDR_CYCL_ROW:
		addrcycles = chip->onfi_params.addr_cycles &
			     ARASAN_NAND_ROW_ADDR_CYCL_MASK;
		break;
	case NAND_ADDR_CYCL_COL:
		addrcycles = (chip->onfi_params.addr_cycles &
			      ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
			      ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
		break;
	case NAND_ADDR_CYCL_BOTH:
		addrcycles = chip->onfi_params.addr_cycles &
			     ARASAN_NAND_ROW_ADDR_CYCL_MASK;
		addrcycles += (chip->onfi_params.addr_cycles &
			       ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
			       ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
		break;
	default:
		addrcycles = ARASAN_NAND_INVALID_ADDR_CYCL;
		break;
	}
	return addrcycles;
}

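/*
 * Read 'size' bytes of the current page through the controller's buffer
 * data port, one 512- or 1024-byte packet at a time, polling the
 * buffer-ready and transfer-complete status bits. When the controller's
 * hardware ECC is in use (no on-die ECC), the ECC error flags are checked
 * after the transfer.
 */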
static int arasan_nand_read_page(struct mtd_info *mtd, u8 *buf, u32 size)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct arasan_nand_info *nand = nand_get_controller_data(chip);
	u32 reg_val, i, pktsize, pktnum;
	u32 *bufptr = (u32 *)buf;
	u32 timeout;
	u32 rdcount = 0;
	u8 addr_cycles;

	if (chip->ecc_step_ds >= ARASAN_NAND_PKTSIZE_1K)
		pktsize = ARASAN_NAND_PKTSIZE_1K;
	else
		pktsize = ARASAN_NAND_PKTSIZE_512;

	if (size % pktsize)
		pktnum = size/pktsize + 1;
	else
		pktnum = size/pktsize;

	reg_val = readl(&arasan_nand_base->intsts_enr);
	reg_val |= ARASAN_NAND_INT_STS_ERR_EN_MASK |
		   ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK;
	writel(reg_val, &arasan_nand_base->intsts_enr);

	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
	reg_val |= (pktnum << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) |
		   pktsize;
	writel(reg_val, &arasan_nand_base->pkt_reg);

	if (!nand->on_die_ecc_enabled) {
		arasan_nand_enable_ecc();
		addr_cycles = arasan_nand_get_addrcycle(mtd);
		if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
			return ERR_ADDR_CYCLE;

		writel((NAND_CMD_RNDOUTSTART << ARASAN_NAND_CMD_CMD2_SHIFT) |
		       NAND_CMD_RNDOUT | (addr_cycles <<
		       ARASAN_NAND_CMD_ADDR_CYCL_SHIFT),
		       &arasan_nand_base->ecc_sprcmd_reg);
	}
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (rdcount < pktnum) {
		timeout = ARASAN_NAND_POLL_TIMEOUT;
		while (!(readl(&arasan_nand_base->intsts_reg) &
			ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK) && timeout) {
			udelay(1);
			timeout--;
		}
		if (!timeout) {
			puts("arasan_read_page: timedout:Buff RDY\n");
			return -ETIMEDOUT;
		}

		rdcount++;

		if (pktnum == rdcount) {
			reg_val = readl(&arasan_nand_base->intsts_enr);
			reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
			writel(reg_val, &arasan_nand_base->intsts_enr);
		} else {
			reg_val = readl(&arasan_nand_base->intsts_enr);
			writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
			       &arasan_nand_base->intsts_enr);
		}
		reg_val = readl(&arasan_nand_base->intsts_reg);
		writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
		       &arasan_nand_base->intsts_reg);

		for (i = 0; i < pktsize/4; i++)
			bufptr[i] = readl(&arasan_nand_base->buf_dataport);

		bufptr += pktsize/4;

		if (rdcount >= pktnum)
			break;

		writel(ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
		       &arasan_nand_base->intsts_enr);
	}

	timeout = ARASAN_NAND_POLL_TIMEOUT;

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout) {
		puts("arasan rd_page timedout:Xfer CMPLT\n");
		return -ETIMEDOUT;
	}

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	if (!nand->on_die_ecc_enabled) {
		if (readl(&arasan_nand_base->intsts_reg) &
		    ARASAN_NAND_INT_STS_MUL_BIT_ERR_MASK) {
			printf("arasan rd_page:sbiterror\n");
			return -1;
		}

		if (readl(&arasan_nand_base->intsts_reg) &
		    ARASAN_NAND_INT_STS_ERR_EN_MASK) {
			mtd->ecc_stats.failed++;
			printf("arasan rd_page:multibiterror\n");
			return -1;
		}
	}

	return 0;
}

static int arasan_nand_read_page_hwecc(struct mtd_info *mtd,
		struct nand_chip *chip, u8 *buf, int oob_required, int page)
{
	int status;

	status = arasan_nand_read_page(mtd, buf, (mtd->writesize));

	if (oob_required)
		chip->ecc.read_oob(mtd, chip, page);

	return status;
}

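/*
 * Push a write buffer into the controller's data port. Leading bytes are
 * written with byte/half-word accesses until the pointer is 32-bit
 * aligned, the bulk is written as words, and any trailing bytes are
 * flushed with half-word/byte accesses.
 */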
static void arasan_nand_fill_tx(const u8 *buf, int len)
{
	u32 __iomem *nand = &arasan_nand_base->buf_dataport;

	if (((unsigned long)buf & 0x3) != 0) {
		if (((unsigned long)buf & 0x1) != 0) {
			if (len) {
				writeb(*buf, nand);
				buf += 1;
				len--;
			}
		}

		if (((unsigned long)buf & 0x3) != 0) {
			if (len >= 2) {
				writew(*(u16 *)buf, nand);
				buf += 2;
				len -= 2;
			}
		}
	}

	while (len >= 4) {
		writel(*(u32 *)buf, nand);
		buf += 4;
		len -= 4;
	}

	if (len) {
		if (len >= 2) {
			writew(*(u16 *)buf, nand);
			buf += 2;
			len -= 2;
		}

		if (len)
			writeb(*buf, nand);
	}
}

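/*
 * Program one page: configure the packet count/size, enable controller
 * ECC unless on-die ECC is active, stream the data through the buffer
 * data port and wait for transfer completion. The OOB area is written
 * separately via chip->ecc.write_oob when requested.
 */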
static int arasan_nand_write_page_hwecc(struct mtd_info *mtd,
		struct nand_chip *chip, const u8 *buf, int oob_required,
		int page)
{
	u32 reg_val, i, pktsize, pktnum;
	const u32 *bufptr = (const u32 *)buf;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u32 size = mtd->writesize;
	u32 rdcount = 0;
	u8 column_addr_cycles;
	struct arasan_nand_info *nand = nand_get_controller_data(chip);

	if (chip->ecc_step_ds >= ARASAN_NAND_PKTSIZE_1K)
		pktsize = ARASAN_NAND_PKTSIZE_1K;
	else
		pktsize = ARASAN_NAND_PKTSIZE_512;

	if (size % pktsize)
		pktnum = size/pktsize + 1;
	else
		pktnum = size/pktsize;

	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
	reg_val |= (pktnum << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | pktsize;
	writel(reg_val, &arasan_nand_base->pkt_reg);

	if (!nand->on_die_ecc_enabled) {
		arasan_nand_enable_ecc();
		column_addr_cycles = (chip->onfi_params.addr_cycles &
				      ARASAN_NAND_COL_ADDR_CYCL_MASK) >>
				      ARASAN_NAND_COL_ADDR_CYCL_SHIFT;
		writel((NAND_CMD_RNDIN | (column_addr_cycles << 28)),
		       &arasan_nand_base->ecc_sprcmd_reg);
	}
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (rdcount < pktnum) {
		timeout = ARASAN_NAND_POLL_TIMEOUT;
		while (!(readl(&arasan_nand_base->intsts_reg) &
			ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK) && timeout) {
			udelay(1);
			timeout--;
		}

		if (!timeout) {
			puts("arasan_write_page: timedout:Buff RDY\n");
			return -ETIMEDOUT;
		}

		rdcount++;

		if (pktnum == rdcount) {
			reg_val = readl(&arasan_nand_base->intsts_enr);
			reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
			writel(reg_val, &arasan_nand_base->intsts_enr);
		} else {
			reg_val = readl(&arasan_nand_base->intsts_enr);
			writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
			       &arasan_nand_base->intsts_enr);
		}

		reg_val = readl(&arasan_nand_base->intsts_reg);
		writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
		       &arasan_nand_base->intsts_reg);

		for (i = 0; i < pktsize/4; i++)
			writel(bufptr[i], &arasan_nand_base->buf_dataport);

		bufptr += pktsize/4;

		if (rdcount >= pktnum)
			break;

		writel(ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
		       &arasan_nand_base->intsts_enr);
	}

	timeout = ARASAN_NAND_POLL_TIMEOUT;

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout) {
		puts("arasan write_page timedout:Xfer CMPLT\n");
		return -ETIMEDOUT;
	}

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	if (oob_required)
		chip->ecc.write_oob(mtd, chip, nand->page);

	return 0;
}

static int arasan_nand_read_oob(struct mtd_info *mtd, struct nand_chip *chip,
				int page)
{
	chip->cmdfunc(mtd, NAND_CMD_READOOB, 0, page);
	chip->read_buf(mtd, chip->oob_poi, (mtd->oobsize));

	return 0;
}

static int arasan_nand_write_oob(struct mtd_info *mtd, struct nand_chip *chip,
				 int page)
{
	int status = 0;
	const u8 *buf = chip->oob_poi;

	chip->cmdfunc(mtd, NAND_CMD_SEQIN, mtd->writesize, page);
	chip->write_buf(mtd, buf, mtd->oobsize);

	return status;
}

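/*
 * Issue the RESET opcode through the command/program registers and poll
 * the transfer-complete status bit until the controller reports that the
 * command has finished.
 */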
static int arasan_nand_reset(struct arasan_nand_command_format *curr_cmd)
{
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u32 cmd_reg = 0;

	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	cmd_reg = readl(&arasan_nand_base->cmd_reg);
	cmd_reg &= ~ARASAN_NAND_CMD_CMD12_MASK;

	cmd_reg |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	writel(cmd_reg, &arasan_nand_base->cmd_reg);
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout) {
		printf("ERROR:%s timedout\n", __func__);
		return -ETIMEDOUT;
	}

	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);

	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	return 0;
}

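/*
 * Translate mtd->writesize into the controller's page-size field
 * encoding; page sizes outside the supported set are reported and left
 * at the default encoding.
 */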
static u8 arasan_nand_page(struct mtd_info *mtd)
{
	u8 page_val = 0;

	switch (mtd->writesize) {
	case 512:
		page_val = 0;
		break;
	case 2048:
		page_val = 1;
		break;
	case 4096:
		page_val = 2;
		break;
	case 8192:
		page_val = 3;
		break;
	case 16384:
		page_val = 4;
		break;
	case 1024:
		page_val = 5;
		break;
	default:
		printf("%s:Pagesize>16K\n", __func__);
		break;
	}

	return page_val;
}

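/*
 * Set up the command, page-size and address-cycle fields for a write-type
 * command (SET_FEATURES / SEQIN) and program the page and column into the
 * memory address registers. The actual payload is transferred later by
 * write_buf or the page-program path.
 */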
static int arasan_nand_send_wrcmd(struct arasan_nand_command_format *curr_cmd,
			int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val, page;
	u8 page_val, addr_cycles;

	writel(ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	if (curr_cmd->cmd1 == NAND_CMD_SEQIN) {
		reg_val &= ~ARASAN_NAND_CMD_PG_SIZE_MASK;
		page_val = arasan_nand_page(mtd);
		reg_val |= (page_val << ARASAN_NAND_CMD_PG_SIZE_SHIFT);
	}

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
	addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val |= (addr_cycles <<
		    ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);
	writel(reg_val, &arasan_nand_base->cmd_reg);

	if (page_addr == -1)
		page_addr = 0;

	page = (page_addr << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
		ARASAN_NAND_MEM_ADDR1_PAGE_MASK;
	column &= ARASAN_NAND_MEM_ADDR1_COL_MASK;
	writel(page|column, &arasan_nand_base->memadr_reg1);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
	reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);

	return 0;
}

static void arasan_nand_write_buf(struct mtd_info *mtd, const u8 *buf, int len)
{
	u32 reg_val;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;

	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);

	reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | len;
	writel(reg_val, &arasan_nand_base->pkt_reg);
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK) && timeout) {
		udelay(1);
		timeout--;
	}

	if (!timeout)
		puts("ERROR:arasan_nand_write_buf timedout:Buff RDY\n");

	reg_val = readl(&arasan_nand_base->intsts_enr);
	reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
	writel(reg_val, &arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_WR_RDY_MASK,
	       &arasan_nand_base->intsts_reg);

	arasan_nand_fill_tx(buf, len);

	timeout = ARASAN_NAND_POLL_TIMEOUT;
	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout)
		puts("ERROR:arasan_nand_write_buf timedout:Xfer CMPLT\n");

	writel(readl(&arasan_nand_base->intsts_enr) |
	       ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	writel(readl(&arasan_nand_base->intsts_reg) |
	       ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);
}

static int arasan_nand_erase(struct arasan_nand_command_format *curr_cmd,
			int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val, page;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u8 row_addr_cycles;

	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	row_addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (row_addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
	reg_val |= (row_addr_cycles <<
		    ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);

	writel(reg_val, &arasan_nand_base->cmd_reg);

	page = (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
		ARASAN_NAND_MEM_ADDR1_COL_MASK;
	column = page_addr & ARASAN_NAND_MEM_ADDR1_COL_MASK;
	writel(column | (page << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT),
	       &arasan_nand_base->memadr_reg1);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
	reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}
	if (!timeout) {
		printf("ERROR:%s timedout:Xfer CMPLT\n", __func__);
		return -ETIMEDOUT;
	}

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	return 0;
}

static int arasan_nand_read_status(struct arasan_nand_command_format *curr_cmd,
				int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;
	u8 addr_cycles;

	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);
	addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;
	reg_val |= (addr_cycles <<
		    ARASAN_NAND_CMD_ADDR_CYCL_SHIFT);

	writel(reg_val, &arasan_nand_base->cmd_reg);

	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
	reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | 1;
	writel(reg_val, &arasan_nand_base->pkt_reg);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);

	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);
	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}

	if (!timeout) {
		printf("ERROR:%s: timedout:Xfer CMPLT\n", __func__);
		return -ETIMEDOUT;
	}

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);

	return 0;
}

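/*
 * Set up the command, page-size and address-cycle fields for a read-type
 * command (READ0 / RNDOUT / READID / PARAM / GET_FEATURES), program the
 * page and column addresses, and reset the byte-read index. Controller
 * ECC is turned off here; the page-read path re-enables it when needed.
 */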
static int arasan_nand_send_rdcmd(struct arasan_nand_command_format *curr_cmd,
			int column, int page_addr, struct mtd_info *mtd)
{
	u32 reg_val, addr_cycles, page;
	u8 page_val;

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
	       &arasan_nand_base->intsts_enr);

	reg_val = readl(&arasan_nand_base->cmd_reg);
	reg_val &= ~ARASAN_NAND_CMD_CMD12_MASK;
	reg_val |= curr_cmd->cmd1 |
		   (curr_cmd->cmd2 << ARASAN_NAND_CMD_CMD2_SHIFT);

	if (curr_cmd->cmd1 == NAND_CMD_RNDOUT ||
	    curr_cmd->cmd1 == NAND_CMD_READ0) {
		reg_val &= ~ARASAN_NAND_CMD_PG_SIZE_MASK;
		page_val = arasan_nand_page(mtd);
		reg_val |= (page_val << ARASAN_NAND_CMD_PG_SIZE_SHIFT);
	}

	reg_val &= ~ARASAN_NAND_CMD_ECC_ON_MASK;

	reg_val &= ~ARASAN_NAND_CMD_ADDR_CYCL_MASK;

	addr_cycles = arasan_nand_get_addrcycle(mtd);

	if (addr_cycles == ARASAN_NAND_INVALID_ADDR_CYCL)
		return ERR_ADDR_CYCLE;

	reg_val |= (addr_cycles << 28);
	writel(reg_val, &arasan_nand_base->cmd_reg);

	if (page_addr == -1)
		page_addr = 0;

	page = (page_addr << ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT) &
		ARASAN_NAND_MEM_ADDR1_PAGE_MASK;
	column &= ARASAN_NAND_MEM_ADDR1_COL_MASK;
	writel(page | column, &arasan_nand_base->memadr_reg1);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_PAGE_MASK;
	reg_val |= (page_addr >> ARASAN_NAND_MEM_ADDR1_PAGE_SHIFT);
	writel(reg_val, &arasan_nand_base->memadr_reg2);

	reg_val = readl(&arasan_nand_base->memadr_reg2);
	reg_val &= ~ARASAN_NAND_MEM_ADDR2_CS_MASK;
	writel(reg_val, &arasan_nand_base->memadr_reg2);
	buf_index = 0;

	return 0;
}

static void arasan_nand_read_buf(struct mtd_info *mtd, u8 *buf, int size)
{
	u32 reg_val, i;
	u32 *bufptr = (u32 *)buf;
	u32 timeout = ARASAN_NAND_POLL_TIMEOUT;

	reg_val = readl(&arasan_nand_base->pkt_reg);
	reg_val &= ~(ARASAN_NAND_PKT_REG_PKT_CNT_MASK |
		     ARASAN_NAND_PKT_REG_PKT_SIZE_MASK);
	reg_val |= (1 << ARASAN_NAND_PKT_REG_PKT_CNT_SHFT) | size;
	writel(reg_val, &arasan_nand_base->pkt_reg);

	writel(curr_cmd->pgm, &arasan_nand_base->pgm_reg);

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK) && timeout) {
		udelay(1);
		timeout--;
	}

	if (!timeout)
		puts("ERROR:arasan_nand_read_buf timedout:Buff RDY\n");

	reg_val = readl(&arasan_nand_base->intsts_enr);
	reg_val |= ARASAN_NAND_INT_STS_XFR_CMPLT_MASK;
	writel(reg_val, &arasan_nand_base->intsts_enr);

	writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_BUF_RD_RDY_MASK,
	       &arasan_nand_base->intsts_reg);

	buf_index = 0;
	for (i = 0; i < size / 4; i++)
		bufptr[i] = readl(&arasan_nand_base->buf_dataport);

	if (size & 0x03)
		bufptr[i] = readl(&arasan_nand_base->buf_dataport);

	timeout = ARASAN_NAND_POLL_TIMEOUT;

	while (!(readl(&arasan_nand_base->intsts_reg) &
		ARASAN_NAND_INT_STS_XFR_CMPLT_MASK) && timeout) {
		udelay(1);
		timeout--;
	}

	if (!timeout)
		puts("ERROR:arasan_nand_read_buf timedout:Xfer CMPLT\n");

	reg_val = readl(&arasan_nand_base->intsts_enr);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);
	reg_val = readl(&arasan_nand_base->intsts_reg);
	writel(reg_val | ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_reg);
}

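/*
 * Return one byte of the last command's response. On the first call the
 * whole response (ID, ONFI parameter page, features, etc.) is fetched
 * into a static buffer via read_buf; STATUS reads come straight from the
 * flash status register.
 */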
static u8 arasan_nand_read_byte(struct mtd_info *mtd)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 size;
	u8 val;
	struct nand_onfi_params *p;

	if (buf_index == 0) {
		p = &chip->onfi_params;
		if (curr_cmd->cmd1 == NAND_CMD_READID)
			size = 4;
		else if (curr_cmd->cmd1 == NAND_CMD_PARAM)
			size = sizeof(struct nand_onfi_params);
		else if (curr_cmd->cmd1 == NAND_CMD_RNDOUT)
			size = le16_to_cpu(p->ext_param_page_length) * 16;
		else if (curr_cmd->cmd1 == NAND_CMD_GET_FEATURES)
			size = 4;
		else if (curr_cmd->cmd1 == NAND_CMD_STATUS)
			return readb(&arasan_nand_base->flash_sts_reg);
		else
			size = 8;
		chip->read_buf(mtd, &buf_data[0], size);
	}

	val = *(&buf_data[0] + buf_index);
	buf_index++;

	return val;
}

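/*
 * cmdfunc hook: look up the requested opcode in arasan_nand_commands and
 * dispatch to the matching reset/read/write/erase/status helper. READOOB
 * on large-page devices is rewritten as READ0 at the OOB column.
 */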
static void arasan_nand_cmd_function(struct mtd_info *mtd, unsigned int command,
				     int column, int page_addr)
{
	u32 i, ret = 0;
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct arasan_nand_info *nand = nand_get_controller_data(chip);

	curr_cmd = NULL;
	writel(ARASAN_NAND_INT_STS_XFR_CMPLT_MASK,
	       &arasan_nand_base->intsts_enr);

	if ((command == NAND_CMD_READOOB) &&
	    (mtd->writesize > 512)) {
		column += mtd->writesize;
		command = NAND_CMD_READ0;
	}

	/* Get the command format */
	for (i = 0; (arasan_nand_commands[i].cmd1 != NAND_CMD_NONE ||
		     arasan_nand_commands[i].cmd2 != NAND_CMD_NONE); i++) {
		if (command == arasan_nand_commands[i].cmd1) {
			curr_cmd = &arasan_nand_commands[i];
			break;
		}
	}

	if (curr_cmd == NULL) {
		printf("Unsupported Command; 0x%x\n", command);
		return;
	}

	if (curr_cmd->cmd1 == NAND_CMD_RESET)
		ret = arasan_nand_reset(curr_cmd);

	if ((curr_cmd->cmd1 == NAND_CMD_READID) ||
	    (curr_cmd->cmd1 == NAND_CMD_PARAM) ||
	    (curr_cmd->cmd1 == NAND_CMD_RNDOUT) ||
	    (curr_cmd->cmd1 == NAND_CMD_GET_FEATURES) ||
	    (curr_cmd->cmd1 == NAND_CMD_READ0))
		ret = arasan_nand_send_rdcmd(curr_cmd, column, page_addr, mtd);

	if ((curr_cmd->cmd1 == NAND_CMD_SET_FEATURES) ||
	    (curr_cmd->cmd1 == NAND_CMD_SEQIN)) {
		nand->page = page_addr;
		ret = arasan_nand_send_wrcmd(curr_cmd, column, page_addr, mtd);
	}

	if (curr_cmd->cmd1 == NAND_CMD_ERASE1)
		ret = arasan_nand_erase(curr_cmd, column, page_addr, mtd);

	if (curr_cmd->cmd1 == NAND_CMD_STATUS)
		ret = arasan_nand_read_status(curr_cmd, column, page_addr, mtd);

	if (ret != 0)
		printf("ERROR:%s:command:0x%x\n", __func__, curr_cmd->cmd1);
}

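/*
 * Probe for Micron devices that provide internal (on-die) ECC, enable it
 * through the SET/GET_FEATURES cycle, and switch to the flash-based BBT
 * pattern descriptors when it is active.
 */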
static void arasan_check_ondie(struct mtd_info *mtd)
{
	struct nand_chip *nand_chip = mtd_to_nand(mtd);
	struct arasan_nand_info *nand = nand_get_controller_data(nand_chip);
	u8 maf_id, dev_id;
	u8 get_feature[4];
	u8 set_feature[4] = {ENABLE_ONDIE_ECC, 0x00, 0x00, 0x00};
	u32 i;

	/* Send the command for reading device ID */
	nand_chip->cmdfunc(mtd, NAND_CMD_RESET, -1, -1);
	nand_chip->cmdfunc(mtd, NAND_CMD_READID, 0, -1);

	/* Read manufacturer and device IDs */
	maf_id = nand_chip->read_byte(mtd);
	dev_id = nand_chip->read_byte(mtd);

	if ((maf_id == NAND_MFR_MICRON) &&
	    ((dev_id == 0xf1) || (dev_id == 0xa1) || (dev_id == 0xb1) ||
	     (dev_id == 0xaa) || (dev_id == 0xba) || (dev_id == 0xda) ||
	     (dev_id == 0xca) || (dev_id == 0xac) || (dev_id == 0xbc) ||
	     (dev_id == 0xdc) || (dev_id == 0xcc) || (dev_id == 0xa3) ||
	     (dev_id == 0xb3) || (dev_id == 0xd3) || (dev_id == 0xc3))) {
		nand_chip->cmdfunc(mtd, NAND_CMD_SET_FEATURES,
				   ONDIE_ECC_FEATURE_ADDR, -1);

		nand_chip->write_buf(mtd, &set_feature[0], 4);
		nand_chip->cmdfunc(mtd, NAND_CMD_GET_FEATURES,
				   ONDIE_ECC_FEATURE_ADDR, -1);

		for (i = 0; i < 4; i++)
			get_feature[i] = nand_chip->read_byte(mtd);

		if (get_feature[0] & ENABLE_ONDIE_ECC)
			nand->on_die_ecc_enabled = true;
		else
			printf("%s: Unable to enable OnDie ECC\n", __func__);

		/* Use the BBT pattern descriptors */
		nand_chip->bbt_td = &bbt_main_descr;
		nand_chip->bbt_md = &bbt_mirror_descr;
	}
}

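/*
 * Pick the best matching entry from ecc_matrix for this chip's page size
 * and required ECC strength, program the controller's ECC address/size
 * and BCH registers accordingly, and build the software OOB layout with
 * the ECC bytes packed at the end of the spare area.
 */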
static int arasan_nand_ecc_init(struct mtd_info *mtd)
{
	int found = -1;
	u32 regval, eccpos_start, i, eccaddr;
	struct nand_chip *nand_chip = mtd_to_nand(mtd);

	for (i = 0; i < ARRAY_SIZE(ecc_matrix); i++) {
		if ((ecc_matrix[i].pagesize == mtd->writesize) &&
		    (ecc_matrix[i].ecc_codeword_size >=
		     nand_chip->ecc_step_ds)) {
			if (ecc_matrix[i].eccbits >=
			    nand_chip->ecc_strength_ds) {
				found = i;
				break;
			}
			found = i;
		}
	}

	if (found < 0)
		return 1;

	eccaddr = mtd->writesize + mtd->oobsize -
		  ecc_matrix[found].eccsize;

	regval = eccaddr |
		 (ecc_matrix[found].eccsize << ARASAN_NAND_ECC_SIZE_SHIFT) |
		 (ecc_matrix[found].bch << ARASAN_NAND_ECC_BCH_SHIFT);
	writel(regval, &arasan_nand_base->ecc_reg);

	if (ecc_matrix[found].bch) {
		regval = readl(&arasan_nand_base->memadr_reg2);
		regval &= ~ARASAN_NAND_MEM_ADDR2_BCH_MASK;
		regval |= (ecc_matrix[found].bchval <<
			   ARASAN_NAND_MEM_ADDR2_BCH_SHIFT);
		writel(regval, &arasan_nand_base->memadr_reg2);
	}

	nand_oob.eccbytes = ecc_matrix[found].eccsize;
	eccpos_start = mtd->oobsize - nand_oob.eccbytes;

	for (i = 0; i < nand_oob.eccbytes; i++)
		nand_oob.eccpos[i] = eccpos_start + i;

	nand_oob.oobfree[0].offset = 2;
	nand_oob.oobfree[0].length = eccpos_start - 2;

	nand_chip->ecc.size = ecc_matrix[found].ecc_codeword_size;
	nand_chip->ecc.strength = ecc_matrix[found].eccbits;
	nand_chip->ecc.bytes = ecc_matrix[found].eccsize;
	nand_chip->ecc.layout = &nand_oob;

	return 0;
}

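/*
 * Register one chip with the MTD NAND core: hook up the driver's
 * cmdfunc/read/write callbacks, identify the device, prefer on-die ECC
 * when available (falling back to controller ECC otherwise), and finish
 * with nand_scan_tail()/nand_register().
 */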
static int arasan_nand_init(struct nand_chip *nand_chip, int devnum)
{
	struct arasan_nand_info *nand;
	struct mtd_info *mtd;
	int err = -1;

	nand = calloc(1, sizeof(struct arasan_nand_info));
	if (!nand) {
		printf("%s: failed to allocate\n", __func__);
		return err;
	}

	nand->nand_base = arasan_nand_base;
	mtd = nand_to_mtd(nand_chip);
	nand_set_controller_data(nand_chip, nand);

	/* Set the driver entry points for MTD */
	nand_chip->cmdfunc = arasan_nand_cmd_function;
	nand_chip->select_chip = arasan_nand_select_chip;
	nand_chip->read_byte = arasan_nand_read_byte;

	/* Buffer read/write routines */
	nand_chip->read_buf = arasan_nand_read_buf;
	nand_chip->write_buf = arasan_nand_write_buf;
	nand_chip->bbt_options = NAND_BBT_USE_FLASH;

	writel(0x0, &arasan_nand_base->cmd_reg);
	writel(0x0, &arasan_nand_base->pgm_reg);

	/* first scan to find the device and get the page size */
	if (nand_scan_ident(mtd, 1, NULL)) {
		printf("%s: nand_scan_ident failed\n", __func__);
		goto fail;
	}

	nand_chip->ecc.mode = NAND_ECC_HW;
	nand_chip->ecc.hwctl = NULL;
	nand_chip->ecc.read_page = arasan_nand_read_page_hwecc;
	nand_chip->ecc.write_page = arasan_nand_write_page_hwecc;
	nand_chip->ecc.read_oob = arasan_nand_read_oob;
	nand_chip->ecc.write_oob = arasan_nand_write_oob;

	arasan_check_ondie(mtd);

	/*
	 * If on-die ECC is supported, give it priority and use it
	 * instead of the controller ECC.
	 */
	if (nand->on_die_ecc_enabled) {
		nand_chip->ecc.strength = 1;
		nand_chip->ecc.size = mtd->writesize;
		nand_chip->ecc.bytes = 0;
		nand_chip->ecc.layout = &ondie_nand_oob_64;
	} else {
		if (arasan_nand_ecc_init(mtd)) {
			printf("%s: nand_ecc_init failed\n", __func__);
			goto fail;
		}
	}

	if (nand_scan_tail(mtd)) {
		printf("%s: nand_scan_tail failed\n", __func__);
		goto fail;
	}

	if (nand_register(devnum, mtd)) {
		printf("Nand Register Fail\n");
		goto fail;
	}

	return 0;
fail:
	free(nand);
	return err;
}

void board_nand_init(void)
{
	struct nand_chip *nand = &nand_chip[0];

	if (arasan_nand_init(nand, 0))
		puts("NAND init failed\n");
}