// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2012
 * Konstantin Kozhevnikov, Cogent Embedded
 *
 * based on nand_spl_simple code
 *
 * (C) Copyright 2006-2008
 * Stefan Roese, DENX Software Engineering, sr@denx.de.
 */

#include <common.h>
#include <nand.h>
#include <asm/io.h>
#include <linux/delay.h>
#include <linux/mtd/nand_ecc.h>
#include <linux/mtd/rawnand.h>

static int nand_ecc_pos[] = CFG_SYS_NAND_ECCPOS;
static struct mtd_info *mtd;
static struct nand_chip nand_chip;

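/* ECC geometry: number of ECC steps per page and total ECC bytes per page */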
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / \
					CFG_SYS_NAND_ECCSIZE)
#define ECCTOTAL	(ECCSTEPS * CFG_SYS_NAND_ECCBYTES)

/*
 * NAND command for large page NAND devices (2k)
 */
static int nand_command(int block, int page, uint32_t offs,
	u8 cmd)
{
	struct nand_chip *this = mtd_to_nand(mtd);
	int page_addr = page + block * CONFIG_SYS_NAND_PAGE_COUNT;
	void (*hwctrl)(struct mtd_info *mtd, int cmd,
			unsigned int ctrl) = this->cmd_ctrl;

	while (!this->dev_ready(mtd))
		;

	/* Emulate NAND_CMD_READOOB */
	if (cmd == NAND_CMD_READOOB) {
		offs += CONFIG_SYS_NAND_PAGE_SIZE;
		cmd = NAND_CMD_READ0;
	}

	/* Begin command latch cycle */
	hwctrl(mtd, cmd, NAND_CTRL_CLE | NAND_CTRL_CHANGE);

	if (cmd == NAND_CMD_RESET) {
		hwctrl(mtd, NAND_CMD_NONE, NAND_NCE | NAND_CTRL_CHANGE);

		/*
		 * Apply this short delay always to ensure that we do wait
		 * tWB in any case on any machine.
		 */
		ndelay(150);

		while (!this->dev_ready(mtd))
			;
		return 0;
	}

	/* Shift the offset from byte addressing to word addressing. */
	if ((this->options & NAND_BUSWIDTH_16) && !nand_opcode_8bits(cmd))
		offs >>= 1;

	/* Set ALE and clear CLE to start address cycle */
	/* Column address */
	hwctrl(mtd, offs & 0xff,
	       NAND_CTRL_ALE | NAND_CTRL_CHANGE); /* A[7:0] */
	hwctrl(mtd, (offs >> 8) & 0xff, NAND_CTRL_ALE); /* A[11:8] */
	/* Row address */
	if (cmd != NAND_CMD_RNDOUT) {
		hwctrl(mtd, (page_addr & 0xff),
		       NAND_CTRL_ALE); /* A[19:12] */
		hwctrl(mtd, ((page_addr >> 8) & 0xff),
		       NAND_CTRL_ALE); /* A[27:20] */
#ifdef CONFIG_SYS_NAND_5_ADDR_CYCLE
		/* One more address cycle for devices > 128MiB */
		hwctrl(mtd, (page_addr >> 16) & 0x0f,
		       NAND_CTRL_ALE); /* A[31:28] */
#endif
	}

	hwctrl(mtd, NAND_CMD_NONE, NAND_NCE | NAND_CTRL_CHANGE);

	/*
	 * Program and erase have their own busy handlers; status and
	 * sequential in need no delay.
	 */
	switch (cmd) {
	case NAND_CMD_CACHEDPROG:
	case NAND_CMD_PAGEPROG:
	case NAND_CMD_ERASE1:
	case NAND_CMD_ERASE2:
	case NAND_CMD_SEQIN:
	case NAND_CMD_RNDIN:
	case NAND_CMD_STATUS:
		return 0;

	case NAND_CMD_RNDOUT:
		/* No ready / busy check necessary */
		hwctrl(mtd, NAND_CMD_RNDOUTSTART, NAND_CTRL_CLE |
		       NAND_CTRL_CHANGE);
		hwctrl(mtd, NAND_CMD_NONE, NAND_NCE | NAND_CTRL_CHANGE);
		return 0;

	case NAND_CMD_READ0:
		/* Latch in address */
		hwctrl(mtd, NAND_CMD_READSTART,
		       NAND_CTRL_CLE | NAND_CTRL_CHANGE);
		hwctrl(mtd, NAND_CMD_NONE, NAND_NCE | NAND_CTRL_CHANGE);
	}

	/*
	 * Apply this short delay always to ensure that we do wait tWB in
	 * any case on any machine.
	 */
	ndelay(150);

	while (!this->dev_ready(mtd))
		;

	return 0;
}

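/*
 * Check the bad block marker in the OOB area of the first page of the
 * given block. Returns 1 if the block is marked bad, 0 otherwise.
 */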
static int nand_is_bad_block(int block)
{
	struct nand_chip *this = mtd_to_nand(mtd);

	nand_command(block, 0, CONFIG_SYS_NAND_BAD_BLOCK_POS,
		NAND_CMD_READOOB);

	/*
	 * Read one byte (or two if it's a 16-bit chip).
	 */
	if (this->options & NAND_BUSWIDTH_16) {
		if (readw(this->IO_ADDR_R) != 0xffff)
			return 1;
	} else {
		if (readb(this->IO_ADDR_R) != 0xff)
			return 1;
	}

	return 0;
}

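/*
 * Read one full page into dst: for each ECC step, read the data chunk and
 * its ECC bytes from the OOB area via NAND_CMD_RNDOUT column jumps, then
 * run ECC correction over the buffer.
 */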
static int nand_read_page(int block, int page, void *dst)
{
	struct nand_chip *this = mtd_to_nand(mtd);
	u_char ecc_calc[ECCTOTAL];
	u_char ecc_code[ECCTOTAL];
	u_char oob_data[CONFIG_SYS_NAND_OOBSIZE];
	int i;
	int eccsize = CFG_SYS_NAND_ECCSIZE;
	int eccbytes = CFG_SYS_NAND_ECCBYTES;
	int eccsteps = ECCSTEPS;
	uint8_t *p = dst;
	uint32_t data_pos = 0;
	uint8_t *oob = &oob_data[0] + nand_ecc_pos[0];
	uint32_t oob_pos = eccsize * eccsteps + nand_ecc_pos[0];

	nand_command(block, page, 0, NAND_CMD_READ0);

	for (i = 0; eccsteps; eccsteps--, i += eccbytes, p += eccsize) {
		this->ecc.hwctl(mtd, NAND_ECC_READ);
		nand_command(block, page, data_pos, NAND_CMD_RNDOUT);

		this->read_buf(mtd, p, eccsize);

		nand_command(block, page, oob_pos, NAND_CMD_RNDOUT);

		this->read_buf(mtd, oob, eccbytes);
		this->ecc.calculate(mtd, p, &ecc_calc[i]);

		data_pos += eccsize;
		oob_pos += eccbytes;
		oob += eccbytes;
	}

	/* Pick the ECC bytes out of the oob data */
	for (i = 0; i < ECCTOTAL; i++)
		ecc_code[i] = oob_data[nand_ecc_pos[i]];

	eccsteps = ECCSTEPS;
	p = dst;

	for (i = 0; eccsteps; eccsteps--, i += eccbytes, p += eccsize) {
		/*
		 * There is nothing we can do here about an error reported
		 * by correct_data(); we just hope that all possible errors
		 * are corrected by this routine.
		 */
		this->ecc.correct(mtd, p, &ecc_code[i], &ecc_calc[i]);
	}

	return 0;
}

/* nand_init() - initialize data to make NAND usable by SPL */
void nand_init(void)
{
	/*
	 * Init board-specific NAND support
	 */
	mtd = nand_to_mtd(&nand_chip);
	nand_chip.IO_ADDR_R = nand_chip.IO_ADDR_W =
		(void __iomem *)CFG_SYS_NAND_BASE;
	board_nand_init(&nand_chip);

	if (nand_chip.select_chip)
		nand_chip.select_chip(mtd, 0);

	/* NAND chip may require reset after power-on */
	nand_command(0, 0, 0, NAND_CMD_RESET);
}

/* Deselect the chip after the operation */
void nand_deselect(void)
{
	if (nand_chip.select_chip)
		nand_chip.select_chip(mtd, -1);
}

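/*
 * Pull in the common SPL NAND loader code (nand_spl_load_image() and
 * friends), which is built on top of the nand_read_page() and
 * nand_is_bad_block() helpers defined above.
 */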
#include "nand_spl_loaders.c"