// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2018 Stefan Roese <sr@denx.de>
 *
 * Derived from drivers/mtd/nand/spi/micron.c
 * Copyright (c) 2016-2017 Micron Technology, Inc.
 */

#ifndef __UBOOT__
#include <linux/device.h>
#include <linux/kernel.h>
#endif
#include <linux/mtd/spinand.h>
Stefan Roese584957a2019-01-24 17:18:19 +010015#define SPINAND_MFR_GIGADEVICE 0xC8
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +010016
Stefan Roese584957a2019-01-24 17:18:19 +010017#define GD5FXGQ4XA_STATUS_ECC_1_7_BITFLIPS (1 << 4)
18#define GD5FXGQ4XA_STATUS_ECC_8_BITFLIPS (3 << 4)
Stefan Roesecdb295c2018-08-16 18:05:08 +020019
Reto Schneiderb97bb612021-02-11 13:05:48 +010020#define GD5FXGQ5XE_STATUS_ECC_1_4_BITFLIPS (1 << 4)
21#define GD5FXGQ5XE_STATUS_ECC_4_BITFLIPS (3 << 4)
Stefan Roesecdb295c2018-08-16 18:05:08 +020022
Reto Schneiderb97bb612021-02-11 13:05:48 +010023#define GD5FXGQXXEXXG_REG_STATUS2 0xf0
24
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +010025#define GD5FXGQ4UXFXXG_STATUS_ECC_MASK (7 << 4)
26#define GD5FXGQ4UXFXXG_STATUS_ECC_NO_BITFLIPS (0 << 4)
27#define GD5FXGQ4UXFXXG_STATUS_ECC_1_3_BITFLIPS (1 << 4)
28#define GD5FXGQ4UXFXXG_STATUS_ECC_UNCOR_ERROR (7 << 4)
29
30static SPINAND_OP_VARIANTS(read_cache_variants,
Hauke Mehrtens3d431b92021-02-11 13:05:47 +010031 SPINAND_PAGE_READ_FROM_CACHE_QUADIO_OP(0, 1, NULL, 0),
Stefan Roesecdb295c2018-08-16 18:05:08 +020032 SPINAND_PAGE_READ_FROM_CACHE_X4_OP(0, 1, NULL, 0),
33 SPINAND_PAGE_READ_FROM_CACHE_DUALIO_OP(0, 1, NULL, 0),
34 SPINAND_PAGE_READ_FROM_CACHE_X2_OP(0, 1, NULL, 0),
35 SPINAND_PAGE_READ_FROM_CACHE_OP(true, 0, 1, NULL, 0),
36 SPINAND_PAGE_READ_FROM_CACHE_OP(false, 0, 1, NULL, 0));
37
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +010038static SPINAND_OP_VARIANTS(read_cache_variants_f,
39 SPINAND_PAGE_READ_FROM_CACHE_QUADIO_OP(0, 1, NULL, 0),
40 SPINAND_PAGE_READ_FROM_CACHE_X4_OP_3A(0, 1, NULL, 0),
Reto Schneiderb97bb612021-02-11 13:05:48 +010041 SPINAND_PAGE_READ_FROM_CACHE_DUALIO_OP(0, 1, NULL, 0),
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +010042 SPINAND_PAGE_READ_FROM_CACHE_X2_OP_3A(0, 1, NULL, 0),
43 SPINAND_PAGE_READ_FROM_CACHE_OP_3A(true, 0, 1, NULL, 0),
44 SPINAND_PAGE_READ_FROM_CACHE_OP_3A(false, 0, 0, NULL, 0));
Reto Schneiderb97bb612021-02-11 13:05:48 +010045
Stefan Roesecdb295c2018-08-16 18:05:08 +020046static SPINAND_OP_VARIANTS(write_cache_variants,
47 SPINAND_PROG_LOAD_X4(true, 0, NULL, 0),
48 SPINAND_PROG_LOAD(true, 0, NULL, 0));
49
50static SPINAND_OP_VARIANTS(update_cache_variants,
51 SPINAND_PROG_LOAD_X4(false, 0, NULL, 0),
52 SPINAND_PROG_LOAD(false, 0, NULL, 0));
53
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +010054static int gd5fxgq4xa_ooblayout_ecc(struct mtd_info *mtd, int section,
55 struct mtd_oob_region *region)
56{
57 if (section > 3)
58 return -ERANGE;
59
60 region->offset = (16 * section) + 8;
61 region->length = 8;
62
63 return 0;
64}
65
66static int gd5fxgq4xa_ooblayout_free(struct mtd_info *mtd, int section,
67 struct mtd_oob_region *region)
68{
69 if (section > 3)
70 return -ERANGE;
71
72 if (section) {
73 region->offset = 16 * section;
74 region->length = 8;
75 } else {
76 /* section 0 has one byte reserved for bad block mark */
77 region->offset = 1;
78 region->length = 7;
79 }
80 return 0;
81}
82
83static const struct mtd_ooblayout_ops gd5fxgq4xa_ooblayout = {
84 .ecc = gd5fxgq4xa_ooblayout_ecc,
85 .rfree = gd5fxgq4xa_ooblayout_free,
86};
87
88static int gd5fxgq4xa_ecc_get_status(struct spinand_device *spinand,
89 u8 status)
90{
91 switch (status & STATUS_ECC_MASK) {
92 case STATUS_ECC_NO_BITFLIPS:
93 return 0;
94
95 case GD5FXGQ4XA_STATUS_ECC_1_7_BITFLIPS:
96 /* 1-7 bits are flipped. return the maximum. */
97 return 7;
98
99 case GD5FXGQ4XA_STATUS_ECC_8_BITFLIPS:
100 return 8;
101
102 case STATUS_ECC_UNCOR_ERROR:
103 return -EBADMSG;
104
105 default:
106 break;
107 }
108
109 return -EINVAL;
110}
111
112static int gd5fxgqx_variant2_ooblayout_ecc(struct mtd_info *mtd, int section,
Stefan Roese584957a2019-01-24 17:18:19 +0100113 struct mtd_oob_region *region)
Stefan Roesecdb295c2018-08-16 18:05:08 +0200114{
115 if (section)
116 return -ERANGE;
117
118 region->offset = 64;
119 region->length = 64;
120
121 return 0;
122}
123
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +0100124static int gd5fxgqx_variant2_ooblayout_free(struct mtd_info *mtd, int section,
Stefan Roese584957a2019-01-24 17:18:19 +0100125 struct mtd_oob_region *region)
Stefan Roesecdb295c2018-08-16 18:05:08 +0200126{
127 if (section)
128 return -ERANGE;
129
Stefan Roese584957a2019-01-24 17:18:19 +0100130 /* Reserve 1 bytes for the BBM. */
131 region->offset = 1;
132 region->length = 63;
Stefan Roesecdb295c2018-08-16 18:05:08 +0200133
134 return 0;
135}
136
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +0100137/* Valid for Q4/Q5 and Q6 (untested) devices */
138static const struct mtd_ooblayout_ops gd5fxgqx_variant2_ooblayout = {
139 .ecc = gd5fxgqx_variant2_ooblayout_ecc,
140 .rfree = gd5fxgqx_variant2_ooblayout_free,
141};
142
143static int gd5fxgq4xc_ooblayout_256_ecc(struct mtd_info *mtd, int section,
144 struct mtd_oob_region *oobregion)
145{
146 if (section)
147 return -ERANGE;
148
149 oobregion->offset = 128;
150 oobregion->length = 128;
151
152 return 0;
153}
154
155static int gd5fxgq4xc_ooblayout_256_free(struct mtd_info *mtd, int section,
156 struct mtd_oob_region *oobregion)
157{
158 if (section)
159 return -ERANGE;
160
161 oobregion->offset = 1;
162 oobregion->length = 127;
163
164 return 0;
165}
166
167static const struct mtd_ooblayout_ops gd5fxgq4xc_oob_256_ops = {
168 .ecc = gd5fxgq4xc_ooblayout_256_ecc,
169 .rfree = gd5fxgq4xc_ooblayout_256_free,
170};
171
172static int gd5fxgq4uexxg_ecc_get_status(struct spinand_device *spinand,
Stefan Roese584957a2019-01-24 17:18:19 +0100173 u8 status)
Stefan Roesecdb295c2018-08-16 18:05:08 +0200174{
Stefan Roese584957a2019-01-24 17:18:19 +0100175 u8 status2;
Reto Schneiderb97bb612021-02-11 13:05:48 +0100176 struct spi_mem_op op = SPINAND_GET_FEATURE_OP(GD5FXGQXXEXXG_REG_STATUS2,
Stefan Roese584957a2019-01-24 17:18:19 +0100177 &status2);
178 int ret;
Stefan Roesecdb295c2018-08-16 18:05:08 +0200179
Stefan Roese584957a2019-01-24 17:18:19 +0100180 switch (status & STATUS_ECC_MASK) {
Stefan Roesecdb295c2018-08-16 18:05:08 +0200181 case STATUS_ECC_NO_BITFLIPS:
182 return 0;
183
Stefan Roese584957a2019-01-24 17:18:19 +0100184 case GD5FXGQ4XA_STATUS_ECC_1_7_BITFLIPS:
185 /*
186 * Read status2 register to determine a more fine grained
187 * bit error status
188 */
189 ret = spi_mem_exec_op(spinand->slave, &op);
190 if (ret)
191 return ret;
Stefan Roesecdb295c2018-08-16 18:05:08 +0200192
Stefan Roese584957a2019-01-24 17:18:19 +0100193 /*
194 * 4 ... 7 bits are flipped (1..4 can't be detected, so
195 * report the maximum of 4 in this case
196 */
197 /* bits sorted this way (3...0): ECCS1,ECCS0,ECCSE1,ECCSE0 */
198 return ((status & STATUS_ECC_MASK) >> 2) |
199 ((status2 & STATUS_ECC_MASK) >> 4);
200
201 case GD5FXGQ4XA_STATUS_ECC_8_BITFLIPS:
Stefan Roesecdb295c2018-08-16 18:05:08 +0200202 return 8;
203
204 case STATUS_ECC_UNCOR_ERROR:
205 return -EBADMSG;
206
207 default:
208 break;
209 }
210
211 return -EINVAL;
212}
213
Reto Schneiderb97bb612021-02-11 13:05:48 +0100214static int gd5fxgq5xexxg_ecc_get_status(struct spinand_device *spinand,
215 u8 status)
216{
217 u8 status2;
218 struct spi_mem_op op = SPINAND_GET_FEATURE_OP(GD5FXGQXXEXXG_REG_STATUS2,
219 &status2);
220 int ret;
221
222 switch (status & STATUS_ECC_MASK) {
223 case STATUS_ECC_NO_BITFLIPS:
224 return 0;
225
226 case GD5FXGQ5XE_STATUS_ECC_1_4_BITFLIPS:
227 /*
228 * Read status2 register to determine a more fine grained
229 * bit error status
230 */
231 ret = spi_mem_exec_op(spinand->slave, &op);
232 if (ret)
233 return ret;
234
235 /*
236 * 1 ... 4 bits are flipped (and corrected)
237 */
238 /* bits sorted this way (1...0): ECCSE1, ECCSE0 */
239 return ((status2 & STATUS_ECC_MASK) >> 4) + 1;
240
241 case STATUS_ECC_UNCOR_ERROR:
242 return -EBADMSG;
243
244 default:
245 break;
246 }
247
248 return -EINVAL;
249}
250
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +0100251static int gd5fxgq4ufxxg_ecc_get_status(struct spinand_device *spinand,
252 u8 status)
253{
254 switch (status & GD5FXGQ4UXFXXG_STATUS_ECC_MASK) {
255 case GD5FXGQ4UXFXXG_STATUS_ECC_NO_BITFLIPS:
256 return 0;
257
258 case GD5FXGQ4UXFXXG_STATUS_ECC_1_3_BITFLIPS:
259 return 3;
260
261 case GD5FXGQ4UXFXXG_STATUS_ECC_UNCOR_ERROR:
262 return -EBADMSG;
263
264 default: /* (2 << 4) through (6 << 4) are 4-8 corrected errors */
265 return ((status & GD5FXGQ4UXFXXG_STATUS_ECC_MASK) >> 4) + 2;
266 }
267
268 return -EINVAL;
269}
Stefan Roese584957a2019-01-24 17:18:19 +0100270
Stefan Roesecdb295c2018-08-16 18:05:08 +0200271static const struct spinand_info gigadevice_spinand_table[] = {
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +0100272 SPINAND_INFO("GD5F1GQ4xA",
273 SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xf1),
274 NAND_MEMORG(1, 2048, 64, 64, 1024, 20, 1, 1, 1),
275 NAND_ECCREQ(8, 512),
276 SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
277 &write_cache_variants,
278 &update_cache_variants),
279 SPINAND_HAS_QE_BIT,
280 SPINAND_ECCINFO(&gd5fxgq4xa_ooblayout,
281 gd5fxgq4xa_ecc_get_status)),
282 SPINAND_INFO("GD5F2GQ4xA",
283 SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xf2),
284 NAND_MEMORG(1, 2048, 64, 64, 2048, 40, 1, 1, 1),
285 NAND_ECCREQ(8, 512),
286 SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
287 &write_cache_variants,
288 &update_cache_variants),
289 SPINAND_HAS_QE_BIT,
290 SPINAND_ECCINFO(&gd5fxgq4xa_ooblayout,
291 gd5fxgq4xa_ecc_get_status)),
292 SPINAND_INFO("GD5F4GQ4xA",
293 SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xf4),
294 NAND_MEMORG(1, 2048, 64, 64, 4096, 80, 1, 1, 1),
295 NAND_ECCREQ(8, 512),
296 SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
297 &write_cache_variants,
298 &update_cache_variants),
299 SPINAND_HAS_QE_BIT,
300 SPINAND_ECCINFO(&gd5fxgq4xa_ooblayout,
301 gd5fxgq4xa_ecc_get_status)),
302 SPINAND_INFO("GD5F4GQ4RC",
303 SPINAND_ID(SPINAND_READID_METHOD_OPCODE, 0xa4, 0x68),
304 NAND_MEMORG(1, 4096, 256, 64, 2048, 40, 1, 1, 1),
305 NAND_ECCREQ(8, 512),
306 SPINAND_INFO_OP_VARIANTS(&read_cache_variants_f,
307 &write_cache_variants,
308 &update_cache_variants),
309 SPINAND_HAS_QE_BIT,
310 SPINAND_ECCINFO(&gd5fxgq4xc_oob_256_ops,
311 gd5fxgq4ufxxg_ecc_get_status)),
312 SPINAND_INFO("GD5F4GQ4UC",
313 SPINAND_ID(SPINAND_READID_METHOD_OPCODE, 0xb4, 0x68),
314 NAND_MEMORG(1, 4096, 256, 64, 2048, 40, 1, 1, 1),
315 NAND_ECCREQ(8, 512),
316 SPINAND_INFO_OP_VARIANTS(&read_cache_variants_f,
317 &write_cache_variants,
318 &update_cache_variants),
319 SPINAND_HAS_QE_BIT,
320 SPINAND_ECCINFO(&gd5fxgq4xc_oob_256_ops,
321 gd5fxgq4ufxxg_ecc_get_status)),
Mikhail Kshevetskiy72010312023-01-10 12:58:38 +0100322 SPINAND_INFO("GD5F1GQ4UExxG",
323 SPINAND_ID(SPINAND_READID_METHOD_OPCODE_ADDR, 0xd1),
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +0100324 NAND_MEMORG(1, 2048, 128, 64, 1024, 20, 1, 1, 1),
325 NAND_ECCREQ(8, 512),
326 SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
327 &write_cache_variants,
328 &update_cache_variants),
329 SPINAND_HAS_QE_BIT,
330 SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
331 gd5fxgq4uexxg_ecc_get_status)),
332 SPINAND_INFO("GD5F1GQ4UFxxG",
333 SPINAND_ID(SPINAND_READID_METHOD_OPCODE, 0xb1, 0x48),
334 NAND_MEMORG(1, 2048, 128, 64, 1024, 20, 1, 1, 1),
Stefan Roese584957a2019-01-24 17:18:19 +0100335 NAND_ECCREQ(8, 512),
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +0100336 SPINAND_INFO_OP_VARIANTS(&read_cache_variants_f,
Stefan Roesecdb295c2018-08-16 18:05:08 +0200337 &write_cache_variants,
338 &update_cache_variants),
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +0100339 SPINAND_HAS_QE_BIT,
340 SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
341 gd5fxgq4ufxxg_ecc_get_status)),
Mikhail Kshevetskiy72010312023-01-10 12:58:38 +0100342 SPINAND_INFO("GD5F1GQ5UExxG",
343 SPINAND_ID(SPINAND_READID_METHOD_OPCODE_DUMMY, 0x51),
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +0100344 NAND_MEMORG(1, 2048, 128, 64, 1024, 20, 1, 1, 1),
Reto Schneiderb97bb612021-02-11 13:05:48 +0100345 NAND_ECCREQ(4, 512),
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +0100346 SPINAND_INFO_OP_VARIANTS(&read_cache_variants,
Reto Schneiderb97bb612021-02-11 13:05:48 +0100347 &write_cache_variants,
348 &update_cache_variants),
Mikhail Kshevetskiy2a1e78b2023-01-10 12:58:40 +0100349 SPINAND_HAS_QE_BIT,
350 SPINAND_ECCINFO(&gd5fxgqx_variant2_ooblayout,
Reto Schneiderb97bb612021-02-11 13:05:48 +0100351 gd5fxgq5xexxg_ecc_get_status)),
Stefan Roesecdb295c2018-08-16 18:05:08 +0200352};
353
Stefan Roesecdb295c2018-08-16 18:05:08 +0200354static const struct spinand_manufacturer_ops gigadevice_spinand_manuf_ops = {
Stefan Roesecdb295c2018-08-16 18:05:08 +0200355};
356
357const struct spinand_manufacturer gigadevice_spinand_manufacturer = {
358 .id = SPINAND_MFR_GIGADEVICE,
359 .name = "GigaDevice",
Mikhail Kshevetskiy72010312023-01-10 12:58:38 +0100360 .chips = gigadevice_spinand_table,
361 .nchips = ARRAY_SIZE(gigadevice_spinand_table),
Stefan Roesecdb295c2018-08-16 18:05:08 +0200362 .ops = &gigadevice_spinand_manuf_ops,
363};