/*
 * Copyright (c) 2019-2020, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
 */

#include <assert.h>
#include <errno.h>
#include <limits.h>
#include <stdint.h>

#include <libfdt.h>

#include <platform_def.h>

#include <common/debug.h>
#include <drivers/delay_timer.h>
#include <drivers/raw_nand.h>
#include <drivers/st/stm32_fmc2_nand.h>
#include <drivers/st/stm32_gpio.h>
#include <drivers/st/stm32mp_reset.h>
#include <lib/mmio.h>
#include <lib/utils_def.h>

/* Timeout for device interface reset */
#define TIMEOUT_US_1_MS			1000U

/* FMC2 Compatibility */
#define DT_FMC2_COMPAT			"st,stm32mp15-fmc2"
#define MAX_CS				2U

/* FMC2 Controller Registers */
#define FMC2_BCR1			0x00U
#define FMC2_PCR			0x80U
#define FMC2_SR				0x84U
#define FMC2_PMEM			0x88U
#define FMC2_PATT			0x8CU
#define FMC2_HECCR			0x94U
#define FMC2_BCHISR			0x254U
#define FMC2_BCHDSR0			0x27CU
#define FMC2_BCHDSR1			0x280U
#define FMC2_BCHDSR2			0x284U
#define FMC2_BCHDSR3			0x288U
#define FMC2_BCHDSR4			0x28CU

/* FMC2_BCR1 register */
#define FMC2_BCR1_FMC2EN		BIT(31)
/* FMC2_PCR register */
#define FMC2_PCR_PWAITEN		BIT(1)
#define FMC2_PCR_PBKEN			BIT(2)
#define FMC2_PCR_PWID_MASK		GENMASK_32(5, 4)
#define FMC2_PCR_PWID(x)		(((x) << 4) & FMC2_PCR_PWID_MASK)
#define FMC2_PCR_PWID_8			0x0U
#define FMC2_PCR_PWID_16		0x1U
#define FMC2_PCR_ECCEN			BIT(6)
#define FMC2_PCR_ECCALG			BIT(8)
#define FMC2_PCR_TCLR_MASK		GENMASK_32(12, 9)
#define FMC2_PCR_TCLR(x)		(((x) << 9) & FMC2_PCR_TCLR_MASK)
#define FMC2_PCR_TCLR_DEFAULT		0xFU
#define FMC2_PCR_TAR_MASK		GENMASK_32(16, 13)
#define FMC2_PCR_TAR(x)			(((x) << 13) & FMC2_PCR_TAR_MASK)
#define FMC2_PCR_TAR_DEFAULT		0xFU
#define FMC2_PCR_ECCSS_MASK		GENMASK_32(19, 17)
#define FMC2_PCR_ECCSS(x)		(((x) << 17) & FMC2_PCR_ECCSS_MASK)
#define FMC2_PCR_ECCSS_512		0x1U
#define FMC2_PCR_ECCSS_2048		0x3U
#define FMC2_PCR_BCHECC			BIT(24)
#define FMC2_PCR_WEN			BIT(25)
/* FMC2_SR register */
#define FMC2_SR_NWRF			BIT(6)
/* FMC2_PMEM register */
#define FMC2_PMEM_MEMSET(x)		(((x) & GENMASK_32(7, 0)) << 0)
#define FMC2_PMEM_MEMWAIT(x)		(((x) & GENMASK_32(7, 0)) << 8)
#define FMC2_PMEM_MEMHOLD(x)		(((x) & GENMASK_32(7, 0)) << 16)
#define FMC2_PMEM_MEMHIZ(x)		(((x) & GENMASK_32(7, 0)) << 24)
#define FMC2_PMEM_DEFAULT		0x0A0A0A0AU
/* FMC2_PATT register */
#define FMC2_PATT_ATTSET(x)		(((x) & GENMASK_32(7, 0)) << 0)
#define FMC2_PATT_ATTWAIT(x)		(((x) & GENMASK_32(7, 0)) << 8)
#define FMC2_PATT_ATTHOLD(x)		(((x) & GENMASK_32(7, 0)) << 16)
#define FMC2_PATT_ATTHIZ(x)		(((x) & GENMASK_32(7, 0)) << 24)
#define FMC2_PATT_DEFAULT		0x0A0A0A0AU
/* FMC2_BCHISR register */
#define FMC2_BCHISR_DERF		BIT(1)
/* FMC2_BCHDSR0 register */
#define FMC2_BCHDSR0_DUE		BIT(0)
#define FMC2_BCHDSR0_DEF		BIT(1)
#define FMC2_BCHDSR0_DEN_MASK		GENMASK_32(7, 4)
#define FMC2_BCHDSR0_DEN_SHIFT		4U
/* FMC2_BCHDSR1 register */
#define FMC2_BCHDSR1_EBP1_MASK		GENMASK_32(12, 0)
#define FMC2_BCHDSR1_EBP2_MASK		GENMASK_32(28, 16)
#define FMC2_BCHDSR1_EBP2_SHIFT		16U
/* FMC2_BCHDSR2 register */
#define FMC2_BCHDSR2_EBP3_MASK		GENMASK_32(12, 0)
#define FMC2_BCHDSR2_EBP4_MASK		GENMASK_32(28, 16)
#define FMC2_BCHDSR2_EBP4_SHIFT		16U
/* FMC2_BCHDSR3 register */
#define FMC2_BCHDSR3_EBP5_MASK		GENMASK_32(12, 0)
#define FMC2_BCHDSR3_EBP6_MASK		GENMASK_32(28, 16)
#define FMC2_BCHDSR3_EBP6_SHIFT		16U
/* FMC2_BCHDSR4 register */
#define FMC2_BCHDSR4_EBP7_MASK		GENMASK_32(12, 0)
#define FMC2_BCHDSR4_EBP8_MASK		GENMASK_32(28, 16)
#define FMC2_BCHDSR4_EBP8_SHIFT		16U

/* Timings */
#define FMC2_THIZ			0x01U
#define FMC2_TIO			8000U
#define FMC2_TSYNC			3000U
#define FMC2_PCR_TIMING_MASK		GENMASK_32(3, 0)
#define FMC2_PMEM_PATT_TIMING_MASK	GENMASK_32(7, 0)

#define FMC2_BBM_LEN			2U
#define FMC2_MAX_ECC_BYTES		14U
#define TIMEOUT_US_10_MS		10000U
#define FMC2_PSEC_PER_MSEC		(1000UL * 1000UL * 1000UL)

enum stm32_fmc2_ecc {
	FMC2_ECC_HAM = 1U,
	FMC2_ECC_BCH4 = 4U,
	FMC2_ECC_BCH8 = 8U
};

struct stm32_fmc2_cs_reg {
	uintptr_t data_base;
	uintptr_t cmd_base;
	uintptr_t addr_base;
};

struct stm32_fmc2_nand_timings {
	uint8_t tclr;
	uint8_t tar;
	uint8_t thiz;
	uint8_t twait;
	uint8_t thold_mem;
	uint8_t tset_mem;
	uint8_t thold_att;
	uint8_t tset_att;
};

struct stm32_fmc2_nfc {
	uintptr_t reg_base;
	struct stm32_fmc2_cs_reg cs[MAX_CS];
	unsigned long clock_id;
	unsigned int reset_id;
	uint8_t cs_sel;
};

static struct stm32_fmc2_nfc stm32_fmc2;

static uintptr_t fmc2_base(void)
{
	return stm32_fmc2.reg_base;
}

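/*
 * Derive the NAND interface timings from the FMC2 input clock.
 * The register fields are expressed in clock cycles, so each target
 * delay (taken from the NAND_Txxx_MIN/MAX constants of the raw NAND
 * framework, in picoseconds) is divided by the clock period hclkp.
 * For example, assuming a 200MHz FMC2 clock, hclkp evaluates to
 * 10^9 / 200000 = 5000 ps per cycle.
 */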
static void stm32_fmc2_nand_setup_timing(void)
{
	struct stm32_fmc2_nand_timings tims;
	unsigned long hclk = stm32mp_clk_get_rate(stm32_fmc2.clock_id);
	unsigned long hclkp = FMC2_PSEC_PER_MSEC / (hclk / 1000U);
	unsigned long timing, tar, tclr, thiz, twait;
	unsigned long tset_mem, tset_att, thold_mem, thold_att;
	uint32_t pcr, pmem, patt;

	tar = MAX(hclkp, NAND_TAR_MIN);
	timing = div_round_up(tar, hclkp) - 1U;
	tims.tar = MIN(timing, (unsigned long)FMC2_PCR_TIMING_MASK);

	tclr = MAX(hclkp, NAND_TCLR_MIN);
	timing = div_round_up(tclr, hclkp) - 1U;
	tims.tclr = MIN(timing, (unsigned long)FMC2_PCR_TIMING_MASK);

	tims.thiz = FMC2_THIZ;
	thiz = (tims.thiz + 1U) * hclkp;

	/*
	 * tWAIT > tRP
	 * tWAIT > tWP
	 * tWAIT > tREA + tIO
	 */
	twait = MAX(hclkp, NAND_TRP_MIN);
	twait = MAX(twait, NAND_TWP_MIN);
	twait = MAX(twait, NAND_TREA_MAX + FMC2_TIO);
	timing = div_round_up(twait, hclkp);
	tims.twait = CLAMP(timing, 1UL,
			   (unsigned long)FMC2_PMEM_PATT_TIMING_MASK);

	/*
	 * tSETUP_MEM > tCS - tWAIT
	 * tSETUP_MEM > tALS - tWAIT
	 * tSETUP_MEM > tDS - (tWAIT - tHIZ)
	 */
	tset_mem = hclkp;
	if ((twait < NAND_TCS_MIN) && (tset_mem < (NAND_TCS_MIN - twait))) {
		tset_mem = NAND_TCS_MIN - twait;
	}
	if ((twait < NAND_TALS_MIN) && (tset_mem < (NAND_TALS_MIN - twait))) {
		tset_mem = NAND_TALS_MIN - twait;
	}
	if ((twait > thiz) && ((twait - thiz) < NAND_TDS_MIN) &&
	    (tset_mem < (NAND_TDS_MIN - (twait - thiz)))) {
		tset_mem = NAND_TDS_MIN - (twait - thiz);
	}
	timing = div_round_up(tset_mem, hclkp);
	tims.tset_mem = CLAMP(timing, 1UL,
			      (unsigned long)FMC2_PMEM_PATT_TIMING_MASK);

	/*
	 * tHOLD_MEM > tCH
	 * tHOLD_MEM > tREH - tSETUP_MEM
	 * tHOLD_MEM > max(tRC, tWC) - (tSETUP_MEM + tWAIT)
	 */
	thold_mem = MAX(hclkp, NAND_TCH_MIN);
	if ((tset_mem < NAND_TREH_MIN) &&
	    (thold_mem < (NAND_TREH_MIN - tset_mem))) {
		thold_mem = NAND_TREH_MIN - tset_mem;
	}
	if (((tset_mem + twait) < NAND_TRC_MIN) &&
	    (thold_mem < (NAND_TRC_MIN - (tset_mem + twait)))) {
		thold_mem = NAND_TRC_MIN - (tset_mem + twait);
	}
	if (((tset_mem + twait) < NAND_TWC_MIN) &&
	    (thold_mem < (NAND_TWC_MIN - (tset_mem + twait)))) {
		thold_mem = NAND_TWC_MIN - (tset_mem + twait);
	}
	timing = div_round_up(thold_mem, hclkp);
	tims.thold_mem = CLAMP(timing, 1UL,
			       (unsigned long)FMC2_PMEM_PATT_TIMING_MASK);

	/*
	 * tSETUP_ATT > tCS - tWAIT
	 * tSETUP_ATT > tCLS - tWAIT
	 * tSETUP_ATT > tALS - tWAIT
	 * tSETUP_ATT > tRHW - tHOLD_MEM
	 * tSETUP_ATT > tDS - (tWAIT - tHIZ)
	 */
	tset_att = hclkp;
	if ((twait < NAND_TCS_MIN) && (tset_att < (NAND_TCS_MIN - twait))) {
		tset_att = NAND_TCS_MIN - twait;
	}
	if ((twait < NAND_TCLS_MIN) && (tset_att < (NAND_TCLS_MIN - twait))) {
		tset_att = NAND_TCLS_MIN - twait;
	}
	if ((twait < NAND_TALS_MIN) && (tset_att < (NAND_TALS_MIN - twait))) {
		tset_att = NAND_TALS_MIN - twait;
	}
	if ((thold_mem < NAND_TRHW_MIN) &&
	    (tset_att < (NAND_TRHW_MIN - thold_mem))) {
		tset_att = NAND_TRHW_MIN - thold_mem;
	}
	if ((twait > thiz) && ((twait - thiz) < NAND_TDS_MIN) &&
	    (tset_att < (NAND_TDS_MIN - (twait - thiz)))) {
		tset_att = NAND_TDS_MIN - (twait - thiz);
	}
	timing = div_round_up(tset_att, hclkp);
	tims.tset_att = CLAMP(timing, 1UL,
			      (unsigned long)FMC2_PMEM_PATT_TIMING_MASK);

	/*
	 * tHOLD_ATT > tALH
	 * tHOLD_ATT > tCH
	 * tHOLD_ATT > tCLH
	 * tHOLD_ATT > tCOH
	 * tHOLD_ATT > tDH
	 * tHOLD_ATT > tWB + tIO + tSYNC - tSETUP_MEM
	 * tHOLD_ATT > tADL - tSETUP_MEM
	 * tHOLD_ATT > tWH - tSETUP_MEM
	 * tHOLD_ATT > tWHR - tSETUP_MEM
	 * tHOLD_ATT > tRC - (tSETUP_ATT + tWAIT)
	 * tHOLD_ATT > tWC - (tSETUP_ATT + tWAIT)
	 */
	thold_att = MAX(hclkp, NAND_TALH_MIN);
	thold_att = MAX(thold_att, NAND_TCH_MIN);
	thold_att = MAX(thold_att, NAND_TCLH_MIN);
	thold_att = MAX(thold_att, NAND_TCOH_MIN);
	thold_att = MAX(thold_att, NAND_TDH_MIN);
	if (((NAND_TWB_MAX + FMC2_TIO + FMC2_TSYNC) > tset_mem) &&
	    (thold_att < (NAND_TWB_MAX + FMC2_TIO + FMC2_TSYNC - tset_mem))) {
		thold_att = NAND_TWB_MAX + FMC2_TIO + FMC2_TSYNC - tset_mem;
	}
	if ((tset_mem < NAND_TADL_MIN) &&
	    (thold_att < (NAND_TADL_MIN - tset_mem))) {
		thold_att = NAND_TADL_MIN - tset_mem;
	}
	if ((tset_mem < NAND_TWH_MIN) &&
	    (thold_att < (NAND_TWH_MIN - tset_mem))) {
		thold_att = NAND_TWH_MIN - tset_mem;
	}
	if ((tset_mem < NAND_TWHR_MIN) &&
	    (thold_att < (NAND_TWHR_MIN - tset_mem))) {
		thold_att = NAND_TWHR_MIN - tset_mem;
	}
	if (((tset_att + twait) < NAND_TRC_MIN) &&
	    (thold_att < (NAND_TRC_MIN - (tset_att + twait)))) {
		thold_att = NAND_TRC_MIN - (tset_att + twait);
	}
	if (((tset_att + twait) < NAND_TWC_MIN) &&
	    (thold_att < (NAND_TWC_MIN - (tset_att + twait)))) {
		thold_att = NAND_TWC_MIN - (tset_att + twait);
	}
	timing = div_round_up(thold_att, hclkp);
	tims.thold_att = CLAMP(timing, 1UL,
			       (unsigned long)FMC2_PMEM_PATT_TIMING_MASK);

	VERBOSE("NAND timings: %u - %u - %u - %u - %u - %u - %u - %u\n",
		tims.tclr, tims.tar, tims.thiz, tims.twait,
		tims.thold_mem, tims.tset_mem,
		tims.thold_att, tims.tset_att);

	/* Set tclr/tar timings */
	pcr = mmio_read_32(fmc2_base() + FMC2_PCR);
	pcr &= ~FMC2_PCR_TCLR_MASK;
	pcr |= FMC2_PCR_TCLR(tims.tclr);
	pcr &= ~FMC2_PCR_TAR_MASK;
	pcr |= FMC2_PCR_TAR(tims.tar);

	/* Set tset/twait/thold/thiz timings in common bank */
	pmem = FMC2_PMEM_MEMSET(tims.tset_mem);
	pmem |= FMC2_PMEM_MEMWAIT(tims.twait);
	pmem |= FMC2_PMEM_MEMHOLD(tims.thold_mem);
	pmem |= FMC2_PMEM_MEMHIZ(tims.thiz);

	/* Set tset/twait/thold/thiz timings in attribute bank */
	patt = FMC2_PATT_ATTSET(tims.tset_att);
	patt |= FMC2_PATT_ATTWAIT(tims.twait);
	patt |= FMC2_PATT_ATTHOLD(tims.thold_att);
	patt |= FMC2_PATT_ATTHIZ(tims.thiz);

	mmio_write_32(fmc2_base() + FMC2_PCR, pcr);
	mmio_write_32(fmc2_base() + FMC2_PMEM, pmem);
	mmio_write_32(fmc2_base() + FMC2_PATT, patt);
}

static void stm32_fmc2_set_buswidth_16(bool set)
{
	mmio_clrsetbits_32(fmc2_base() + FMC2_PCR, FMC2_PCR_PWID_MASK,
			   (set ? FMC2_PCR_PWID(FMC2_PCR_PWID_16) : 0U));
}

static void stm32_fmc2_set_ecc(bool enable)
{
	mmio_clrsetbits_32(fmc2_base() + FMC2_PCR, FMC2_PCR_ECCEN,
			   (enable ? FMC2_PCR_ECCEN : 0U));
}

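/*
 * Hamming single-bit error correction for a 512-byte sector.
 * The ECC read back from the spare area is XORed with the ECC
 * computed by the controller: an all-zero result means no error, a
 * syndrome weight of 12 identifies a single correctable bit flip
 * whose byte/bit position is rebuilt from the syndrome, and any
 * other pattern is reported as an error.
 */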
static int stm32_fmc2_ham_correct(uint8_t *buffer, uint8_t *eccbuffer,
				  uint8_t *ecc)
{
	uint8_t xor_ecc_ones;
	uint16_t xor_ecc_1b, xor_ecc_2b, xor_ecc_3b;
	union {
		uint32_t val;
		uint8_t bytes[4];
	} xor_ecc;

	/*
	 * Page size--------ECC_Code Size
	 * 256---------------22 bits LSB (ECC_CODE & 0x003FFFFF)
	 * 512---------------24 bits (ECC_CODE & 0x00FFFFFF)
	 * 1024--------------26 bits (ECC_CODE & 0x03FFFFFF)
	 * 2048--------------28 bits (ECC_CODE & 0x0FFFFFFF)
	 * 4096--------------30 bits (ECC_CODE & 0x3FFFFFFF)
	 * 8192--------------32 bits (ECC_CODE & 0xFFFFFFFF)
	 */

	/* For a 512-byte page size, the ECC code size is 24 bits */
	xor_ecc_1b = ecc[0] ^ eccbuffer[0];
	xor_ecc_2b = ecc[1] ^ eccbuffer[1];
	xor_ecc_3b = ecc[2] ^ eccbuffer[2];

	xor_ecc.val = 0U;
	xor_ecc.bytes[2] = xor_ecc_3b;
	xor_ecc.bytes[1] = xor_ecc_2b;
	xor_ecc.bytes[0] = xor_ecc_1b;

	if (xor_ecc.val == 0U) {
		return 0; /* No error */
	}

	xor_ecc_ones = __builtin_popcount(xor_ecc.val);
	if (xor_ecc_ones < 23U) {
		if (xor_ecc_ones == 12U) {
			uint16_t bit_address, byte_address;

			/* Correctable error */
			bit_address = ((xor_ecc_1b >> 1) & BIT(0)) |
				      ((xor_ecc_1b >> 2) & BIT(1)) |
				      ((xor_ecc_1b >> 3) & BIT(2));

			byte_address = ((xor_ecc_1b >> 7) & BIT(0)) |
				       ((xor_ecc_2b) & BIT(1)) |
				       ((xor_ecc_2b >> 1) & BIT(2)) |
				       ((xor_ecc_2b >> 2) & BIT(3)) |
				       ((xor_ecc_2b >> 3) & BIT(4)) |
				       ((xor_ecc_3b << 4) & BIT(5)) |
				       ((xor_ecc_3b << 3) & BIT(6)) |
				       ((xor_ecc_3b << 2) & BIT(7)) |
				       ((xor_ecc_3b << 1) & BIT(8));

			/* Correct the bit error in the data */
			buffer[byte_address] =
				buffer[byte_address] ^ BIT(bit_address);
			VERBOSE("Hamming: 1 ECC error corrected\n");

			return 0;
		}

		/* Non-correctable error */
		ERROR("%s: Uncorrectable ECC Errors\n", __func__);
		return -1;
	}

	/* ECC error */
	ERROR("%s: Hamming correction error\n", __func__);
	return -1;
}

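/*
 * Wait for the controller to flag FMC2_SR_NWRF, then read the 24-bit
 * Hamming ECC computed for the current sector from FMC2_HECCR and
 * disable the ECC logic.
 */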
static int stm32_fmc2_ham_calculate(uint8_t *buffer, uint8_t *ecc)
{
	uint32_t heccr;
	uint64_t timeout = timeout_init_us(TIMEOUT_US_10_MS);

	while ((mmio_read_32(fmc2_base() + FMC2_SR) & FMC2_SR_NWRF) == 0U) {
		if (timeout_elapsed(timeout)) {
			return -ETIMEDOUT;
		}
	}

	heccr = mmio_read_32(fmc2_base() + FMC2_HECCR);

	ecc[0] = heccr;
	ecc[1] = heccr >> 8;
	ecc[2] = heccr >> 16;

	/* Disable ECC */
	stm32_fmc2_set_ecc(false);

	return 0;
}

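/*
 * BCH error correction: wait for the decoder to complete
 * (FMC2_BCHISR_DERF), read the decoding status registers, and flip
 * each reported error bit that falls within the data area of the
 * sector. Up to 8 error positions can be reported; an uncorrectable
 * sector (DUE flag set) is returned as -EBADMSG.
 */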
static int stm32_fmc2_bch_correct(uint8_t *buffer, unsigned int eccsize)
{
	uint32_t bchdsr0, bchdsr1, bchdsr2, bchdsr3, bchdsr4;
	uint16_t pos[8];
	int i, den;
	uint64_t timeout = timeout_init_us(TIMEOUT_US_10_MS);

	while ((mmio_read_32(fmc2_base() + FMC2_BCHISR) &
		FMC2_BCHISR_DERF) == 0U) {
		if (timeout_elapsed(timeout)) {
			return -ETIMEDOUT;
		}
	}

	bchdsr0 = mmio_read_32(fmc2_base() + FMC2_BCHDSR0);
	bchdsr1 = mmio_read_32(fmc2_base() + FMC2_BCHDSR1);
	bchdsr2 = mmio_read_32(fmc2_base() + FMC2_BCHDSR2);
	bchdsr3 = mmio_read_32(fmc2_base() + FMC2_BCHDSR3);
	bchdsr4 = mmio_read_32(fmc2_base() + FMC2_BCHDSR4);

	/* Disable ECC */
	stm32_fmc2_set_ecc(false);

	/* No error found */
	if ((bchdsr0 & FMC2_BCHDSR0_DEF) == 0U) {
		return 0;
	}

	/* Too many errors detected */
	if ((bchdsr0 & FMC2_BCHDSR0_DUE) != 0U) {
		return -EBADMSG;
	}

	pos[0] = bchdsr1 & FMC2_BCHDSR1_EBP1_MASK;
	pos[1] = (bchdsr1 & FMC2_BCHDSR1_EBP2_MASK) >> FMC2_BCHDSR1_EBP2_SHIFT;
	pos[2] = bchdsr2 & FMC2_BCHDSR2_EBP3_MASK;
	pos[3] = (bchdsr2 & FMC2_BCHDSR2_EBP4_MASK) >> FMC2_BCHDSR2_EBP4_SHIFT;
	pos[4] = bchdsr3 & FMC2_BCHDSR3_EBP5_MASK;
	pos[5] = (bchdsr3 & FMC2_BCHDSR3_EBP6_MASK) >> FMC2_BCHDSR3_EBP6_SHIFT;
	pos[6] = bchdsr4 & FMC2_BCHDSR4_EBP7_MASK;
	pos[7] = (bchdsr4 & FMC2_BCHDSR4_EBP8_MASK) >> FMC2_BCHDSR4_EBP8_SHIFT;

	den = (bchdsr0 & FMC2_BCHDSR0_DEN_MASK) >> FMC2_BCHDSR0_DEN_SHIFT;
	for (i = 0; i < den; i++) {
		if (pos[i] < (eccsize * 8U)) {
			uint8_t bitmask = BIT(pos[i] % 8U);
			uint32_t offset = pos[i] / 8U;

			*(buffer + offset) ^= bitmask;
		}
	}

	return 0;
}

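/*
 * Re-arm the ECC engine before each sector transfer. In BCH mode the
 * FMC2_PCR_WEN bit is also cleared, as only the read (decode) path is
 * used by this driver.
 */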
static void stm32_fmc2_hwctl(struct nand_device *nand)
{
	stm32_fmc2_set_ecc(false);

	if (nand->ecc.max_bit_corr != FMC2_ECC_HAM) {
		mmio_clrbits_32(fmc2_base() + FMC2_PCR, FMC2_PCR_WEN);
	}

	stm32_fmc2_set_ecc(true);
}

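/*
 * Read one page with hardware ECC. The page is processed sector by
 * sector (ecc.size bytes at a time): the ECC engine is re-armed, the
 * sector data is read, the matching ECC bytes are fetched from the
 * spare area (skipping the 2-byte bad block marker), and the data is
 * corrected with either the Hamming or the BCH engine.
 */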
static int stm32_fmc2_read_page(struct nand_device *nand,
				unsigned int page, uintptr_t buffer)
{
	unsigned int eccsize = nand->ecc.size;
	unsigned int eccbytes = nand->ecc.bytes;
	unsigned int eccsteps = nand->page_size / eccsize;
	uint8_t ecc_corr[FMC2_MAX_ECC_BYTES];
	uint8_t ecc_cal[FMC2_MAX_ECC_BYTES] = {0U};
	uint8_t *p;
	unsigned int i;
	unsigned int s;
	int ret;

	VERBOSE(">%s page %i buffer %lx\n", __func__, page, buffer);

	ret = nand_read_page_cmd(page, 0U, 0U, 0U);
	if (ret != 0) {
		return ret;
	}

	for (s = 0U, i = nand->page_size + FMC2_BBM_LEN, p = (uint8_t *)buffer;
	     s < eccsteps;
	     s++, i += eccbytes, p += eccsize) {
		stm32_fmc2_hwctl(nand);

		/* Read the NAND page sector (512 bytes) */
		ret = nand_change_read_column_cmd(s * eccsize, (uintptr_t)p,
						  eccsize);
		if (ret != 0) {
			return ret;
		}

		if (nand->ecc.max_bit_corr == FMC2_ECC_HAM) {
			ret = stm32_fmc2_ham_calculate(p, ecc_cal);
			if (ret != 0) {
				return ret;
			}
		}

		/* Read the corresponding ECC bytes */
		ret = nand_change_read_column_cmd(i, (uintptr_t)ecc_corr,
						  eccbytes);
		if (ret != 0) {
			return ret;
		}

		/* Correct the data */
		if (nand->ecc.max_bit_corr == FMC2_ECC_HAM) {
			ret = stm32_fmc2_ham_correct(p, ecc_corr, ecc_cal);
		} else {
			ret = stm32_fmc2_bch_correct(p, eccsize);
		}

		if (ret != 0) {
			return ret;
		}
	}

	return 0;
}

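/*
 * Read 'length' bytes from the common data area. Accesses are split
 * into 8/16/32-bit MMIO reads so that the destination buffer stays
 * naturally aligned. When use_bus8 is set on a 16-bit device, the bus
 * is temporarily switched to 8-bit (e.g. for ID or parameter page
 * reads) and restored afterwards.
 */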
static void stm32_fmc2_read_data(struct nand_device *nand,
				 uint8_t *buff, unsigned int length,
				 bool use_bus8)
{
	uintptr_t data_base = stm32_fmc2.cs[stm32_fmc2.cs_sel].data_base;

	if (use_bus8 && (nand->buswidth == NAND_BUS_WIDTH_16)) {
		stm32_fmc2_set_buswidth_16(false);
	}

	if ((((uintptr_t)buff & BIT(0)) != 0U) && (length != 0U)) {
		*buff = mmio_read_8(data_base);
		buff += sizeof(uint8_t);
		length -= sizeof(uint8_t);
	}

	if ((((uintptr_t)buff & GENMASK_32(1, 0)) != 0U) &&
	    (length >= sizeof(uint16_t))) {
		*(uint16_t *)buff = mmio_read_16(data_base);
		buff += sizeof(uint16_t);
		length -= sizeof(uint16_t);
	}

	/* 32bit aligned */
	while (length >= sizeof(uint32_t)) {
		*(uint32_t *)buff = mmio_read_32(data_base);
		buff += sizeof(uint32_t);
		length -= sizeof(uint32_t);
	}

	/* Read remaining bytes */
	if (length >= sizeof(uint16_t)) {
		*(uint16_t *)buff = mmio_read_16(data_base);
		buff += sizeof(uint16_t);
		length -= sizeof(uint16_t);
	}

	if (length != 0U) {
		*buff = mmio_read_8(data_base);
	}

	if (use_bus8 && (nand->buswidth == NAND_BUS_WIDTH_16)) {
		/* Reconfigure bus width to 16-bit */
		stm32_fmc2_set_buswidth_16(true);
	}
}

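/*
 * Write 'length' bytes to the common data area, mirroring
 * stm32_fmc2_read_data(): buffer alignment is handled with 8/16/32-bit
 * MMIO accesses and the bus width is temporarily reduced to 8 bits
 * when requested.
 */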
static void stm32_fmc2_write_data(struct nand_device *nand,
				  uint8_t *buff, unsigned int length,
				  bool use_bus8)
{
	uintptr_t data_base = stm32_fmc2.cs[stm32_fmc2.cs_sel].data_base;

	if (use_bus8 && (nand->buswidth == NAND_BUS_WIDTH_16)) {
		/* Reconfigure bus width to 8-bit */
		stm32_fmc2_set_buswidth_16(false);
	}

	if ((((uintptr_t)buff & BIT(0)) != 0U) && (length != 0U)) {
		mmio_write_8(data_base, *buff);
		buff += sizeof(uint8_t);
		length -= sizeof(uint8_t);
	}

	if ((((uintptr_t)buff & GENMASK_32(1, 0)) != 0U) &&
	    (length >= sizeof(uint16_t))) {
		mmio_write_16(data_base, *(uint16_t *)buff);
		buff += sizeof(uint16_t);
		length -= sizeof(uint16_t);
	}

	/* 32bit aligned */
	while (length >= sizeof(uint32_t)) {
		mmio_write_32(data_base, *(uint32_t *)buff);
		buff += sizeof(uint32_t);
		length -= sizeof(uint32_t);
	}

	/* Write remaining bytes */
	if (length >= sizeof(uint16_t)) {
		mmio_write_16(data_base, *(uint16_t *)buff);
		buff += sizeof(uint16_t);
		length -= sizeof(uint16_t);
	}

	if (length != 0U) {
		mmio_write_8(data_base, *buff);
	}

	if (use_bus8 && (nand->buswidth == NAND_BUS_WIDTH_16)) {
		/* Reconfigure bus width to 16-bit */
		stm32_fmc2_set_buswidth_16(true);
	}
}

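/*
 * Program the controller defaults: wait feature and NAND bank
 * enabled, 8-bit bus for device identification, ECC disabled with a
 * 2048-byte sector size, conservative (maximum) TCLR/TAR timings and
 * default PMEM/PATT values, then enable the FMC2 block.
 */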
static void stm32_fmc2_ctrl_init(void)
{
	uint32_t pcr = mmio_read_32(fmc2_base() + FMC2_PCR);
	uint32_t bcr1 = mmio_read_32(fmc2_base() + FMC2_BCR1);

	/* Enable wait feature and NAND flash memory bank */
	pcr |= FMC2_PCR_PWAITEN;
	pcr |= FMC2_PCR_PBKEN;

	/* Set buswidth to 8 bits mode for identification */
	pcr &= ~FMC2_PCR_PWID_MASK;

	/* ECC logic is disabled */
	pcr &= ~FMC2_PCR_ECCEN;

	/* Default mode */
	pcr &= ~FMC2_PCR_ECCALG;
	pcr &= ~FMC2_PCR_BCHECC;
	pcr &= ~FMC2_PCR_WEN;

	/* Set default ECC sector size */
	pcr &= ~FMC2_PCR_ECCSS_MASK;
	pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_2048);

	/* Set default TCLR/TAR timings */
	pcr &= ~FMC2_PCR_TCLR_MASK;
	pcr |= FMC2_PCR_TCLR(FMC2_PCR_TCLR_DEFAULT);
	pcr &= ~FMC2_PCR_TAR_MASK;
	pcr |= FMC2_PCR_TAR(FMC2_PCR_TAR_DEFAULT);

	/* Enable FMC2 controller */
	bcr1 |= FMC2_BCR1_FMC2EN;

	mmio_write_32(fmc2_base() + FMC2_BCR1, bcr1);
	mmio_write_32(fmc2_base() + FMC2_PCR, pcr);
	mmio_write_32(fmc2_base() + FMC2_PMEM, FMC2_PMEM_DEFAULT);
	mmio_write_32(fmc2_base() + FMC2_PATT, FMC2_PATT_DEFAULT);
}

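/*
 * Execute one request from the raw NAND framework: command and
 * address cycles are single-byte writes to the dedicated command and
 * address areas, data phases go through the read/write helpers, and
 * NAND_REQ_WAIT waits for the device to become ready again.
 */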
static int stm32_fmc2_exec(struct nand_req *req)
{
	int ret = 0;

	switch (req->type & NAND_REQ_MASK) {
	case NAND_REQ_CMD:
		VERBOSE("Write CMD %x\n", (uint8_t)req->type);
		mmio_write_8(stm32_fmc2.cs[stm32_fmc2.cs_sel].cmd_base,
			     (uint8_t)req->type);
		break;
	case NAND_REQ_ADDR:
		VERBOSE("Write ADDR %x\n", *(req->addr));
		mmio_write_8(stm32_fmc2.cs[stm32_fmc2.cs_sel].addr_base,
			     *(req->addr));
		break;
	case NAND_REQ_DATAIN:
		VERBOSE("Read data\n");
		stm32_fmc2_read_data(req->nand, req->addr, req->length,
				     ((req->type & NAND_REQ_BUS_WIDTH_8) !=
				      0U));
		break;
	case NAND_REQ_DATAOUT:
		VERBOSE("Write data\n");
		stm32_fmc2_write_data(req->nand, req->addr, req->length,
				      ((req->type & NAND_REQ_BUS_WIDTH_8) !=
				       0U));
		break;
	case NAND_REQ_WAIT:
		VERBOSE("WAIT Ready\n");
		ret = nand_wait_ready(req->delay_ms);
		break;
	default:
		ret = -EINVAL;
		break;
	}

	return ret;
}

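/*
 * Apply the per-device configuration found during discovery: bus
 * width and, when hardware ECC is used, the ECC algorithm (Hamming,
 * BCH4 or BCH8 over 512-byte sectors) together with the number of
 * ECC bytes per sector.
 */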
static void stm32_fmc2_setup(struct nand_device *nand)
{
	uint32_t pcr = mmio_read_32(fmc2_base() + FMC2_PCR);

	/* Set buswidth */
	pcr &= ~FMC2_PCR_PWID_MASK;
	if (nand->buswidth == NAND_BUS_WIDTH_16) {
		pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_16);
	}

	if (nand->ecc.mode == NAND_ECC_HW) {
		nand->mtd_read_page = stm32_fmc2_read_page;

		pcr &= ~FMC2_PCR_ECCALG;
		pcr &= ~FMC2_PCR_BCHECC;

		pcr &= ~FMC2_PCR_ECCSS_MASK;
		pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_512);

		switch (nand->ecc.max_bit_corr) {
		case FMC2_ECC_HAM:
			nand->ecc.bytes = 3;
			break;
		case FMC2_ECC_BCH8:
			pcr |= FMC2_PCR_ECCALG;
			pcr |= FMC2_PCR_BCHECC;
			nand->ecc.bytes = 13;
			break;
		default:
			/* Use FMC2 ECC BCH4 */
			pcr |= FMC2_PCR_ECCALG;
			nand->ecc.bytes = 7;
			break;
		}

		if ((nand->buswidth & NAND_BUS_WIDTH_16) != 0) {
			nand->ecc.bytes++;
		}
	}

	mmio_write_32(stm32_fmc2.reg_base + FMC2_PCR, pcr);
}

static const struct nand_ctrl_ops ctrl_ops = {
	.setup = stm32_fmc2_setup,
	.exec = stm32_fmc2_exec
};

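/*
 * Probe the FMC2 controller from the device tree: locate the
 * st,stm32mp15-fmc2 node, retrieve the register base, clock, reset
 * and chip-select address ranges, apply the pinctrl settings, then
 * enable the clock, reset the block and program the default registers
 * and timings before registering with the raw NAND framework.
 * A single SLC NAND chip is supported.
 */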
int stm32_fmc2_init(void)
{
	int fmc_node;
	int fmc_subnode = 0;
	int nchips = 0;
	unsigned int i;
	void *fdt = NULL;
	const fdt32_t *cuint;
	struct dt_node_info info;
	int ret;

	if (fdt_get_address(&fdt) == 0) {
		return -FDT_ERR_NOTFOUND;
	}

	fmc_node = dt_get_node(&info, -1, DT_FMC2_COMPAT);
	if (fmc_node == -FDT_ERR_NOTFOUND) {
		WARN("No FMC2 node found\n");
		return fmc_node;
	}

	if (info.status == DT_DISABLED) {
		return -FDT_ERR_NOTFOUND;
	}

	stm32_fmc2.reg_base = info.base;

	if ((info.clock < 0) || (info.reset < 0)) {
		return -FDT_ERR_BADVALUE;
	}

	stm32_fmc2.clock_id = (unsigned long)info.clock;
	stm32_fmc2.reset_id = (unsigned int)info.reset;

	cuint = fdt_getprop(fdt, fmc_node, "reg", NULL);
	if (cuint == NULL) {
		return -FDT_ERR_BADVALUE;
	}

	cuint += 2;

	for (i = 0U; i < MAX_CS; i++) {
		stm32_fmc2.cs[i].data_base = fdt32_to_cpu(*cuint);
		stm32_fmc2.cs[i].cmd_base = fdt32_to_cpu(*(cuint + 2));
		stm32_fmc2.cs[i].addr_base = fdt32_to_cpu(*(cuint + 4));
		cuint += 6;
	}

	/* Pinctrl initialization */
	if (dt_set_pinctrl_config(fmc_node) != 0) {
		return -FDT_ERR_BADVALUE;
	}

	/* Parse flash nodes */
	fdt_for_each_subnode(fmc_subnode, fdt, fmc_node) {
		nchips++;
	}

	if (nchips != 1) {
		WARN("Only one SLC NAND device supported\n");
		return -FDT_ERR_BADVALUE;
	}

	fdt_for_each_subnode(fmc_subnode, fdt, fmc_node) {
		/* Get chip select */
		cuint = fdt_getprop(fdt, fmc_subnode, "reg", NULL);
		if (cuint == NULL) {
			WARN("Chip select not well defined\n");
			return -FDT_ERR_BADVALUE;
		}
		stm32_fmc2.cs_sel = fdt32_to_cpu(*cuint);
		VERBOSE("NAND CS %i\n", stm32_fmc2.cs_sel);
	}

	/* Enable Clock */
	stm32mp_clk_enable(stm32_fmc2.clock_id);

	/* Reset IP */
	ret = stm32mp_reset_assert(stm32_fmc2.reset_id, TIMEOUT_US_1_MS);
	if (ret != 0) {
		panic();
	}
	ret = stm32mp_reset_deassert(stm32_fmc2.reset_id, TIMEOUT_US_1_MS);
	if (ret != 0) {
		panic();
	}

	/* Setup default IP registers */
	stm32_fmc2_ctrl_init();

	/* Setup default timings */
	stm32_fmc2_nand_setup_timing();

	/* Init NAND RAW framework */
	nand_raw_ctrl_init(&ctrl_ops);

	return 0;
}