blob: fc81b07343f964df777b31e74f9ab70770259d33 [file] [log] [blame]
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +05301// SPDX-License-Identifier: GPL-2.0+
2/*
3 * (C) Copyright 2018 Xilinx
4 *
5 * Xilinx ZynqMP Generic Quad-SPI(QSPI) controller driver(master mode only)
6 */
7
8#include <common.h>
Simon Glass63334482019-11-14 12:57:39 -07009#include <cpu_func.h>
Simon Glass0f2af882020-05-10 11:40:05 -060010#include <log.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053011#include <asm/arch/sys_proto.h>
Simon Glass274e0b02020-05-10 11:39:56 -060012#include <asm/cache.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053013#include <asm/io.h>
14#include <clk.h>
15#include <dm.h>
16#include <malloc.h>
17#include <memalign.h>
18#include <spi.h>
Brandon Maier4d9cce72021-01-20 10:39:46 -060019#include <spi-mem.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053020#include <ubi_uboot.h>
21#include <wait_bit.h>
Simon Glass9bc15642020-02-03 07:36:16 -070022#include <dm/device_compat.h>
Simon Glass4dcacfc2020-05-10 11:40:13 -060023#include <linux/bitops.h>
Simon Glassd66c5f72020-02-03 07:36:15 -070024#include <linux/err.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053025
/* Configuration register (confr) bit masks */
#define GQSPI_GFIFO_STRT_MODE_MASK	BIT(29)
#define GQSPI_CONFIG_MODE_EN_MASK	(3 << 30)
#define GQSPI_CONFIG_DMA_MODE		(2 << 30)
#define GQSPI_CONFIG_CPHA_MASK		BIT(2)
#define GQSPI_CONFIG_CPOL_MASK		BIT(1)

/*
 * QSPI Interrupt Registers bit Masks
 *
 * All the four interrupt registers (Status/Mask/Enable/Disable) have the same
 * bit definitions.
 */
#define GQSPI_IXR_TXNFULL_MASK		0x00000004 /* QSPI TX FIFO Overflow */
#define GQSPI_IXR_TXFULL_MASK		0x00000008 /* QSPI TX FIFO is full */
#define GQSPI_IXR_RXNEMTY_MASK		0x00000010 /* QSPI RX FIFO Not Empty */
#define GQSPI_IXR_GFEMTY_MASK		0x00000080 /* QSPI Generic FIFO Empty */
#define GQSPI_IXR_GFNFULL_MASK		0x00000200 /* QSPI GENFIFO not full */
#define GQSPI_IXR_ALL_MASK		(GQSPI_IXR_TXNFULL_MASK | \
					 GQSPI_IXR_RXNEMTY_MASK)

/*
 * QSPI Enable Register bit Masks
 *
 * This register is used to enable or disable the QSPI controller
 */
#define GQSPI_ENABLE_ENABLE_MASK	0x00000001 /* QSPI Enable Bit Mask */

/* Generic FIFO (genfifo) entry fields */
#define GQSPI_GFIFO_LOW_BUS		BIT(14)
#define GQSPI_GFIFO_CS_LOWER		BIT(12)
#define GQSPI_GFIFO_UP_BUS		BIT(15)
#define GQSPI_GFIFO_CS_UPPER		BIT(13)
#define GQSPI_SPI_MODE_QSPI		(3 << 10)
#define GQSPI_SPI_MODE_SPI		BIT(10)
#define GQSPI_SPI_MODE_DUAL_SPI		(2 << 10)
#define GQSPI_IMD_DATA_CS_ASSERT	5
#define GQSPI_IMD_DATA_CS_DEASSERT	5
#define GQSPI_GFIFO_TX			BIT(16)
#define GQSPI_GFIFO_RX			BIT(17)
#define GQSPI_GFIFO_STRIPE_MASK		BIT(18)
#define GQSPI_GFIFO_IMD_MASK		0xFF	/* 8-bit immediate length */
#define GQSPI_GFIFO_EXP_MASK		BIT(9)	/* immediate is an exponent */
#define GQSPI_GFIFO_DATA_XFR_MASK	BIT(8)
#define GQSPI_STRT_GEN_FIFO		BIT(28)
#define GQSPI_GEN_FIFO_STRT_MOD		BIT(29)
#define GQSPI_GFIFO_WP_HOLD		BIT(19)
#define GQSPI_BAUD_DIV_MASK		(7 << 3)
#define GQSPI_DFLT_BAUD_RATE_DIV	BIT(3)
#define GQSPI_GFIFO_ALL_INT_MASK	0xFBE
#define GQSPI_DMA_DST_I_STS_DONE	BIT(1)
#define GQSPI_DMA_DST_I_STS_MASK	0xFE
#define MODEBITS			0x6

#define GQSPI_GFIFO_SELECT		BIT(0)
#define GQSPI_FIFO_THRESHOLD		1

/* Bus selection for dual-parallel setups (only LOWER is used here) */
#define SPI_XFER_ON_BOTH		0
#define SPI_XFER_ON_LOWER		1
#define SPI_XFER_ON_UPPER		2

#define GQSPI_DMA_ALIGN			0x4
#define GQSPI_MAX_BAUD_RATE_VAL		7
#define GQSPI_DFLT_BAUD_RATE_VAL	2

#define GQSPI_TIMEOUT			100000000

/* Tap-delay / loopback-delay tuning values (see set_tapdelay) */
#define GQSPI_BAUD_DIV_SHIFT		2
#define GQSPI_LPBK_DLY_ADJ_LPBK_SHIFT	5
#define GQSPI_LPBK_DLY_ADJ_DLY_1	0x2
#define GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT	3
#define GQSPI_LPBK_DLY_ADJ_DLY_0	0x3
#define GQSPI_USE_DATA_DLY		0x1
#define GQSPI_USE_DATA_DLY_SHIFT	31
#define GQSPI_DATA_DLY_ADJ_VALUE	0x2
#define GQSPI_DATA_DLY_ADJ_SHIFT	28
#define TAP_DLY_BYPASS_LQSPI_RX_VALUE	0x1
#define TAP_DLY_BYPASS_LQSPI_RX_SHIFT	2
#define GQSPI_DATA_DLY_ADJ_OFST		0x000001F8
#define IOU_TAPDLY_BYPASS_OFST		0xFF180390	/* outside controller; via MMIO fw i/f */
#define GQSPI_LPBK_DLY_ADJ_LPBK_MASK	0x00000020
#define GQSPI_FREQ_40MHZ		40000000
#define GQSPI_FREQ_100MHZ		100000000
#define GQSPI_FREQ_150MHZ		150000000
#define IOU_TAPDLY_BYPASS_MASK		0x7

/* Offsets of the GQSPI and DMA register blocks from the DT base address */
#define GQSPI_REG_OFFSET		0x100
#define GQSPI_DMA_REG_OFFSET		0x800

/*
 * QSPI register offsets
 *
 * Layout of the generic QSPI register block (controller base +
 * GQSPI_REG_OFFSET).  Field order and sizes mirror the hardware map;
 * do not reorder or resize members.
 */
struct zynqmp_qspi_regs {
	u32 confr;	/* 0x00 */
	u32 isr;	/* 0x04 */
	u32 ier;	/* 0x08 */
	u32 idisr;	/* 0x0C */
	u32 imaskr;	/* 0x10 */
	u32 enbr;	/* 0x14 */
	u32 dr;		/* 0x18 */
	u32 txd0r;	/* 0x1C */
	u32 drxr;	/* 0x20 */
	u32 sicr;	/* 0x24 */
	u32 txftr;	/* 0x28 */
	u32 rxftr;	/* 0x2C */
	u32 gpior;	/* 0x30 */
	u32 reserved0;	/* 0x34 */
	u32 lpbkdly;	/* 0x38 */
	u32 reserved1;	/* 0x3C */
	u32 genfifo;	/* 0x40 */
	u32 gqspisel;	/* 0x44 */
	u32 reserved2;	/* 0x48 */
	u32 gqfifoctrl;	/* 0x4C */
	u32 gqfthr;	/* 0x50 */
	u32 gqpollcfg;	/* 0x54 */
	u32 gqpollto;	/* 0x58 */
	u32 gqxfersts;	/* 0x5C */
	u32 gqfifosnap;	/* 0x60 */
	u32 gqrxcpy;	/* 0x64 */
	u32 reserved3[36];	/* 0x68 */
	u32 gqspidlyadj;	/* 0xF8 */
};
144
/*
 * RX DMA register block (controller base + GQSPI_DMA_REG_OFFSET).
 * Used by zynqmp_qspi_start_dma() for receive transfers.
 */
struct zynqmp_qspi_dma_regs {
	u32 dmadst;	/* 0x00 */
	u32 dmasize;	/* 0x04 */
	u32 dmasts;	/* 0x08 */
	u32 dmactrl;	/* 0x0C */
	u32 reserved0;	/* 0x10 */
	u32 dmaisr;	/* 0x14 */
	u32 dmaier;	/* 0x18 */
	u32 dmaidr;	/* 0x1C */
	u32 dmaimr;	/* 0x20 */
	u32 dmactrl2;	/* 0x24 */
	u32 dmadstmsb;	/* 0x28 */
};
158
Simon Glassb75b15b2020-12-03 16:55:23 -0700159struct zynqmp_qspi_plat {
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530160 struct zynqmp_qspi_regs *regs;
161 struct zynqmp_qspi_dma_regs *dma_regs;
162 u32 frequency;
163 u32 speed_hz;
164};
165
/* Per-controller runtime state for the op currently being executed */
struct zynqmp_qspi_priv {
	struct zynqmp_qspi_regs *regs;		/* GQSPI register block */
	struct zynqmp_qspi_dma_regs *dma_regs;	/* RX DMA register block */
	const void *tx_buf;	/* TX position; advanced as bytes are sent */
	void *rx_buf;		/* RX destination buffer */
	unsigned int len;	/* data bytes remaining in current phase */
	int bytes_to_transfer;
	int bytes_to_receive;
	const struct spi_mem_op *op;	/* spi-mem op in flight */
};
176
Simon Glassaad29ae2020-12-03 16:55:21 -0700177static int zynqmp_qspi_of_to_plat(struct udevice *bus)
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530178{
Simon Glass95588622020-12-22 19:30:28 -0700179 struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530180
181 debug("%s\n", __func__);
182
Masahiro Yamadaa89b4de2020-07-17 14:36:48 +0900183 plat->regs = (struct zynqmp_qspi_regs *)(dev_read_addr(bus) +
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530184 GQSPI_REG_OFFSET);
185 plat->dma_regs = (struct zynqmp_qspi_dma_regs *)
Masahiro Yamadaa89b4de2020-07-17 14:36:48 +0900186 (dev_read_addr(bus) + GQSPI_DMA_REG_OFFSET);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530187
188 return 0;
189}
190
/*
 * One-time controller setup: select the generic-FIFO interface, mask
 * and clear all interrupt status, set FIFO thresholds, clear the
 * GENFIFO start-mode and mode-select fields while switching to DMA
 * mode with the default baud divider, then enable the controller.
 */
static void zynqmp_qspi_init_hw(struct zynqmp_qspi_priv *priv)
{
	u32 config_reg;
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_GFIFO_SELECT, &regs->gqspisel);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->idisr);	/* disable all irqs */
	writel(GQSPI_FIFO_THRESHOLD, &regs->txftr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->rxftr);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->isr);	/* clear stale status */

	config_reg = readl(&regs->confr);
	config_reg &= ~(GQSPI_GFIFO_STRT_MODE_MASK |
			GQSPI_CONFIG_MODE_EN_MASK);
	config_reg |= GQSPI_CONFIG_DMA_MODE |
		      GQSPI_GFIFO_WP_HOLD |
		      GQSPI_DFLT_BAUD_RATE_DIV;
	writel(config_reg, &regs->confr);

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
}
212
213static u32 zynqmp_qspi_bus_select(struct zynqmp_qspi_priv *priv)
214{
215 u32 gqspi_fifo_reg = 0;
216
217 gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
218 GQSPI_GFIFO_CS_LOWER;
219
220 return gqspi_fifo_reg;
221}
222
Brandon Maier4d9cce72021-01-20 10:39:46 -0600223static u32 zynqmp_qspi_genfifo_mode(u8 buswidth)
224{
225 switch (buswidth) {
226 case 1:
227 return GQSPI_SPI_MODE_SPI;
228 case 2:
229 return GQSPI_SPI_MODE_DUAL_SPI;
230 case 4:
231 return GQSPI_SPI_MODE_QSPI;
232 default:
233 debug("Unsupported bus width %u\n", buswidth);
234 return GQSPI_SPI_MODE_SPI;
235 }
236}
237
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530238static void zynqmp_qspi_fill_gen_fifo(struct zynqmp_qspi_priv *priv,
239 u32 gqspi_fifo_reg)
240{
241 struct zynqmp_qspi_regs *regs = priv->regs;
Ashok Reddy Soma2af829f2021-05-25 06:36:27 -0600242 u32 config_reg, ier;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530243 int ret = 0;
244
Ashok Reddy Soma2af829f2021-05-25 06:36:27 -0600245 config_reg = readl(&regs->confr);
246 /* Manual start if needed */
247 config_reg |= GQSPI_STRT_GEN_FIFO;
248 writel(config_reg, &regs->confr);
249
250 /* Enable interrupts */
251 ier = readl(&regs->ier);
252 ier |= GQSPI_IXR_GFNFULL_MASK;
253 writel(ier, &regs->ier);
254
255 /* Wait until the fifo is not full to write the new command */
256 ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_GFNFULL_MASK, 1,
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530257 GQSPI_TIMEOUT, 1);
258 if (ret)
259 printf("%s Timeout\n", __func__);
260
261 writel(gqspi_fifo_reg, &regs->genfifo);
262}
263
264static void zynqmp_qspi_chipselect(struct zynqmp_qspi_priv *priv, int is_on)
265{
266 u32 gqspi_fifo_reg = 0;
267
268 if (is_on) {
269 gqspi_fifo_reg = zynqmp_qspi_bus_select(priv);
270 gqspi_fifo_reg |= GQSPI_SPI_MODE_SPI |
271 GQSPI_IMD_DATA_CS_ASSERT;
272 } else {
273 gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS;
274 gqspi_fifo_reg |= GQSPI_IMD_DATA_CS_DEASSERT;
275 }
276
277 debug("GFIFO_CMD_CS: 0x%x\n", gqspi_fifo_reg);
278
Ashok Reddy Soma2af829f2021-05-25 06:36:27 -0600279 /* Dummy generic FIFO entry */
280 zynqmp_qspi_fill_gen_fifo(priv, 0);
281
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530282 zynqmp_qspi_fill_gen_fifo(priv, gqspi_fifo_reg);
283}
284
/*
 * Program the RX loopback/tap delay compensation for the SCLK rate
 * implied by @baudrateval.  Three frequency bands are handled:
 *   <  40 MHz: bypass the LQSPI RX tap delay only
 *   <= 100 MHz: bypass + loopback delay + data delay adjust
 *   <= 150 MHz: loopback delay with the DLY0 setting only
 * IOU_TAPDLY_BYPASS lives outside the controller and is accessed
 * through the ZynqMP firmware MMIO interface.
 */
void zynqmp_qspi_set_tapdelay(struct udevice *bus, u32 baudrateval)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 tapdlybypass = 0, lpbkdlyadj = 0, datadlyadj = 0, clk_rate;
	u32 reqhz = 0;

	clk_rate = plat->frequency;
	/* Resulting SCLK for divider code: ref_clk / 2^(baudrateval + 1) */
	reqhz = (clk_rate / (GQSPI_BAUD_DIV_SHIFT << baudrateval));

	debug("%s, req_hz:%d, clk_rate:%d, baudrateval:%d\n",
	      __func__, reqhz, clk_rate, baudrateval);

	if (reqhz < GQSPI_FREQ_40MHZ) {
		zynqmp_mmio_read(IOU_TAPDLY_BYPASS_OFST, &tapdlybypass);
		tapdlybypass |= (TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				 TAP_DLY_BYPASS_LQSPI_RX_SHIFT);
	} else if (reqhz <= GQSPI_FREQ_100MHZ) {
		zynqmp_mmio_read(IOU_TAPDLY_BYPASS_OFST, &tapdlybypass);
		tapdlybypass |= (TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				 TAP_DLY_BYPASS_LQSPI_RX_SHIFT);
		lpbkdlyadj = readl(&regs->lpbkdly);
		lpbkdlyadj |= (GQSPI_LPBK_DLY_ADJ_LPBK_MASK);
		datadlyadj = readl(&regs->gqspidlyadj);
		datadlyadj |= ((GQSPI_USE_DATA_DLY << GQSPI_USE_DATA_DLY_SHIFT)
				| (GQSPI_DATA_DLY_ADJ_VALUE <<
				   GQSPI_DATA_DLY_ADJ_SHIFT));
	} else if (reqhz <= GQSPI_FREQ_150MHZ) {
		lpbkdlyadj = readl(&regs->lpbkdly);
		lpbkdlyadj |= ((GQSPI_LPBK_DLY_ADJ_LPBK_MASK) |
				GQSPI_LPBK_DLY_ADJ_DLY_0);
	}

	/* NOTE(review): > 150 MHz leaves all three values at 0 — confirm intended */
	zynqmp_mmio_write(IOU_TAPDLY_BYPASS_OFST, IOU_TAPDLY_BYPASS_MASK,
			  tapdlybypass);
	writel(lpbkdlyadj, &regs->lpbkdly);
	writel(datadlyadj, &regs->gqspidlyadj);
}
324
/*
 * Set the SPI clock rate: pick the smallest baud divider code such
 * that ref_clk / 2^(code + 1) does not exceed @speed, program it into
 * the configuration register, and retune the tap delays for the new
 * frequency.  No-op when the requested speed is already set.
 */
static int zynqmp_qspi_set_speed(struct udevice *bus, uint speed)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;
	u8 baud_rate_val = 0;

	debug("%s\n", __func__);
	if (speed > plat->frequency)
		speed = plat->frequency;

	if (plat->speed_hz != speed) {
		/* Set the clock frequency */
		/* If speed == 0, default to lowest speed */
		while ((baud_rate_val < 8) &&
		       ((plat->frequency /
			 (2 << baud_rate_val)) > speed))
			baud_rate_val++;

		if (baud_rate_val > GQSPI_MAX_BAUD_RATE_VAL)
			baud_rate_val = GQSPI_DFLT_BAUD_RATE_VAL;

		/* Record the rate actually achieved, not the request */
		plat->speed_hz = plat->frequency / (2 << baud_rate_val);

		confr = readl(&regs->confr);
		confr &= ~GQSPI_BAUD_DIV_MASK;
		confr |= (baud_rate_val << 3);
		writel(confr, &regs->confr);
		zynqmp_qspi_set_tapdelay(bus, baud_rate_val);

		debug("regs=%p, speed=%d\n", priv->regs, plat->speed_hz);
	}

	return 0;
}
361
/*
 * Probe: copy register pointers from platdata, look up and enable the
 * controller's reference clock, record its rate, and run the one-time
 * hardware initialization.
 */
static int zynqmp_qspi_probe(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct clk clk;
	unsigned long clock;
	int ret;

	debug("%s: bus:%p, priv:%p\n", __func__, bus, priv);

	priv->regs = plat->regs;
	priv->dma_regs = plat->dma_regs;

	ret = clk_get_by_index(bus, 0, &clk);
	if (ret < 0) {
		dev_err(bus, "failed to get clock\n");
		return ret;
	}

	clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(clock)) {
		dev_err(bus, "failed to get rate\n");
		return clock;
	}
	debug("%s: CLK %ld\n", __func__, clock);

	ret = clk_enable(&clk);
	if (ret) {
		dev_err(bus, "failed to enable clock\n");
		return ret;
	}
	plat->frequency = clock;
	plat->speed_hz = plat->frequency / 2;	/* default SCLK: ref / 2 */

	/* init the zynq spi hw */
	zynqmp_qspi_init_hw(priv);

	return 0;
}
401
402static int zynqmp_qspi_set_mode(struct udevice *bus, uint mode)
403{
404 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
405 struct zynqmp_qspi_regs *regs = priv->regs;
406 u32 confr;
407
408 debug("%s\n", __func__);
409 /* Set the SPI Clock phase and polarities */
410 confr = readl(&regs->confr);
411 confr &= ~(GQSPI_CONFIG_CPHA_MASK |
412 GQSPI_CONFIG_CPOL_MASK);
413
414 if (mode & SPI_CPHA)
415 confr |= GQSPI_CONFIG_CPHA_MASK;
416 if (mode & SPI_CPOL)
417 confr |= GQSPI_CONFIG_CPOL_MASK;
418
419 writel(confr, &regs->confr);
420
421 return 0;
422}
423
424static int zynqmp_qspi_fill_tx_fifo(struct zynqmp_qspi_priv *priv, u32 size)
425{
426 u32 data;
427 int ret = 0;
428 struct zynqmp_qspi_regs *regs = priv->regs;
429 u32 *buf = (u32 *)priv->tx_buf;
430 u32 len = size;
431
432 debug("TxFIFO: 0x%x, size: 0x%x\n", readl(&regs->isr),
433 size);
434
435 while (size) {
436 ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXNFULL_MASK, 1,
437 GQSPI_TIMEOUT, 1);
438 if (ret) {
439 printf("%s: Timeout\n", __func__);
440 return ret;
441 }
442
443 if (size >= 4) {
444 writel(*buf, &regs->txd0r);
445 buf++;
446 size -= 4;
447 } else {
448 switch (size) {
449 case 1:
450 data = *((u8 *)buf);
451 buf += 1;
452 data |= GENMASK(31, 8);
453 break;
454 case 2:
455 data = *((u16 *)buf);
456 buf += 2;
457 data |= GENMASK(31, 16);
458 break;
459 case 3:
T Karthik Reddycc59fc92020-11-19 05:00:36 -0700460 data = *buf;
461 buf += 3;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530462 data |= GENMASK(31, 24);
463 break;
464 }
465 writel(data, &regs->txd0r);
466 size = 0;
467 }
468 }
469
470 priv->tx_buf += len;
471 return 0;
472}
473
/*
 * Queue the command phase of an spi-mem op into the generic FIFO:
 * the opcode byte, then each address byte (most significant first),
 * then an optional dummy-cycle entry.  Each phase uses its own bus
 * width taken from @op.
 */
static void zynqmp_qspi_genfifo_cmd(struct zynqmp_qspi_priv *priv)
{
	const struct spi_mem_op *op = priv->op;
	u32 gen_fifo_cmd;
	u8 i, dummy_cycles, addr;

	/* Send opcode */
	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->cmd.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_TX;
	gen_fifo_cmd |= op->cmd.opcode;
	zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

	/* Send address, one GENFIFO entry per byte, MSB first */
	for (i = 0; i < op->addr.nbytes; i++) {
		addr = op->addr.val >> (8 * (op->addr.nbytes - i - 1));

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->addr.buswidth);
		gen_fifo_cmd |= GQSPI_GFIFO_TX;
		gen_fifo_cmd |= addr;

		debug("GFIFO_CMD_Cmd = 0x%x\n", gen_fifo_cmd);

		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}

	/* Send dummy */
	if (op->dummy.nbytes) {
		/* Convert dummy bytes to clock cycles for this bus width */
		dummy_cycles = op->dummy.nbytes * 8 / op->dummy.buswidth;

		/* Neither TX nor RX: clocks run with no data transfer */
		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->dummy.buswidth);
		gen_fifo_cmd &= ~(GQSPI_GFIFO_TX | GQSPI_GFIFO_RX);
		gen_fifo_cmd |= GQSPI_GFIFO_DATA_XFR_MASK;
		gen_fifo_cmd |= dummy_cycles;
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}
}
513
514static u32 zynqmp_qspi_calc_exp(struct zynqmp_qspi_priv *priv,
515 u32 *gen_fifo_cmd)
516{
517 u32 expval = 8;
518 u32 len;
519
520 while (1) {
521 if (priv->len > 255) {
522 if (priv->len & (1 << expval)) {
523 *gen_fifo_cmd &= ~GQSPI_GFIFO_IMD_MASK;
524 *gen_fifo_cmd |= GQSPI_GFIFO_EXP_MASK;
525 *gen_fifo_cmd |= expval;
526 priv->len -= (1 << expval);
527 return expval;
528 }
529 expval++;
530 } else {
531 *gen_fifo_cmd &= ~(GQSPI_GFIFO_IMD_MASK |
532 GQSPI_GFIFO_EXP_MASK);
533 *gen_fifo_cmd |= (u8)priv->len;
534 len = (u8)priv->len;
535 priv->len = 0;
536 return len;
537 }
538 }
539}
540
541static int zynqmp_qspi_genfifo_fill_tx(struct zynqmp_qspi_priv *priv)
542{
543 u32 gen_fifo_cmd;
544 u32 len;
545 int ret = 0;
546
547 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
Brandon Maier4d9cce72021-01-20 10:39:46 -0600548 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530549 gen_fifo_cmd |= GQSPI_GFIFO_TX |
550 GQSPI_GFIFO_DATA_XFR_MASK;
551
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530552 while (priv->len) {
553 len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
554 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
555
556 debug("GFIFO_CMD_TX:0x%x\n", gen_fifo_cmd);
557
558 if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
559 ret = zynqmp_qspi_fill_tx_fifo(priv,
560 1 << len);
561 else
562 ret = zynqmp_qspi_fill_tx_fifo(priv,
563 len);
564
565 if (ret)
566 return ret;
567 }
568 return ret;
569}
570
/*
 * Receive priv->len bytes via the controller's RX DMA into @buf.
 *
 * @buf must be cache-line aligned and large enough for priv->len
 * rounded up to ARCH_DMA_MINALIGN (the DMA size register is programmed
 * with the rounded value).  If @buf is a bounce buffer (!= priv->rx_buf)
 * the received data is copied into priv->rx_buf afterwards.
 * Returns 0 on success, -ETIMEDOUT if the DMA-done bit never sets.
 */
static int zynqmp_qspi_start_dma(struct zynqmp_qspi_priv *priv,
				 u32 gen_fifo_cmd, u32 *buf)
{
	u32 addr;
	u32 size, len;
	u32 actuallen = priv->len;
	int ret = 0;
	struct zynqmp_qspi_dma_regs *dma_regs = priv->dma_regs;

	writel((unsigned long)buf, &dma_regs->dmadst);
	writel(roundup(priv->len, ARCH_DMA_MINALIGN), &dma_regs->dmasize);
	writel(GQSPI_DMA_DST_I_STS_MASK, &dma_regs->dmaier);
	/* Flush the destination range so DMA-written lines are not
	 * overwritten by later cache evictions.
	 */
	addr = (unsigned long)buf;
	size = roundup(priv->len, ARCH_DMA_MINALIGN);
	flush_dcache_range(addr, addr + size);

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		/* Non-EXP entries with odd sizes: round the immediate
		 * length up to the DMA alignment.
		 */
		if (!(gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK) &&
		    (len % ARCH_DMA_MINALIGN)) {
			gen_fifo_cmd &= ~GENMASK(7, 0);
			gen_fifo_cmd |= roundup(len, ARCH_DMA_MINALIGN);
		}
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		debug("GFIFO_CMD_RX:0x%x\n", gen_fifo_cmd);
	}

	ret = wait_for_bit_le32(&dma_regs->dmaisr, GQSPI_DMA_DST_I_STS_DONE,
				1, GQSPI_TIMEOUT, 1);
	if (ret) {
		printf("DMA Timeout:0x%x\n", readl(&dma_regs->dmaisr));
		return -ETIMEDOUT;
	}

	/* Acknowledge the done interrupt (write-to-clear) */
	writel(GQSPI_DMA_DST_I_STS_DONE, &dma_regs->dmaisr);

	debug("buf:0x%lx, rxbuf:0x%lx, *buf:0x%x len: 0x%x\n",
	      (unsigned long)buf, (unsigned long)priv->rx_buf, *buf,
	      actuallen);

	if (buf != priv->rx_buf)
		memcpy(priv->rx_buf, buf, actuallen);

	return 0;
}
617
/*
 * Run the RX data phase.  DMA directly into the caller's buffer when
 * it is 4-byte aligned and the length is a multiple of 4; otherwise
 * bounce through a stack-allocated cache-aligned buffer.
 */
static int zynqmp_qspi_genfifo_fill_rx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 *buf;
	u32 actuallen = priv->len;

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_RX |
			GQSPI_GFIFO_DATA_XFR_MASK;

	/*
	 * Check if receive buffer is aligned to 4 byte and length
	 * is multiples of four byte as we are using dma to receive.
	 */
	if (!((unsigned long)priv->rx_buf & (GQSPI_DMA_ALIGN - 1)) &&
	    !(actuallen % GQSPI_DMA_ALIGN)) {
		buf = (u32 *)priv->rx_buf;
		return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
	}

	/* Unaligned case: bounce buffer, copied back by start_dma() */
	ALLOC_CACHE_ALIGN_BUFFER(u8, tmp, roundup(priv->len,
						  GQSPI_DMA_ALIGN));
	buf = (u32 *)tmp;
	return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
}
644
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530645static int zynqmp_qspi_claim_bus(struct udevice *dev)
646{
647 struct udevice *bus = dev->parent;
648 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
649 struct zynqmp_qspi_regs *regs = priv->regs;
650
651 writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
652
653 return 0;
654}
655
656static int zynqmp_qspi_release_bus(struct udevice *dev)
657{
658 struct udevice *bus = dev->parent;
659 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
660 struct zynqmp_qspi_regs *regs = priv->regs;
661
662 writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
663
664 return 0;
665}
666
Brandon Maier4d9cce72021-01-20 10:39:46 -0600667static int zynqmp_qspi_exec_op(struct spi_slave *slave,
668 const struct spi_mem_op *op)
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530669{
Brandon Maier4d9cce72021-01-20 10:39:46 -0600670 struct zynqmp_qspi_priv *priv = dev_get_priv(slave->dev->parent);
671 int ret = 0;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530672
Brandon Maier4d9cce72021-01-20 10:39:46 -0600673 priv->op = op;
674 priv->tx_buf = op->data.buf.out;
675 priv->rx_buf = op->data.buf.in;
676 priv->len = op->data.nbytes;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530677
Brandon Maier4d9cce72021-01-20 10:39:46 -0600678 zynqmp_qspi_chipselect(priv, 1);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530679
Brandon Maier4d9cce72021-01-20 10:39:46 -0600680 /* Send opcode, addr, dummy */
681 zynqmp_qspi_genfifo_cmd(priv);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530682
Brandon Maier4d9cce72021-01-20 10:39:46 -0600683 /* Request the transfer */
684 if (op->data.dir == SPI_MEM_DATA_IN)
685 ret = zynqmp_qspi_genfifo_fill_rx(priv);
686 else if (op->data.dir == SPI_MEM_DATA_OUT)
687 ret = zynqmp_qspi_genfifo_fill_tx(priv);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530688
Brandon Maier4d9cce72021-01-20 10:39:46 -0600689 zynqmp_qspi_chipselect(priv, 0);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530690
Brandon Maier4d9cce72021-01-20 10:39:46 -0600691 return ret;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530692}
693
Brandon Maier4d9cce72021-01-20 10:39:46 -0600694static const struct spi_controller_mem_ops zynqmp_qspi_mem_ops = {
695 .exec_op = zynqmp_qspi_exec_op,
696};
697
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530698static const struct dm_spi_ops zynqmp_qspi_ops = {
699 .claim_bus = zynqmp_qspi_claim_bus,
700 .release_bus = zynqmp_qspi_release_bus,
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530701 .set_speed = zynqmp_qspi_set_speed,
702 .set_mode = zynqmp_qspi_set_mode,
Brandon Maier4d9cce72021-01-20 10:39:46 -0600703 .mem_ops = &zynqmp_qspi_mem_ops,
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530704};
705
/* Device-tree match table: ZynqMP and Versal GQSPI instances */
static const struct udevice_id zynqmp_qspi_ids[] = {
	{ .compatible = "xlnx,zynqmp-qspi-1.0" },
	{ .compatible = "xlnx,versal-qspi-1.0" },
	{ }
};
711
/* Driver-model registration for the GQSPI controller */
U_BOOT_DRIVER(zynqmp_qspi) = {
	.name	= "zynqmp_qspi",
	.id	= UCLASS_SPI,
	.of_match = zynqmp_qspi_ids,
	.ops	= &zynqmp_qspi_ops,
	.of_to_plat = zynqmp_qspi_of_to_plat,
	.plat_auto	= sizeof(struct zynqmp_qspi_plat),
	.priv_auto	= sizeof(struct zynqmp_qspi_priv),
	.probe	= zynqmp_qspi_probe,
};