// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2018 Xilinx
 *
 * Xilinx ZynqMP Generic Quad-SPI (QSPI) controller driver (master mode only)
 */

#include <common.h>
#include <cpu_func.h>
#include <log.h>
#include <asm/arch/sys_proto.h>
#include <asm/cache.h>
#include <asm/io.h>
#include <clk.h>
#include <dm.h>
#include <malloc.h>
#include <memalign.h>
#include <spi.h>
#include <spi-mem.h>
#include <ubi_uboot.h>
#include <wait_bit.h>
#include <dm/device_compat.h>
#include <linux/bitops.h>
#include <linux/err.h>

#define GQSPI_GFIFO_STRT_MODE_MASK	BIT(29)
#define GQSPI_CONFIG_MODE_EN_MASK	(3 << 30)
#define GQSPI_CONFIG_DMA_MODE		(2 << 30)
#define GQSPI_CONFIG_CPHA_MASK		BIT(2)
#define GQSPI_CONFIG_CPOL_MASK		BIT(1)

/*
 * QSPI Interrupt Registers bit Masks
 *
 * All the four interrupt registers (Status/Mask/Enable/Disable) have the same
 * bit definitions.
 */
#define GQSPI_IXR_TXNFULL_MASK		0x00000004 /* QSPI TX FIFO Not Full */
#define GQSPI_IXR_TXFULL_MASK		0x00000008 /* QSPI TX FIFO is full */
#define GQSPI_IXR_TXFIFOEMPTY_MASK	0x00000100 /* QSPI TX FIFO is Empty */
#define GQSPI_IXR_RXNEMTY_MASK		0x00000010 /* QSPI RX FIFO Not Empty */
#define GQSPI_IXR_GFEMTY_MASK		0x00000080 /* QSPI Generic FIFO Empty */
#define GQSPI_IXR_GFNFULL_MASK		0x00000200 /* QSPI GENFIFO not full */
#define GQSPI_IXR_ALL_MASK		(GQSPI_IXR_TXNFULL_MASK | \
					 GQSPI_IXR_RXNEMTY_MASK)

/*
 * QSPI Enable Register bit Masks
 *
 * This register is used to enable or disable the QSPI controller
 */
#define GQSPI_ENABLE_ENABLE_MASK	0x00000001 /* QSPI Enable Bit Mask */

#define GQSPI_GFIFO_LOW_BUS		BIT(14)
#define GQSPI_GFIFO_CS_LOWER		BIT(12)
#define GQSPI_GFIFO_UP_BUS		BIT(15)
#define GQSPI_GFIFO_CS_UPPER		BIT(13)
#define GQSPI_SPI_MODE_QSPI		(3 << 10)
#define GQSPI_SPI_MODE_SPI		BIT(10)
#define GQSPI_SPI_MODE_DUAL_SPI		(2 << 10)
#define GQSPI_IMD_DATA_CS_ASSERT	5
#define GQSPI_IMD_DATA_CS_DEASSERT	5
#define GQSPI_GFIFO_TX			BIT(16)
#define GQSPI_GFIFO_RX			BIT(17)
#define GQSPI_GFIFO_STRIPE_MASK		BIT(18)
#define GQSPI_GFIFO_IMD_MASK		0xFF
#define GQSPI_GFIFO_EXP_MASK		BIT(9)
#define GQSPI_GFIFO_DATA_XFR_MASK	BIT(8)
#define GQSPI_STRT_GEN_FIFO		BIT(28)
#define GQSPI_GEN_FIFO_STRT_MOD		BIT(29)
#define GQSPI_GFIFO_WP_HOLD		BIT(19)
#define GQSPI_BAUD_DIV_MASK		(7 << 3)
#define GQSPI_DFLT_BAUD_RATE_DIV	BIT(3)
#define GQSPI_GFIFO_ALL_INT_MASK	0xFBE
#define GQSPI_DMA_DST_I_STS_DONE	BIT(1)
#define GQSPI_DMA_DST_I_STS_MASK	0xFE
#define MODEBITS			0x6

#define GQSPI_GFIFO_SELECT		BIT(0)
#define GQSPI_FIFO_THRESHOLD		1
#define GQSPI_GENFIFO_THRESHOLD		31

#define SPI_XFER_ON_BOTH		0
#define SPI_XFER_ON_LOWER		1
#define SPI_XFER_ON_UPPER		2

#define GQSPI_DMA_ALIGN			0x4
#define GQSPI_MAX_BAUD_RATE_VAL		7
#define GQSPI_DFLT_BAUD_RATE_VAL	2

#define GQSPI_TIMEOUT			100000000

#define GQSPI_BAUD_DIV_SHIFT		2
#define GQSPI_LPBK_DLY_ADJ_LPBK_SHIFT	5
#define GQSPI_LPBK_DLY_ADJ_DLY_1	0x2
#define GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT	3
#define GQSPI_LPBK_DLY_ADJ_DLY_0	0x3
#define GQSPI_USE_DATA_DLY		0x1
#define GQSPI_USE_DATA_DLY_SHIFT	31
#define GQSPI_DATA_DLY_ADJ_VALUE	0x2
#define GQSPI_DATA_DLY_ADJ_SHIFT	28
#define TAP_DLY_BYPASS_LQSPI_RX_VALUE	0x1
#define TAP_DLY_BYPASS_LQSPI_RX_SHIFT	2
#define GQSPI_DATA_DLY_ADJ_OFST		0x000001F8
#define IOU_TAPDLY_BYPASS_OFST		0xFF180390
#define GQSPI_LPBK_DLY_ADJ_LPBK_MASK	0x00000020
#define GQSPI_FREQ_40MHZ		40000000
#define GQSPI_FREQ_100MHZ		100000000
#define GQSPI_FREQ_150MHZ		150000000
#define IOU_TAPDLY_BYPASS_MASK		0x7

#define GQSPI_REG_OFFSET		0x100
#define GQSPI_DMA_REG_OFFSET		0x800

/* QSPI register offsets */
struct zynqmp_qspi_regs {
	u32 confr;	/* 0x00 */
	u32 isr;	/* 0x04 */
	u32 ier;	/* 0x08 */
	u32 idisr;	/* 0x0C */
	u32 imaskr;	/* 0x10 */
	u32 enbr;	/* 0x14 */
	u32 dr;		/* 0x18 */
	u32 txd0r;	/* 0x1C */
	u32 drxr;	/* 0x20 */
	u32 sicr;	/* 0x24 */
	u32 txftr;	/* 0x28 */
	u32 rxftr;	/* 0x2C */
	u32 gpior;	/* 0x30 */
	u32 reserved0;	/* 0x34 */
	u32 lpbkdly;	/* 0x38 */
	u32 reserved1;	/* 0x3C */
	u32 genfifo;	/* 0x40 */
	u32 gqspisel;	/* 0x44 */
	u32 reserved2;	/* 0x48 */
	u32 gqfifoctrl;	/* 0x4C */
	u32 gqfthr;	/* 0x50 */
	u32 gqpollcfg;	/* 0x54 */
	u32 gqpollto;	/* 0x58 */
	u32 gqxfersts;	/* 0x5C */
	u32 gqfifosnap;	/* 0x60 */
	u32 gqrxcpy;	/* 0x64 */
	u32 reserved3[36];	/* 0x68 */
	u32 gqspidlyadj;	/* 0xF8 */
};

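/* QSPI DMA destination channel register offsets */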
struct zynqmp_qspi_dma_regs {
	u32 dmadst;	/* 0x00 */
	u32 dmasize;	/* 0x04 */
	u32 dmasts;	/* 0x08 */
	u32 dmactrl;	/* 0x0C */
	u32 reserved0;	/* 0x10 */
	u32 dmaisr;	/* 0x14 */
	u32 dmaier;	/* 0x18 */
	u32 dmaidr;	/* 0x1C */
	u32 dmaimr;	/* 0x20 */
	u32 dmactrl2;	/* 0x24 */
	u32 dmadstmsb;	/* 0x28 */
};

struct zynqmp_qspi_plat {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	u32 frequency;
	u32 speed_hz;
};

struct zynqmp_qspi_priv {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	const void *tx_buf;
	void *rx_buf;
	unsigned int len;
	int bytes_to_transfer;
	int bytes_to_receive;
	const struct spi_mem_op *op;
};

static int zynqmp_qspi_of_to_plat(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);

	debug("%s\n", __func__);

	plat->regs = (struct zynqmp_qspi_regs *)(dev_read_addr(bus) +
						 GQSPI_REG_OFFSET);
	plat->dma_regs = (struct zynqmp_qspi_dma_regs *)
			  (dev_read_addr(bus) + GQSPI_DMA_REG_OFFSET);

	return 0;
}

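/*
 * Initialise the controller: select the generic QSPI mode, set the FIFO
 * thresholds, mask and clear all interrupts, then configure DMA mode with
 * manual-start generic FIFO operation before re-enabling the controller.
 */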
static void zynqmp_qspi_init_hw(struct zynqmp_qspi_priv *priv)
{
	u32 config_reg;
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_GFIFO_SELECT, &regs->gqspisel);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->idisr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->txftr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->rxftr);
	writel(GQSPI_GENFIFO_THRESHOLD, &regs->gqfthr);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->isr);
	writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	config_reg = readl(&regs->confr);
	config_reg &= ~(GQSPI_GFIFO_STRT_MODE_MASK |
			GQSPI_CONFIG_MODE_EN_MASK);
	config_reg |= GQSPI_CONFIG_DMA_MODE | GQSPI_GFIFO_WP_HOLD |
		      GQSPI_DFLT_BAUD_RATE_DIV | GQSPI_GFIFO_STRT_MODE_MASK;
	writel(config_reg, &regs->confr);

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
}

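/*
 * Build the GENFIFO bus/CS selection bits; this driver always drives the
 * lower data bus with the lower chip select.
 */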
static u32 zynqmp_qspi_bus_select(struct zynqmp_qspi_priv *priv)
{
	u32 gqspi_fifo_reg = 0;

	gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
			 GQSPI_GFIFO_CS_LOWER;

	return gqspi_fifo_reg;
}

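/* Map a spi-mem bus width (1/2/4 lines) to the GENFIFO SPI mode field */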
static u32 zynqmp_qspi_genfifo_mode(u8 buswidth)
{
	switch (buswidth) {
	case 1:
		return GQSPI_SPI_MODE_SPI;
	case 2:
		return GQSPI_SPI_MODE_DUAL_SPI;
	case 4:
		return GQSPI_SPI_MODE_QSPI;
	default:
		debug("Unsupported bus width %u\n", buswidth);
		return GQSPI_SPI_MODE_SPI;
	}
}

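/*
 * Write one entry into the generic FIFO, trigger a manual start and wait
 * for the generic FIFO to drain before returning.
 */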
static void zynqmp_qspi_fill_gen_fifo(struct zynqmp_qspi_priv *priv,
				      u32 gqspi_fifo_reg)
{
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 config_reg, ier;
	int ret = 0;

	writel(gqspi_fifo_reg, &regs->genfifo);

	config_reg = readl(&regs->confr);
	/* Manual start if needed */
	config_reg |= GQSPI_STRT_GEN_FIFO;
	writel(config_reg, &regs->confr);

	/* Enable interrupts */
	ier = readl(&regs->ier);
	ier |= GQSPI_IXR_GFEMTY_MASK;
	writel(ier, &regs->ier);

	/* Wait until the gen fifo is empty to write the new command */
	ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_GFEMTY_MASK, 1,
				GQSPI_TIMEOUT, 1);
	if (ret)
		printf("%s Timeout\n", __func__);
}

static void zynqmp_qspi_chipselect(struct zynqmp_qspi_priv *priv, int is_on)
{
	u32 gqspi_fifo_reg = 0;

	if (is_on) {
		gqspi_fifo_reg = zynqmp_qspi_bus_select(priv);
		gqspi_fifo_reg |= GQSPI_SPI_MODE_SPI |
				  GQSPI_IMD_DATA_CS_ASSERT;
	} else {
		gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS;
		gqspi_fifo_reg |= GQSPI_IMD_DATA_CS_DEASSERT;
	}

	debug("GFIFO_CMD_CS: 0x%x\n", gqspi_fifo_reg);

	zynqmp_qspi_fill_gen_fifo(priv, gqspi_fifo_reg);
}

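/*
 * Program the IOU tap-delay bypass and the controller loopback/data delay
 * registers according to the bus clock selected by the baud-rate divisor.
 */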
void zynqmp_qspi_set_tapdelay(struct udevice *bus, u32 baudrateval)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 tapdlybypass = 0, lpbkdlyadj = 0, datadlyadj = 0, clk_rate;
	u32 reqhz = 0;

	clk_rate = plat->frequency;
	reqhz = (clk_rate / (GQSPI_BAUD_DIV_SHIFT << baudrateval));

	debug("%s, req_hz:%d, clk_rate:%d, baudrateval:%d\n",
	      __func__, reqhz, clk_rate, baudrateval);

	if (reqhz < GQSPI_FREQ_40MHZ) {
		zynqmp_mmio_read(IOU_TAPDLY_BYPASS_OFST, &tapdlybypass);
		tapdlybypass |= (TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				 TAP_DLY_BYPASS_LQSPI_RX_SHIFT);
	} else if (reqhz <= GQSPI_FREQ_100MHZ) {
		zynqmp_mmio_read(IOU_TAPDLY_BYPASS_OFST, &tapdlybypass);
		tapdlybypass |= (TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				 TAP_DLY_BYPASS_LQSPI_RX_SHIFT);
		lpbkdlyadj = readl(&regs->lpbkdly);
		lpbkdlyadj |= (GQSPI_LPBK_DLY_ADJ_LPBK_MASK);
		datadlyadj = readl(&regs->gqspidlyadj);
		datadlyadj |= ((GQSPI_USE_DATA_DLY << GQSPI_USE_DATA_DLY_SHIFT)
				| (GQSPI_DATA_DLY_ADJ_VALUE <<
				   GQSPI_DATA_DLY_ADJ_SHIFT));
	} else if (reqhz <= GQSPI_FREQ_150MHZ) {
		lpbkdlyadj = readl(&regs->lpbkdly);
		lpbkdlyadj |= ((GQSPI_LPBK_DLY_ADJ_LPBK_MASK) |
				GQSPI_LPBK_DLY_ADJ_DLY_0);
	}

	zynqmp_mmio_write(IOU_TAPDLY_BYPASS_OFST, IOU_TAPDLY_BYPASS_MASK,
			  tapdlybypass);
	writel(lpbkdlyadj, &regs->lpbkdly);
	writel(datadlyadj, &regs->gqspidlyadj);
}

static int zynqmp_qspi_set_speed(struct udevice *bus, uint speed)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;
	u8 baud_rate_val = 0;

	debug("%s\n", __func__);
	if (speed > plat->frequency)
		speed = plat->frequency;

	if (plat->speed_hz != speed) {
		/* Set the clock frequency */
		/* If speed == 0, default to lowest speed */
		while ((baud_rate_val < 8) &&
		       ((plat->frequency /
			 (2 << baud_rate_val)) > speed))
			baud_rate_val++;

		if (baud_rate_val > GQSPI_MAX_BAUD_RATE_VAL)
			baud_rate_val = GQSPI_DFLT_BAUD_RATE_VAL;

		plat->speed_hz = plat->frequency / (2 << baud_rate_val);

		confr = readl(&regs->confr);
		confr &= ~GQSPI_BAUD_DIV_MASK;
		confr |= (baud_rate_val << 3);
		writel(confr, &regs->confr);
		zynqmp_qspi_set_tapdelay(bus, baud_rate_val);

		debug("regs=%p, speed=%d\n", priv->regs, plat->speed_hz);
	}

	return 0;
}

static int zynqmp_qspi_probe(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct clk clk;
	unsigned long clock;
	int ret;

	debug("%s: bus:%p, priv:%p\n", __func__, bus, priv);

	priv->regs = plat->regs;
	priv->dma_regs = plat->dma_regs;

	ret = clk_get_by_index(bus, 0, &clk);
	if (ret < 0) {
		dev_err(bus, "failed to get clock\n");
		return ret;
	}

	clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(clock)) {
		dev_err(bus, "failed to get rate\n");
		return clock;
	}
	debug("%s: CLK %ld\n", __func__, clock);

	ret = clk_enable(&clk);
	if (ret) {
		dev_err(bus, "failed to enable clock\n");
		return ret;
	}
	plat->frequency = clock;
	plat->speed_hz = plat->frequency / 2;

	/* init the zynq spi hw */
	zynqmp_qspi_init_hw(priv);

	return 0;
}

static int zynqmp_qspi_set_mode(struct udevice *bus, uint mode)
{
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;

	debug("%s\n", __func__);
	/* Set the SPI Clock phase and polarities */
	confr = readl(&regs->confr);
	confr &= ~(GQSPI_CONFIG_CPHA_MASK |
		   GQSPI_CONFIG_CPOL_MASK);

	if (mode & SPI_CPHA)
		confr |= GQSPI_CONFIG_CPHA_MASK;
	if (mode & SPI_CPOL)
		confr |= GQSPI_CONFIG_CPOL_MASK;

	writel(confr, &regs->confr);

	return 0;
}

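/*
 * Copy 'size' bytes from priv->tx_buf into the TX FIFO, padding any final
 * partial word with ones, and wait for the FIFO to drain.
 */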
static int zynqmp_qspi_fill_tx_fifo(struct zynqmp_qspi_priv *priv, u32 size)
{
	u32 data;
	int ret = 0;
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 *buf = (u32 *)priv->tx_buf;
	u32 len = size;

	debug("TxFIFO: 0x%x, size: 0x%x\n", readl(&regs->isr),
	      size);

	while (size) {
		ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXNFULL_MASK, 1,
					GQSPI_TIMEOUT, 1);
		if (ret) {
			printf("%s: Timeout\n", __func__);
			return ret;
		}

		if (size >= 4) {
			writel(*buf, &regs->txd0r);
			buf++;
			size -= 4;
		} else {
			switch (size) {
			case 1:
				data = *((u8 *)buf);
				buf += 1;
				data |= GENMASK(31, 8);
				break;
			case 2:
				data = *((u16 *)buf);
				buf += 2;
				data |= GENMASK(31, 16);
				break;
			case 3:
				data = *buf;
				buf += 3;
				data |= GENMASK(31, 24);
				break;
			}
			writel(data, &regs->txd0r);
			size = 0;
		}
	}

	ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXFIFOEMPTY_MASK, 1,
				GQSPI_TIMEOUT, 1);
	if (ret) {
		printf("%s: Timeout\n", __func__);
		return ret;
	}

	priv->tx_buf += len;
	return 0;
}

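/*
 * Generate the generic FIFO entries for the command, address and dummy
 * phases of the current spi-mem operation.
 */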
static void zynqmp_qspi_genfifo_cmd(struct zynqmp_qspi_priv *priv)
{
	const struct spi_mem_op *op = priv->op;
	u32 gen_fifo_cmd;
	u8 i, dummy_cycles, addr;

	/* Send opcode */
	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->cmd.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_TX;
	gen_fifo_cmd |= op->cmd.opcode;
	zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

	/* Send address */
	for (i = 0; i < op->addr.nbytes; i++) {
		addr = op->addr.val >> (8 * (op->addr.nbytes - i - 1));

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->addr.buswidth);
		gen_fifo_cmd |= GQSPI_GFIFO_TX;
		gen_fifo_cmd |= addr;

		debug("GFIFO_CMD_Cmd = 0x%x\n", gen_fifo_cmd);

		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}

	/* Send dummy */
	if (op->dummy.nbytes) {
		dummy_cycles = op->dummy.nbytes * 8 / op->dummy.buswidth;

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->dummy.buswidth);
		gen_fifo_cmd &= ~(GQSPI_GFIFO_TX | GQSPI_GFIFO_RX);
		gen_fifo_cmd |= GQSPI_GFIFO_DATA_XFR_MASK;
		gen_fifo_cmd |= dummy_cycles;
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}
}

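/*
 * The GENFIFO immediate field is only 8 bits wide, so transfers longer than
 * 255 bytes use the exponent mode, in which one entry moves 2^exp bytes.
 * Consume part of priv->len and return either the exponent (with
 * GQSPI_GFIFO_EXP_MASK set in the entry) or the remaining byte count.
 */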
static u32 zynqmp_qspi_calc_exp(struct zynqmp_qspi_priv *priv,
				u32 *gen_fifo_cmd)
{
	u32 expval = 8;
	u32 len;

	while (1) {
		if (priv->len > 255) {
			if (priv->len & (1 << expval)) {
				*gen_fifo_cmd &= ~GQSPI_GFIFO_IMD_MASK;
				*gen_fifo_cmd |= GQSPI_GFIFO_EXP_MASK;
				*gen_fifo_cmd |= expval;
				priv->len -= (1 << expval);
				return expval;
			}
			expval++;
		} else {
			*gen_fifo_cmd &= ~(GQSPI_GFIFO_IMD_MASK |
					   GQSPI_GFIFO_EXP_MASK);
			*gen_fifo_cmd |= (u8)priv->len;
			len = (u8)priv->len;
			priv->len = 0;
			return len;
		}
	}
}

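/* Data TX phase: generate GENFIFO entries and feed the TX FIFO chunk by chunk */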
static int zynqmp_qspi_genfifo_fill_tx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 len;
	int ret = 0;

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_TX |
			GQSPI_GFIFO_DATA_XFR_MASK;

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		debug("GFIFO_CMD_TX:0x%x\n", gen_fifo_cmd);

		if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
			ret = zynqmp_qspi_fill_tx_fifo(priv,
						       1 << len);
		else
			ret = zynqmp_qspi_fill_tx_fifo(priv,
						       len);

		if (ret)
			return ret;
	}
	return ret;
}

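/*
 * Receive the data phase through the DMA destination channel: program the
 * destination address and (rounded-up) size, generate the RX GENFIFO
 * entries and wait for the DMA done status before copying to rx_buf.
 */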
static int zynqmp_qspi_start_dma(struct zynqmp_qspi_priv *priv,
				 u32 gen_fifo_cmd, u32 *buf)
{
	u32 addr;
	u32 size;
	u32 actuallen = priv->len;
	int ret = 0;
	struct zynqmp_qspi_dma_regs *dma_regs = priv->dma_regs;

	writel((unsigned long)buf, &dma_regs->dmadst);
	writel(roundup(priv->len, GQSPI_DMA_ALIGN), &dma_regs->dmasize);
	writel(GQSPI_DMA_DST_I_STS_MASK, &dma_regs->dmaier);
	addr = (unsigned long)buf;
	size = roundup(priv->len, GQSPI_DMA_ALIGN);
	flush_dcache_range(addr, addr + size);

	while (priv->len) {
		zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		debug("GFIFO_CMD_RX:0x%x\n", gen_fifo_cmd);
	}

	ret = wait_for_bit_le32(&dma_regs->dmaisr, GQSPI_DMA_DST_I_STS_DONE,
				1, GQSPI_TIMEOUT, 1);
	if (ret) {
		printf("DMA Timeout:0x%x\n", readl(&dma_regs->dmaisr));
		return -ETIMEDOUT;
	}

	writel(GQSPI_DMA_DST_I_STS_DONE, &dma_regs->dmaisr);

	debug("buf:0x%lx, rxbuf:0x%lx, *buf:0x%x len: 0x%x\n",
	      (unsigned long)buf, (unsigned long)priv->rx_buf, *buf,
	      actuallen);

	if (buf != priv->rx_buf)
		memcpy(priv->rx_buf, buf, actuallen);

	return 0;
}

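/*
 * Data RX phase: DMA straight into the caller's buffer when it is 4-byte
 * aligned and a multiple of 4 bytes long, otherwise bounce through a
 * cache-aligned temporary buffer.
 */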
static int zynqmp_qspi_genfifo_fill_rx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 *buf;
	u32 actuallen = priv->len;

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_RX |
			GQSPI_GFIFO_DATA_XFR_MASK;

	/*
	 * Check if receive buffer is aligned to 4 byte and length
	 * is multiples of four byte as we are using dma to receive.
	 */
	if (!((unsigned long)priv->rx_buf & (GQSPI_DMA_ALIGN - 1)) &&
	    !(actuallen % GQSPI_DMA_ALIGN)) {
		buf = (u32 *)priv->rx_buf;
		return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
	}

	ALLOC_CACHE_ALIGN_BUFFER(u8, tmp, roundup(priv->len,
						  GQSPI_DMA_ALIGN));
	buf = (u32 *)tmp;
	return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
}

static int zynqmp_qspi_claim_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	return 0;
}

static int zynqmp_qspi_release_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	return 0;
}

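/*
 * spi-mem exec_op handler: assert the chip select, send the command, address
 * and dummy phases, run the data phase in the requested direction and
 * deassert the chip select again.
 */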
static int zynqmp_qspi_exec_op(struct spi_slave *slave,
			       const struct spi_mem_op *op)
{
	struct zynqmp_qspi_priv *priv = dev_get_priv(slave->dev->parent);
	int ret = 0;

	priv->op = op;
	priv->tx_buf = op->data.buf.out;
	priv->rx_buf = op->data.buf.in;
	priv->len = op->data.nbytes;

	zynqmp_qspi_chipselect(priv, 1);

	/* Send opcode, addr, dummy */
	zynqmp_qspi_genfifo_cmd(priv);

	/* Request the transfer */
	if (op->data.dir == SPI_MEM_DATA_IN)
		ret = zynqmp_qspi_genfifo_fill_rx(priv);
	else if (op->data.dir == SPI_MEM_DATA_OUT)
		ret = zynqmp_qspi_genfifo_fill_tx(priv);

	zynqmp_qspi_chipselect(priv, 0);

	return ret;
}

static const struct spi_controller_mem_ops zynqmp_qspi_mem_ops = {
	.exec_op = zynqmp_qspi_exec_op,
};

static const struct dm_spi_ops zynqmp_qspi_ops = {
	.claim_bus = zynqmp_qspi_claim_bus,
	.release_bus = zynqmp_qspi_release_bus,
	.set_speed = zynqmp_qspi_set_speed,
	.set_mode = zynqmp_qspi_set_mode,
	.mem_ops = &zynqmp_qspi_mem_ops,
};

static const struct udevice_id zynqmp_qspi_ids[] = {
	{ .compatible = "xlnx,zynqmp-qspi-1.0" },
	{ .compatible = "xlnx,versal-qspi-1.0" },
	{ }
};

U_BOOT_DRIVER(zynqmp_qspi) = {
	.name = "zynqmp_qspi",
	.id = UCLASS_SPI,
	.of_match = zynqmp_qspi_ids,
	.ops = &zynqmp_qspi_ops,
	.of_to_plat = zynqmp_qspi_of_to_plat,
	.plat_auto = sizeof(struct zynqmp_qspi_plat),
	.priv_auto = sizeof(struct zynqmp_qspi_priv),
	.probe = zynqmp_qspi_probe,
};