// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2018 Xilinx
 *
 * Xilinx ZynqMP Generic Quad-SPI(QSPI) controller driver(master mode only)
 */

#include <common.h>
#include <cpu_func.h>
#include <log.h>
#include <asm/arch/sys_proto.h>
#include <asm/cache.h>
#include <asm/io.h>
#include <clk.h>
#include <dm.h>
#include <malloc.h>
#include <memalign.h>
#include <spi.h>
#include <spi-mem.h>
#include <ubi_uboot.h>
#include <wait_bit.h>
#include <dm/device_compat.h>
#include <linux/bitops.h>
#include <linux/err.h>

#define GQSPI_GFIFO_STRT_MODE_MASK	BIT(29)
#define GQSPI_CONFIG_MODE_EN_MASK	(3 << 30)
#define GQSPI_CONFIG_DMA_MODE		(2 << 30)
#define GQSPI_CONFIG_CPHA_MASK		BIT(2)
#define GQSPI_CONFIG_CPOL_MASK		BIT(1)

/*
 * QSPI Interrupt Registers bit Masks
 *
 * All the four interrupt registers (Status/Mask/Enable/Disable) have the same
 * bit definitions.
 */
#define GQSPI_IXR_TXNFULL_MASK		0x00000004 /* QSPI TX FIFO Overflow */
#define GQSPI_IXR_TXFULL_MASK		0x00000008 /* QSPI TX FIFO is full */
#define GQSPI_IXR_RXNEMTY_MASK		0x00000010 /* QSPI RX FIFO Not Empty */
#define GQSPI_IXR_GFEMTY_MASK		0x00000080 /* QSPI Generic FIFO Empty */
#define GQSPI_IXR_GFNFULL_MASK		0x00000200 /* QSPI GENFIFO not full */
#define GQSPI_IXR_ALL_MASK		(GQSPI_IXR_TXNFULL_MASK | \
					 GQSPI_IXR_RXNEMTY_MASK)

/*
 * QSPI Enable Register bit Masks
 *
 * This register is used to enable or disable the QSPI controller
 */
#define GQSPI_ENABLE_ENABLE_MASK	0x00000001 /* QSPI Enable Bit Mask */

#define GQSPI_GFIFO_LOW_BUS		BIT(14)
#define GQSPI_GFIFO_CS_LOWER		BIT(12)
#define GQSPI_GFIFO_UP_BUS		BIT(15)
#define GQSPI_GFIFO_CS_UPPER		BIT(13)
#define GQSPI_SPI_MODE_QSPI		(3 << 10)
#define GQSPI_SPI_MODE_SPI		BIT(10)
#define GQSPI_SPI_MODE_DUAL_SPI		(2 << 10)
#define GQSPI_IMD_DATA_CS_ASSERT	5
#define GQSPI_IMD_DATA_CS_DEASSERT	5
#define GQSPI_GFIFO_TX			BIT(16)
#define GQSPI_GFIFO_RX			BIT(17)
#define GQSPI_GFIFO_STRIPE_MASK		BIT(18)
#define GQSPI_GFIFO_IMD_MASK		0xFF
#define GQSPI_GFIFO_EXP_MASK		BIT(9)
#define GQSPI_GFIFO_DATA_XFR_MASK	BIT(8)
#define GQSPI_STRT_GEN_FIFO		BIT(28)
#define GQSPI_GEN_FIFO_STRT_MOD		BIT(29)
#define GQSPI_GFIFO_WP_HOLD		BIT(19)
#define GQSPI_BAUD_DIV_MASK		(7 << 3)
#define GQSPI_DFLT_BAUD_RATE_DIV	BIT(3)
#define GQSPI_GFIFO_ALL_INT_MASK	0xFBE
#define GQSPI_DMA_DST_I_STS_DONE	BIT(1)
#define GQSPI_DMA_DST_I_STS_MASK	0xFE
#define MODEBITS			0x6

#define GQSPI_GFIFO_SELECT		BIT(0)
#define GQSPI_FIFO_THRESHOLD		1
#define GQSPI_GENFIFO_THRESHOLD		31

#define SPI_XFER_ON_BOTH		0
#define SPI_XFER_ON_LOWER		1
#define SPI_XFER_ON_UPPER		2

#define GQSPI_DMA_ALIGN			0x4
#define GQSPI_MAX_BAUD_RATE_VAL		7
#define GQSPI_DFLT_BAUD_RATE_VAL	2

#define GQSPI_TIMEOUT			100000000

#define GQSPI_BAUD_DIV_SHIFT		2
#define GQSPI_LPBK_DLY_ADJ_LPBK_SHIFT	5
#define GQSPI_LPBK_DLY_ADJ_DLY_1	0x2
#define GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT	3
#define GQSPI_LPBK_DLY_ADJ_DLY_0	0x3
#define GQSPI_USE_DATA_DLY		0x1
#define GQSPI_USE_DATA_DLY_SHIFT	31
#define GQSPI_DATA_DLY_ADJ_VALUE	0x2
#define GQSPI_DATA_DLY_ADJ_SHIFT	28
#define TAP_DLY_BYPASS_LQSPI_RX_VALUE	0x1
#define TAP_DLY_BYPASS_LQSPI_RX_SHIFT	2
#define GQSPI_DATA_DLY_ADJ_OFST		0x000001F8
#define IOU_TAPDLY_BYPASS_OFST		0xFF180390
#define GQSPI_LPBK_DLY_ADJ_LPBK_MASK	0x00000020
#define GQSPI_FREQ_40MHZ		40000000
#define GQSPI_FREQ_100MHZ		100000000
#define GQSPI_FREQ_150MHZ		150000000
#define IOU_TAPDLY_BYPASS_MASK		0x7

#define GQSPI_REG_OFFSET		0x100
#define GQSPI_DMA_REG_OFFSET		0x800

/* QSPI register offsets */
struct zynqmp_qspi_regs {
	u32 confr;	/* 0x00 */
	u32 isr;	/* 0x04 */
	u32 ier;	/* 0x08 */
	u32 idisr;	/* 0x0C */
	u32 imaskr;	/* 0x10 */
	u32 enbr;	/* 0x14 */
	u32 dr;		/* 0x18 */
	u32 txd0r;	/* 0x1C */
	u32 drxr;	/* 0x20 */
	u32 sicr;	/* 0x24 */
	u32 txftr;	/* 0x28 */
	u32 rxftr;	/* 0x2C */
	u32 gpior;	/* 0x30 */
	u32 reserved0;	/* 0x34 */
	u32 lpbkdly;	/* 0x38 */
	u32 reserved1;	/* 0x3C */
	u32 genfifo;	/* 0x40 */
	u32 gqspisel;	/* 0x44 */
	u32 reserved2;	/* 0x48 */
	u32 gqfifoctrl;	/* 0x4C */
	u32 gqfthr;	/* 0x50 */
	u32 gqpollcfg;	/* 0x54 */
	u32 gqpollto;	/* 0x58 */
	u32 gqxfersts;	/* 0x5C */
	u32 gqfifosnap;	/* 0x60 */
	u32 gqrxcpy;	/* 0x64 */
	u32 reserved3[36];	/* 0x68 */
	u32 gqspidlyadj;	/* 0xF8 */
};

struct zynqmp_qspi_dma_regs {
	u32 dmadst;	/* 0x00 */
	u32 dmasize;	/* 0x04 */
	u32 dmasts;	/* 0x08 */
	u32 dmactrl;	/* 0x0C */
	u32 reserved0;	/* 0x10 */
	u32 dmaisr;	/* 0x14 */
	u32 dmaier;	/* 0x18 */
	u32 dmaidr;	/* 0x1C */
	u32 dmaimr;	/* 0x20 */
	u32 dmactrl2;	/* 0x24 */
	u32 dmadstmsb;	/* 0x28 */
};

struct zynqmp_qspi_plat {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	u32 frequency;
	u32 speed_hz;
};

struct zynqmp_qspi_priv {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	const void *tx_buf;
	void *rx_buf;
	unsigned int len;
	int bytes_to_transfer;
	int bytes_to_receive;
	const struct spi_mem_op *op;
};

static int zynqmp_qspi_of_to_plat(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);

	debug("%s\n", __func__);

	plat->regs = (struct zynqmp_qspi_regs *)(dev_read_addr(bus) +
						 GQSPI_REG_OFFSET);
	plat->dma_regs = (struct zynqmp_qspi_dma_regs *)
			  (dev_read_addr(bus) + GQSPI_DMA_REG_OFFSET);

	return 0;
}

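/*
 * One-time controller setup: select the generic FIFO interface, mask and
 * clear all interrupts, program the FIFO thresholds and configure DMA
 * mode, WP hold, manual start and the default baud rate divider before
 * re-enabling the controller.
 */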
static void zynqmp_qspi_init_hw(struct zynqmp_qspi_priv *priv)
{
	u32 config_reg;
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_GFIFO_SELECT, &regs->gqspisel);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->idisr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->txftr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->rxftr);
	writel(GQSPI_GENFIFO_THRESHOLD, &regs->gqfthr);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->isr);
	writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	config_reg = readl(&regs->confr);
	config_reg &= ~(GQSPI_GFIFO_STRT_MODE_MASK |
			GQSPI_CONFIG_MODE_EN_MASK);
	config_reg |= GQSPI_CONFIG_DMA_MODE | GQSPI_GFIFO_WP_HOLD |
		      GQSPI_DFLT_BAUD_RATE_DIV | GQSPI_GFIFO_STRT_MODE_MASK;
	writel(config_reg, &regs->confr);

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
}

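/* This driver always uses the lower data bus and the lower chip select. */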
static u32 zynqmp_qspi_bus_select(struct zynqmp_qspi_priv *priv)
{
	u32 gqspi_fifo_reg = 0;

	gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
			 GQSPI_GFIFO_CS_LOWER;

	return gqspi_fifo_reg;
}

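/*
 * Map an spi-mem bus width (1, 2 or 4 data lines) to the GENFIFO SPI mode
 * field; unsupported widths fall back to single-line SPI.
 */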
static u32 zynqmp_qspi_genfifo_mode(u8 buswidth)
{
	switch (buswidth) {
	case 1:
		return GQSPI_SPI_MODE_SPI;
	case 2:
		return GQSPI_SPI_MODE_DUAL_SPI;
	case 4:
		return GQSPI_SPI_MODE_QSPI;
	default:
		debug("Unsupported bus width %u\n", buswidth);
		return GQSPI_SPI_MODE_SPI;
	}
}

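/*
 * Queue one entry into the generic FIFO, trigger a manual start and wait
 * for the GENFIFO-empty status so the next entry can be queued safely.
 */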
static void zynqmp_qspi_fill_gen_fifo(struct zynqmp_qspi_priv *priv,
				      u32 gqspi_fifo_reg)
{
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 config_reg, ier;
	int ret = 0;

	writel(gqspi_fifo_reg, &regs->genfifo);

	config_reg = readl(&regs->confr);
	/* Manual start if needed */
	config_reg |= GQSPI_STRT_GEN_FIFO;
	writel(config_reg, &regs->confr);

	/* Enable interrupts */
	ier = readl(&regs->ier);
	ier |= GQSPI_IXR_GFEMTY_MASK;
	writel(ier, &regs->ier);

	/* Wait until the gen fifo is empty to write the new command */
	ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_GFEMTY_MASK, 1,
				GQSPI_TIMEOUT, 1);
	if (ret)
		printf("%s Timeout\n", __func__);
}

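/*
 * Assert or de-assert the chip select by queueing a GENFIFO entry that
 * carries only the bus/CS selection and a small immediate delay, preceded
 * by a dummy entry.
 */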
static void zynqmp_qspi_chipselect(struct zynqmp_qspi_priv *priv, int is_on)
{
	u32 gqspi_fifo_reg = 0;

	if (is_on) {
		gqspi_fifo_reg = zynqmp_qspi_bus_select(priv);
		gqspi_fifo_reg |= GQSPI_SPI_MODE_SPI |
				  GQSPI_IMD_DATA_CS_ASSERT;
	} else {
		gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS;
		gqspi_fifo_reg |= GQSPI_IMD_DATA_CS_DEASSERT;
	}

	debug("GFIFO_CMD_CS: 0x%x\n", gqspi_fifo_reg);

	/* Dummy generic FIFO entry */
	zynqmp_qspi_fill_gen_fifo(priv, 0);

	zynqmp_qspi_fill_gen_fifo(priv, gqspi_fifo_reg);
}

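/*
 * Tune the IOU tap delay bypass, loopback delay and data delay for the
 * SCLK resulting from the given baud rate divider: below 40 MHz only the
 * RX tap delay bypass is set, up to 100 MHz the loopback and data delays
 * are also adjusted, and up to 150 MHz only the loopback delay is used.
 */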
void zynqmp_qspi_set_tapdelay(struct udevice *bus, u32 baudrateval)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 tapdlybypass = 0, lpbkdlyadj = 0, datadlyadj = 0, clk_rate;
	u32 reqhz = 0;

	clk_rate = plat->frequency;
	reqhz = (clk_rate / (GQSPI_BAUD_DIV_SHIFT << baudrateval));

	debug("%s, req_hz:%d, clk_rate:%d, baudrateval:%d\n",
	      __func__, reqhz, clk_rate, baudrateval);

	if (reqhz < GQSPI_FREQ_40MHZ) {
		zynqmp_mmio_read(IOU_TAPDLY_BYPASS_OFST, &tapdlybypass);
		tapdlybypass |= (TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				 TAP_DLY_BYPASS_LQSPI_RX_SHIFT);
	} else if (reqhz <= GQSPI_FREQ_100MHZ) {
		zynqmp_mmio_read(IOU_TAPDLY_BYPASS_OFST, &tapdlybypass);
		tapdlybypass |= (TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				 TAP_DLY_BYPASS_LQSPI_RX_SHIFT);
		lpbkdlyadj = readl(&regs->lpbkdly);
		lpbkdlyadj |= (GQSPI_LPBK_DLY_ADJ_LPBK_MASK);
		datadlyadj = readl(&regs->gqspidlyadj);
		datadlyadj |= ((GQSPI_USE_DATA_DLY << GQSPI_USE_DATA_DLY_SHIFT)
				| (GQSPI_DATA_DLY_ADJ_VALUE <<
				   GQSPI_DATA_DLY_ADJ_SHIFT));
	} else if (reqhz <= GQSPI_FREQ_150MHZ) {
		lpbkdlyadj = readl(&regs->lpbkdly);
		lpbkdlyadj |= ((GQSPI_LPBK_DLY_ADJ_LPBK_MASK) |
				GQSPI_LPBK_DLY_ADJ_DLY_0);
	}

	zynqmp_mmio_write(IOU_TAPDLY_BYPASS_OFST, IOU_TAPDLY_BYPASS_MASK,
			  tapdlybypass);
	writel(lpbkdlyadj, &regs->lpbkdly);
	writel(datadlyadj, &regs->gqspidlyadj);
}

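/*
 * Pick the smallest baud rate divider whose resulting SCLK does not exceed
 * the requested speed (capped at the input clock), program it and retune
 * the tap delays to match.
 */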
static int zynqmp_qspi_set_speed(struct udevice *bus, uint speed)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;
	u8 baud_rate_val = 0;

	debug("%s\n", __func__);
	if (speed > plat->frequency)
		speed = plat->frequency;

	if (plat->speed_hz != speed) {
		/* Set the clock frequency */
		/* If speed == 0, default to lowest speed */
		while ((baud_rate_val < 8) &&
		       ((plat->frequency /
			 (2 << baud_rate_val)) > speed))
			baud_rate_val++;

		if (baud_rate_val > GQSPI_MAX_BAUD_RATE_VAL)
			baud_rate_val = GQSPI_DFLT_BAUD_RATE_VAL;

		plat->speed_hz = plat->frequency / (2 << baud_rate_val);

		confr = readl(&regs->confr);
		confr &= ~GQSPI_BAUD_DIV_MASK;
		confr |= (baud_rate_val << 3);
		writel(confr, &regs->confr);
		zynqmp_qspi_set_tapdelay(bus, baud_rate_val);

		debug("regs=%p, speed=%d\n", priv->regs, plat->speed_hz);
	}

	return 0;
}

static int zynqmp_qspi_probe(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct clk clk;
	unsigned long clock;
	int ret;

	debug("%s: bus:%p, priv:%p\n", __func__, bus, priv);

	priv->regs = plat->regs;
	priv->dma_regs = plat->dma_regs;

	ret = clk_get_by_index(bus, 0, &clk);
	if (ret < 0) {
		dev_err(bus, "failed to get clock\n");
		return ret;
	}

	clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(clock)) {
		dev_err(bus, "failed to get rate\n");
		return clock;
	}
	debug("%s: CLK %ld\n", __func__, clock);

	ret = clk_enable(&clk);
	if (ret) {
		dev_err(bus, "failed to enable clock\n");
		return ret;
	}
	plat->frequency = clock;
	plat->speed_hz = plat->frequency / 2;

	/* init the zynq spi hw */
	zynqmp_qspi_init_hw(priv);

	return 0;
}

static int zynqmp_qspi_set_mode(struct udevice *bus, uint mode)
{
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;

	debug("%s\n", __func__);
	/* Set the SPI Clock phase and polarities */
	confr = readl(&regs->confr);
	confr &= ~(GQSPI_CONFIG_CPHA_MASK |
		   GQSPI_CONFIG_CPOL_MASK);

	if (mode & SPI_CPHA)
		confr |= GQSPI_CONFIG_CPHA_MASK;
	if (mode & SPI_CPOL)
		confr |= GQSPI_CONFIG_CPOL_MASK;

	writel(confr, &regs->confr);

	return 0;
}

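/*
 * Push TX data into the TX FIFO one word at a time; a trailing 1-3 byte
 * remainder is padded with 0xFF before being written.
 */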
static int zynqmp_qspi_fill_tx_fifo(struct zynqmp_qspi_priv *priv, u32 size)
{
	u32 data;
	int ret = 0;
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 *buf = (u32 *)priv->tx_buf;
	u32 len = size;

	debug("TxFIFO: 0x%x, size: 0x%x\n", readl(&regs->isr),
	      size);

	while (size) {
		ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXNFULL_MASK, 1,
					GQSPI_TIMEOUT, 1);
		if (ret) {
			printf("%s: Timeout\n", __func__);
			return ret;
		}

		if (size >= 4) {
			writel(*buf, &regs->txd0r);
			buf++;
			size -= 4;
		} else {
			switch (size) {
			case 1:
				data = *((u8 *)buf);
				buf += 1;
				data |= GENMASK(31, 8);
				break;
			case 2:
				data = *((u16 *)buf);
				buf += 2;
				data |= GENMASK(31, 16);
				break;
			case 3:
				data = *buf;
				buf += 3;
				data |= GENMASK(31, 24);
				break;
			}
			writel(data, &regs->txd0r);
			size = 0;
		}
	}

	priv->tx_buf += len;
	return 0;
}

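/*
 * Queue the command phase of an spi-mem operation: the opcode, the address
 * bytes (MSB first) and any dummy cycles, each as its own GENFIFO entry
 * using the bus width requested for that phase.
 */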
static void zynqmp_qspi_genfifo_cmd(struct zynqmp_qspi_priv *priv)
{
	const struct spi_mem_op *op = priv->op;
	u32 gen_fifo_cmd;
	u8 i, dummy_cycles, addr;

	/* Send opcode */
	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->cmd.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_TX;
	gen_fifo_cmd |= op->cmd.opcode;
	zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

	/* Send address */
	for (i = 0; i < op->addr.nbytes; i++) {
		addr = op->addr.val >> (8 * (op->addr.nbytes - i - 1));

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->addr.buswidth);
		gen_fifo_cmd |= GQSPI_GFIFO_TX;
		gen_fifo_cmd |= addr;

		debug("GFIFO_CMD_Cmd = 0x%x\n", gen_fifo_cmd);

		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}

	/* Send dummy */
	if (op->dummy.nbytes) {
		dummy_cycles = op->dummy.nbytes * 8 / op->dummy.buswidth;

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->dummy.buswidth);
		gen_fifo_cmd &= ~(GQSPI_GFIFO_TX | GQSPI_GFIFO_RX);
		gen_fifo_cmd |= GQSPI_GFIFO_DATA_XFR_MASK;
		gen_fifo_cmd |= dummy_cycles;
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}
}

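/*
 * The GENFIFO immediate field is only 8 bits wide, so requests longer than
 * 255 bytes are peeled off in power-of-two chunks using the exponent mode;
 * the final chunk (<= 255 bytes) is sent as an immediate length. Returns
 * the exponent or the immediate count and reduces priv->len accordingly.
 */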
static u32 zynqmp_qspi_calc_exp(struct zynqmp_qspi_priv *priv,
				u32 *gen_fifo_cmd)
{
	u32 expval = 8;
	u32 len;

	while (1) {
		if (priv->len > 255) {
			if (priv->len & (1 << expval)) {
				*gen_fifo_cmd &= ~GQSPI_GFIFO_IMD_MASK;
				*gen_fifo_cmd |= GQSPI_GFIFO_EXP_MASK;
				*gen_fifo_cmd |= expval;
				priv->len -= (1 << expval);
				return expval;
			}
			expval++;
		} else {
			*gen_fifo_cmd &= ~(GQSPI_GFIFO_IMD_MASK |
					   GQSPI_GFIFO_EXP_MASK);
			*gen_fifo_cmd |= (u8)priv->len;
			len = (u8)priv->len;
			priv->len = 0;
			return len;
		}
	}
}

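/*
 * Queue TX GENFIFO entries for the data phase and feed the payload into
 * the TX FIFO chunk by chunk, using the lengths computed by
 * zynqmp_qspi_calc_exp().
 */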
static int zynqmp_qspi_genfifo_fill_tx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 len;
	int ret = 0;

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_TX |
			GQSPI_GFIFO_DATA_XFR_MASK;

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		debug("GFIFO_CMD_TX:0x%x\n", gen_fifo_cmd);

		if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
			ret = zynqmp_qspi_fill_tx_fifo(priv,
						       1 << len);
		else
			ret = zynqmp_qspi_fill_tx_fifo(priv,
						       len);

		if (ret)
			return ret;
	}
	return ret;
}

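/*
 * Receive data through the QSPI DMA: program the destination address and
 * DMA-aligned size, flush the destination cache range, queue the RX
 * GENFIFO entries and wait for the DMA done status. If a bounce buffer
 * was used, copy the result back to the caller's buffer.
 */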
574static int zynqmp_qspi_start_dma(struct zynqmp_qspi_priv *priv,
575 u32 gen_fifo_cmd, u32 *buf)
576{
577 u32 addr;
Ashok Reddy Soma822a2432021-08-20 07:43:17 -0600578 u32 size;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530579 u32 actuallen = priv->len;
580 int ret = 0;
581 struct zynqmp_qspi_dma_regs *dma_regs = priv->dma_regs;
582
583 writel((unsigned long)buf, &dma_regs->dmadst);
Ashok Reddy Soma822a2432021-08-20 07:43:17 -0600584 writel(roundup(priv->len, GQSPI_DMA_ALIGN), &dma_regs->dmasize);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530585 writel(GQSPI_DMA_DST_I_STS_MASK, &dma_regs->dmaier);
586 addr = (unsigned long)buf;
Ashok Reddy Soma822a2432021-08-20 07:43:17 -0600587 size = roundup(priv->len, GQSPI_DMA_ALIGN);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530588 flush_dcache_range(addr, addr + size);
589
590 while (priv->len) {
Ashok Reddy Soma822a2432021-08-20 07:43:17 -0600591 zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530592 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
593
594 debug("GFIFO_CMD_RX:0x%x\n", gen_fifo_cmd);
595 }
596
597 ret = wait_for_bit_le32(&dma_regs->dmaisr, GQSPI_DMA_DST_I_STS_DONE,
598 1, GQSPI_TIMEOUT, 1);
599 if (ret) {
600 printf("DMA Timeout:0x%x\n", readl(&dma_regs->dmaisr));
601 return -ETIMEDOUT;
602 }
603
604 writel(GQSPI_DMA_DST_I_STS_DONE, &dma_regs->dmaisr);
605
606 debug("buf:0x%lx, rxbuf:0x%lx, *buf:0x%x len: 0x%x\n",
607 (unsigned long)buf, (unsigned long)priv->rx_buf, *buf,
608 actuallen);
609
610 if (buf != priv->rx_buf)
611 memcpy(priv->rx_buf, buf, actuallen);
612
613 return 0;
614}
615
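/*
 * Start the RX data phase. The DMA needs a 4-byte aligned buffer and
 * length, so unaligned requests are bounced through a cache-aligned
 * temporary buffer.
 */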
static int zynqmp_qspi_genfifo_fill_rx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 *buf;
	u32 actuallen = priv->len;

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_RX |
			GQSPI_GFIFO_DATA_XFR_MASK;

	/*
	 * Check if the receive buffer is 4-byte aligned and the length is a
	 * multiple of four bytes, as we are using DMA to receive.
	 */
	if (!((unsigned long)priv->rx_buf & (GQSPI_DMA_ALIGN - 1)) &&
	    !(actuallen % GQSPI_DMA_ALIGN)) {
		buf = (u32 *)priv->rx_buf;
		return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
	}

	ALLOC_CACHE_ALIGN_BUFFER(u8, tmp, roundup(priv->len,
						  GQSPI_DMA_ALIGN));
	buf = (u32 *)tmp;
	return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
}

static int zynqmp_qspi_claim_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	return 0;
}

static int zynqmp_qspi_release_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	return 0;
}

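/*
 * spi-mem exec_op hook: latch the operation in priv, assert CS, send the
 * opcode/address/dummy phases and run the data phase in the requested
 * direction before de-asserting CS.
 */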
static int zynqmp_qspi_exec_op(struct spi_slave *slave,
			       const struct spi_mem_op *op)
{
	struct zynqmp_qspi_priv *priv = dev_get_priv(slave->dev->parent);
	int ret = 0;

	priv->op = op;
	priv->tx_buf = op->data.buf.out;
	priv->rx_buf = op->data.buf.in;
	priv->len = op->data.nbytes;

	zynqmp_qspi_chipselect(priv, 1);

	/* Send opcode, addr, dummy */
	zynqmp_qspi_genfifo_cmd(priv);

	/* Request the transfer */
	if (op->data.dir == SPI_MEM_DATA_IN)
		ret = zynqmp_qspi_genfifo_fill_rx(priv);
	else if (op->data.dir == SPI_MEM_DATA_OUT)
		ret = zynqmp_qspi_genfifo_fill_tx(priv);

	zynqmp_qspi_chipselect(priv, 0);

	return ret;
}

static const struct spi_controller_mem_ops zynqmp_qspi_mem_ops = {
	.exec_op = zynqmp_qspi_exec_op,
};

static const struct dm_spi_ops zynqmp_qspi_ops = {
	.claim_bus = zynqmp_qspi_claim_bus,
	.release_bus = zynqmp_qspi_release_bus,
	.set_speed = zynqmp_qspi_set_speed,
	.set_mode = zynqmp_qspi_set_mode,
	.mem_ops = &zynqmp_qspi_mem_ops,
};

static const struct udevice_id zynqmp_qspi_ids[] = {
	{ .compatible = "xlnx,zynqmp-qspi-1.0" },
	{ .compatible = "xlnx,versal-qspi-1.0" },
	{ }
};

U_BOOT_DRIVER(zynqmp_qspi) = {
	.name = "zynqmp_qspi",
	.id = UCLASS_SPI,
	.of_match = zynqmp_qspi_ids,
	.ops = &zynqmp_qspi_ops,
	.of_to_plat = zynqmp_qspi_of_to_plat,
	.plat_auto = sizeof(struct zynqmp_qspi_plat),
	.priv_auto = sizeof(struct zynqmp_qspi_priv),
	.probe = zynqmp_qspi_probe,
};