blob: 78a1b487311063e8a48f8aeda0aa3eeacaa43cfb [file] [log] [blame]
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +05301// SPDX-License-Identifier: GPL-2.0+
2/*
3 * (C) Copyright 2018 Xilinx
4 *
5 * Xilinx ZynqMP Generic Quad-SPI(QSPI) controller driver(master mode only)
6 */
7
8#include <common.h>
Simon Glass63334482019-11-14 12:57:39 -07009#include <cpu_func.h>
Simon Glass0f2af882020-05-10 11:40:05 -060010#include <log.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053011#include <asm/arch/sys_proto.h>
Simon Glass274e0b02020-05-10 11:39:56 -060012#include <asm/cache.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053013#include <asm/io.h>
14#include <clk.h>
15#include <dm.h>
16#include <malloc.h>
17#include <memalign.h>
18#include <spi.h>
Brandon Maier4d9cce72021-01-20 10:39:46 -060019#include <spi-mem.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053020#include <ubi_uboot.h>
21#include <wait_bit.h>
Simon Glass9bc15642020-02-03 07:36:16 -070022#include <dm/device_compat.h>
Simon Glass4dcacfc2020-05-10 11:40:13 -060023#include <linux/bitops.h>
Simon Glassd66c5f72020-02-03 07:36:15 -070024#include <linux/err.h>
Ashok Reddy Somae3c77a62022-08-25 06:59:01 -060025#include <zynqmp_firmware.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053026
27#define GQSPI_GFIFO_STRT_MODE_MASK BIT(29)
28#define GQSPI_CONFIG_MODE_EN_MASK (3 << 30)
29#define GQSPI_CONFIG_DMA_MODE (2 << 30)
30#define GQSPI_CONFIG_CPHA_MASK BIT(2)
31#define GQSPI_CONFIG_CPOL_MASK BIT(1)
32
33/*
34 * QSPI Interrupt Registers bit Masks
35 *
36 * All the four interrupt registers (Status/Mask/Enable/Disable) have the same
37 * bit definitions.
38 */
39#define GQSPI_IXR_TXNFULL_MASK 0x00000004 /* QSPI TX FIFO Overflow */
40#define GQSPI_IXR_TXFULL_MASK 0x00000008 /* QSPI TX FIFO is full */
Ashok Reddy Soma26f77d72021-10-19 19:43:00 +053041#define GQSPI_IXR_TXFIFOEMPTY_MASK 0x00000100 /* QSPI TX FIFO is Empty */
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053042#define GQSPI_IXR_RXNEMTY_MASK 0x00000010 /* QSPI RX FIFO Not Empty */
43#define GQSPI_IXR_GFEMTY_MASK 0x00000080 /* QSPI Generic FIFO Empty */
Ashok Reddy Soma2af829f2021-05-25 06:36:27 -060044#define GQSPI_IXR_GFNFULL_MASK 0x00000200 /* QSPI GENFIFO not full */
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053045#define GQSPI_IXR_ALL_MASK (GQSPI_IXR_TXNFULL_MASK | \
46 GQSPI_IXR_RXNEMTY_MASK)
47
48/*
49 * QSPI Enable Register bit Masks
50 *
51 * This register is used to enable or disable the QSPI controller
52 */
53#define GQSPI_ENABLE_ENABLE_MASK 0x00000001 /* QSPI Enable Bit Mask */
54
55#define GQSPI_GFIFO_LOW_BUS BIT(14)
56#define GQSPI_GFIFO_CS_LOWER BIT(12)
57#define GQSPI_GFIFO_UP_BUS BIT(15)
58#define GQSPI_GFIFO_CS_UPPER BIT(13)
59#define GQSPI_SPI_MODE_QSPI (3 << 10)
60#define GQSPI_SPI_MODE_SPI BIT(10)
61#define GQSPI_SPI_MODE_DUAL_SPI (2 << 10)
62#define GQSPI_IMD_DATA_CS_ASSERT 5
63#define GQSPI_IMD_DATA_CS_DEASSERT 5
64#define GQSPI_GFIFO_TX BIT(16)
65#define GQSPI_GFIFO_RX BIT(17)
66#define GQSPI_GFIFO_STRIPE_MASK BIT(18)
67#define GQSPI_GFIFO_IMD_MASK 0xFF
68#define GQSPI_GFIFO_EXP_MASK BIT(9)
69#define GQSPI_GFIFO_DATA_XFR_MASK BIT(8)
70#define GQSPI_STRT_GEN_FIFO BIT(28)
71#define GQSPI_GEN_FIFO_STRT_MOD BIT(29)
72#define GQSPI_GFIFO_WP_HOLD BIT(19)
73#define GQSPI_BAUD_DIV_MASK (7 << 3)
74#define GQSPI_DFLT_BAUD_RATE_DIV BIT(3)
75#define GQSPI_GFIFO_ALL_INT_MASK 0xFBE
76#define GQSPI_DMA_DST_I_STS_DONE BIT(1)
77#define GQSPI_DMA_DST_I_STS_MASK 0xFE
78#define MODEBITS 0x6
79
80#define GQSPI_GFIFO_SELECT BIT(0)
81#define GQSPI_FIFO_THRESHOLD 1
Ashok Reddy Soma822a2432021-08-20 07:43:17 -060082#define GQSPI_GENFIFO_THRESHOLD 31
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053083
84#define SPI_XFER_ON_BOTH 0
85#define SPI_XFER_ON_LOWER 1
86#define SPI_XFER_ON_UPPER 2
87
88#define GQSPI_DMA_ALIGN 0x4
89#define GQSPI_MAX_BAUD_RATE_VAL 7
90#define GQSPI_DFLT_BAUD_RATE_VAL 2
91
92#define GQSPI_TIMEOUT 100000000
93
94#define GQSPI_BAUD_DIV_SHIFT 2
95#define GQSPI_LPBK_DLY_ADJ_LPBK_SHIFT 5
96#define GQSPI_LPBK_DLY_ADJ_DLY_1 0x2
97#define GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT 3
98#define GQSPI_LPBK_DLY_ADJ_DLY_0 0x3
99#define GQSPI_USE_DATA_DLY 0x1
100#define GQSPI_USE_DATA_DLY_SHIFT 31
101#define GQSPI_DATA_DLY_ADJ_VALUE 0x2
102#define GQSPI_DATA_DLY_ADJ_SHIFT 28
103#define TAP_DLY_BYPASS_LQSPI_RX_VALUE 0x1
104#define TAP_DLY_BYPASS_LQSPI_RX_SHIFT 2
105#define GQSPI_DATA_DLY_ADJ_OFST 0x000001F8
/*
 * Address of the tap-delay bypass register; differs between ZynqMP and
 * Versal. Parenthesized so the ?: expression survives macro expansion
 * in any surrounding context.
 */
#define IOU_TAPDLY_BYPASS_OFST	(!IS_ENABLED(CONFIG_ARCH_VERSAL) ? \
				 0xFF180390 : 0xF103003C)
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530108#define GQSPI_LPBK_DLY_ADJ_LPBK_MASK 0x00000020
Ashok Reddy Somae3c77a62022-08-25 06:59:01 -0600109#define GQSPI_FREQ_37_5MHZ 37500000
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530110#define GQSPI_FREQ_40MHZ 40000000
111#define GQSPI_FREQ_100MHZ 100000000
112#define GQSPI_FREQ_150MHZ 150000000
113#define IOU_TAPDLY_BYPASS_MASK 0x7
114
115#define GQSPI_REG_OFFSET 0x100
116#define GQSPI_DMA_REG_OFFSET 0x800
117
/* QSPI controller register map (at controller base + GQSPI_REG_OFFSET) */
struct zynqmp_qspi_regs {
	u32 confr;	/* 0x00 - configuration: mode, baud div, start mode */
	u32 isr;	/* 0x04 - interrupt status */
	u32 ier;	/* 0x08 - interrupt enable */
	u32 idisr;	/* 0x0C - interrupt disable */
	u32 imaskr;	/* 0x10 - interrupt mask */
	u32 enbr;	/* 0x14 - controller enable (bit 0) */
	u32 dr;		/* 0x18 */
	u32 txd0r;	/* 0x1C - TX data port */
	u32 drxr;	/* 0x20 - RX data port */
	u32 sicr;	/* 0x24 */
	u32 txftr;	/* 0x28 - TX FIFO threshold */
	u32 rxftr;	/* 0x2C - RX FIFO threshold */
	u32 gpior;	/* 0x30 */
	u32 reserved0;	/* 0x34 */
	u32 lpbkdly;	/* 0x38 - loopback clock delay adjust */
	u32 reserved1;	/* 0x3C */
	u32 genfifo;	/* 0x40 - generic command FIFO entry */
	u32 gqspisel;	/* 0x44 - generic QSPI select */
	u32 reserved2;	/* 0x48 */
	u32 gqfifoctrl;	/* 0x4C */
	u32 gqfthr;	/* 0x50 - generic FIFO threshold */
	u32 gqpollcfg;	/* 0x54 */
	u32 gqpollto;	/* 0x58 */
	u32 gqxfersts;	/* 0x5C */
	u32 gqfifosnap;	/* 0x60 */
	u32 gqrxcpy;	/* 0x64 */
	u32 reserved3[36]; /* 0x68 */
	u32 gqspidlyadj; /* 0xF8 - data delay adjust */
};
149
/* GQSPI destination-DMA register map (at controller base + GQSPI_DMA_REG_OFFSET) */
struct zynqmp_qspi_dma_regs {
	u32 dmadst;	/* 0x00 - destination address (low 32 bits) */
	u32 dmasize;	/* 0x04 - transfer size in bytes */
	u32 dmasts;	/* 0x08 */
	u32 dmactrl;	/* 0x0C */
	u32 reserved0;	/* 0x10 */
	u32 dmaisr;	/* 0x14 - DMA interrupt status (DONE bit polled here) */
	u32 dmaier;	/* 0x18 - DMA interrupt enable */
	u32 dmaidr;	/* 0x1C */
	u32 dmaimr;	/* 0x20 */
	u32 dmactrl2;	/* 0x24 */
	u32 dmadstmsb;	/* 0x28 - destination address upper bits (not written here) */
};
163
/* Platform data filled from the device tree in zynqmp_qspi_of_to_plat() */
struct zynqmp_qspi_plat {
	struct zynqmp_qspi_regs *regs;		/* controller registers */
	struct zynqmp_qspi_dma_regs *dma_regs;	/* RX DMA registers */
	u32 frequency;		/* input reference clock rate (Hz), set in probe */
	u32 speed_hz;		/* currently programmed bus speed (Hz) */
	unsigned int io_mode;	/* nonzero: polled IO mode ("has-io-mode" DT prop) */
};
171
/* Per-device run-time state */
struct zynqmp_qspi_priv {
	struct zynqmp_qspi_regs *regs;		/* controller registers (from plat) */
	struct zynqmp_qspi_dma_regs *dma_regs;	/* RX DMA registers (from plat) */
	const void *tx_buf;	/* data-phase TX buffer cursor */
	void *rx_buf;		/* data-phase RX buffer */
	unsigned int len;	/* bytes remaining in the data phase */
	unsigned int io_mode;	/* copied from plat: polled IO instead of DMA */
	int bytes_to_transfer;	/* TX byte counter (not referenced in this file) */
	int bytes_to_receive;	/* RX byte counter used by the IO-mode poll loop */
	const struct spi_mem_op *op;	/* spi-mem operation being executed */
};
183
Simon Glassaad29ae2020-12-03 16:55:21 -0700184static int zynqmp_qspi_of_to_plat(struct udevice *bus)
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530185{
Simon Glass95588622020-12-22 19:30:28 -0700186 struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530187
188 debug("%s\n", __func__);
189
Masahiro Yamadaa89b4de2020-07-17 14:36:48 +0900190 plat->regs = (struct zynqmp_qspi_regs *)(dev_read_addr(bus) +
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530191 GQSPI_REG_OFFSET);
192 plat->dma_regs = (struct zynqmp_qspi_dma_regs *)
Masahiro Yamadaa89b4de2020-07-17 14:36:48 +0900193 (dev_read_addr(bus) + GQSPI_DMA_REG_OFFSET);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530194
Ashok Reddy Soma96db8b62022-08-25 06:59:03 -0600195 plat->io_mode = dev_read_bool(bus, "has-io-mode");
196
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530197 return 0;
198}
199
/*
 * One-time controller initialisation: route the FIFOs to the generic
 * QSPI block, quiesce interrupts, program FIFO thresholds, then
 * configure manual-start mode and DMA (or IO) mode with the controller
 * disabled, and finally re-enable it.
 */
static void zynqmp_qspi_init_hw(struct zynqmp_qspi_priv *priv)
{
	u32 config_reg;
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_GFIFO_SELECT, &regs->gqspisel);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->idisr);	/* disable all irqs */
	writel(GQSPI_FIFO_THRESHOLD, &regs->txftr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->rxftr);
	writel(GQSPI_GENFIFO_THRESHOLD, &regs->gqfthr);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->isr);	/* clear stale status */
	/* Clear the enable bit while reconfiguring */
	writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	config_reg = readl(&regs->confr);
	config_reg &= ~(GQSPI_GFIFO_STRT_MODE_MASK |
			GQSPI_CONFIG_MODE_EN_MASK);
	config_reg |= GQSPI_GFIFO_WP_HOLD | GQSPI_DFLT_BAUD_RATE_DIV;
	config_reg |= GQSPI_GFIFO_STRT_MODE_MASK;	/* manual start mode */
	if (!priv->io_mode)
		config_reg |= GQSPI_CONFIG_DMA_MODE;

	writel(config_reg, &regs->confr);

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
}
225
226static u32 zynqmp_qspi_bus_select(struct zynqmp_qspi_priv *priv)
227{
228 u32 gqspi_fifo_reg = 0;
229
230 gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
231 GQSPI_GFIFO_CS_LOWER;
232
233 return gqspi_fifo_reg;
234}
235
Brandon Maier4d9cce72021-01-20 10:39:46 -0600236static u32 zynqmp_qspi_genfifo_mode(u8 buswidth)
237{
238 switch (buswidth) {
239 case 1:
240 return GQSPI_SPI_MODE_SPI;
241 case 2:
242 return GQSPI_SPI_MODE_DUAL_SPI;
243 case 4:
244 return GQSPI_SPI_MODE_QSPI;
245 default:
246 debug("Unsupported bus width %u\n", buswidth);
247 return GQSPI_SPI_MODE_SPI;
248 }
249}
250
/*
 * Push one entry into the generic command FIFO, trigger a manual start,
 * and wait for the generic FIFO to drain so the next entry can be
 * queued safely. Timeouts are reported but not propagated.
 */
static void zynqmp_qspi_fill_gen_fifo(struct zynqmp_qspi_priv *priv,
				      u32 gqspi_fifo_reg)
{
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 config_reg, ier;
	int ret = 0;

	writel(gqspi_fifo_reg, &regs->genfifo);

	config_reg = readl(&regs->confr);
	/* Manual start if needed */
	config_reg |= GQSPI_STRT_GEN_FIFO;
	writel(config_reg, &regs->confr);

	/* Enable interrupts */
	ier = readl(&regs->ier);
	ier |= GQSPI_IXR_GFEMTY_MASK;
	writel(ier, &regs->ier);

	/* Wait until the gen fifo is empty to write the new command */
	ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_GFEMTY_MASK, 1,
				GQSPI_TIMEOUT, 1);
	if (ret)
		printf("%s Timeout\n", __func__);

}
277
278static void zynqmp_qspi_chipselect(struct zynqmp_qspi_priv *priv, int is_on)
279{
280 u32 gqspi_fifo_reg = 0;
281
282 if (is_on) {
283 gqspi_fifo_reg = zynqmp_qspi_bus_select(priv);
284 gqspi_fifo_reg |= GQSPI_SPI_MODE_SPI |
285 GQSPI_IMD_DATA_CS_ASSERT;
286 } else {
287 gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS;
288 gqspi_fifo_reg |= GQSPI_IMD_DATA_CS_DEASSERT;
289 }
290
291 debug("GFIFO_CMD_CS: 0x%x\n", gqspi_fifo_reg);
292
293 zynqmp_qspi_fill_gen_fifo(priv, gqspi_fifo_reg);
294}
295
/*
 * Program the RX tap/loopback delays appropriate for the effective bus
 * frequency implied by @baudrateval. ZynqMP writes the SoC-level bypass
 * register through the PMU firmware (zynqmp_mmio_write); Versal writes
 * it directly. The frequency thresholds and delay values differ per SoC.
 */
void zynqmp_qspi_set_tapdelay(struct udevice *bus, u32 baudrateval)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 tapdlybypass = 0, lpbkdlyadj = 0, datadlyadj = 0, clk_rate;
	u32 reqhz = 0;

	/* Effective SCLK = input clock / 2^(baudrateval + 1) */
	clk_rate = plat->frequency;
	reqhz = (clk_rate / (GQSPI_BAUD_DIV_SHIFT << baudrateval));

	debug("%s, req_hz:%d, clk_rate:%d, baudrateval:%d\n",
	      __func__, reqhz, clk_rate, baudrateval);

	if (!IS_ENABLED(CONFIG_ARCH_VERSAL)) {
		/* ZynqMP: thresholds at 40 MHz, 100 MHz, 150 MHz */
		if (reqhz <= GQSPI_FREQ_40MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
					TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
		} else if (reqhz <= GQSPI_FREQ_100MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
					TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK;
			datadlyadj = (GQSPI_USE_DATA_DLY <<
				      GQSPI_USE_DATA_DLY_SHIFT) |
				       (GQSPI_DATA_DLY_ADJ_VALUE <<
					GQSPI_DATA_DLY_ADJ_SHIFT);
		} else if (reqhz <= GQSPI_FREQ_150MHZ) {
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK |
				      GQSPI_LPBK_DLY_ADJ_DLY_0;
		}
		/* SoC-level register: must go through the firmware interface */
		zynqmp_mmio_write(IOU_TAPDLY_BYPASS_OFST,
				  IOU_TAPDLY_BYPASS_MASK, tapdlybypass);
	} else {
		/* Versal: thresholds at 37.5 MHz, 100 MHz, 150 MHz */
		if (reqhz <= GQSPI_FREQ_37_5MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
					TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
		} else if (reqhz <= GQSPI_FREQ_100MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
					TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK;
			datadlyadj = GQSPI_USE_DATA_DLY <<
				      GQSPI_USE_DATA_DLY_SHIFT;
		} else if (reqhz <= GQSPI_FREQ_150MHZ) {
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK |
				      (GQSPI_LPBK_DLY_ADJ_DLY_1 <<
				       GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT);
		}
		writel(tapdlybypass, IOU_TAPDLY_BYPASS_OFST);
	}
	writel(lpbkdlyadj, &regs->lpbkdly);
	writel(datadlyadj, &regs->gqspidlyadj);
}
348
349static int zynqmp_qspi_set_speed(struct udevice *bus, uint speed)
350{
Simon Glass95588622020-12-22 19:30:28 -0700351 struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530352 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
353 struct zynqmp_qspi_regs *regs = priv->regs;
354 u32 confr;
355 u8 baud_rate_val = 0;
356
357 debug("%s\n", __func__);
358 if (speed > plat->frequency)
359 speed = plat->frequency;
360
Brandon Maierb8003d52021-01-20 14:28:30 -0600361 if (plat->speed_hz != speed) {
362 /* Set the clock frequency */
363 /* If speed == 0, default to lowest speed */
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530364 while ((baud_rate_val < 8) &&
365 ((plat->frequency /
366 (2 << baud_rate_val)) > speed))
367 baud_rate_val++;
368
369 if (baud_rate_val > GQSPI_MAX_BAUD_RATE_VAL)
370 baud_rate_val = GQSPI_DFLT_BAUD_RATE_VAL;
371
372 plat->speed_hz = plat->frequency / (2 << baud_rate_val);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530373
Brandon Maierb8003d52021-01-20 14:28:30 -0600374 confr = readl(&regs->confr);
375 confr &= ~GQSPI_BAUD_DIV_MASK;
376 confr |= (baud_rate_val << 3);
377 writel(confr, &regs->confr);
378 zynqmp_qspi_set_tapdelay(bus, baud_rate_val);
379
380 debug("regs=%p, speed=%d\n", priv->regs, plat->speed_hz);
381 }
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530382
383 return 0;
384}
385
/*
 * Probe: copy register addresses and IO-mode flag from platform data,
 * acquire and enable the reference clock, record its rate, and run the
 * one-time hardware init. Returns 0 or a negative clock-API error.
 */
static int zynqmp_qspi_probe(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct clk clk;
	unsigned long clock;
	int ret;

	debug("%s: bus:%p, priv:%p\n", __func__, bus, priv);

	priv->regs = plat->regs;
	priv->dma_regs = plat->dma_regs;
	priv->io_mode = plat->io_mode;

	ret = clk_get_by_index(bus, 0, &clk);
	if (ret < 0) {
		dev_err(bus, "failed to get clock\n");
		return ret;
	}

	clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(clock)) {
		dev_err(bus, "failed to get rate\n");
		return clock;
	}
	debug("%s: CLK %ld\n", __func__, clock);

	ret = clk_enable(&clk);
	if (ret) {
		dev_err(bus, "failed to enable clock\n");
		return ret;
	}
	plat->frequency = clock;
	plat->speed_hz = plat->frequency / 2;

	/* init the zynq spi hw */
	zynqmp_qspi_init_hw(priv);

	return 0;
}
426
427static int zynqmp_qspi_set_mode(struct udevice *bus, uint mode)
428{
429 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
430 struct zynqmp_qspi_regs *regs = priv->regs;
431 u32 confr;
432
433 debug("%s\n", __func__);
434 /* Set the SPI Clock phase and polarities */
435 confr = readl(&regs->confr);
436 confr &= ~(GQSPI_CONFIG_CPHA_MASK |
437 GQSPI_CONFIG_CPOL_MASK);
438
439 if (mode & SPI_CPHA)
440 confr |= GQSPI_CONFIG_CPHA_MASK;
441 if (mode & SPI_CPOL)
442 confr |= GQSPI_CONFIG_CPOL_MASK;
443
444 writel(confr, &regs->confr);
445
446 return 0;
447}
448
449static int zynqmp_qspi_fill_tx_fifo(struct zynqmp_qspi_priv *priv, u32 size)
450{
451 u32 data;
452 int ret = 0;
453 struct zynqmp_qspi_regs *regs = priv->regs;
454 u32 *buf = (u32 *)priv->tx_buf;
455 u32 len = size;
456
457 debug("TxFIFO: 0x%x, size: 0x%x\n", readl(&regs->isr),
458 size);
459
460 while (size) {
461 ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXNFULL_MASK, 1,
462 GQSPI_TIMEOUT, 1);
463 if (ret) {
464 printf("%s: Timeout\n", __func__);
465 return ret;
466 }
467
468 if (size >= 4) {
469 writel(*buf, &regs->txd0r);
470 buf++;
471 size -= 4;
472 } else {
473 switch (size) {
474 case 1:
475 data = *((u8 *)buf);
476 buf += 1;
477 data |= GENMASK(31, 8);
478 break;
479 case 2:
480 data = *((u16 *)buf);
481 buf += 2;
482 data |= GENMASK(31, 16);
483 break;
484 case 3:
T Karthik Reddycc59fc92020-11-19 05:00:36 -0700485 data = *buf;
486 buf += 3;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530487 data |= GENMASK(31, 24);
488 break;
489 }
490 writel(data, &regs->txd0r);
491 size = 0;
492 }
493 }
494
Ashok Reddy Soma26f77d72021-10-19 19:43:00 +0530495 ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXFIFOEMPTY_MASK, 1,
496 GQSPI_TIMEOUT, 1);
497 if (ret) {
498 printf("%s: Timeout\n", __func__);
499 return ret;
500 }
501
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530502 priv->tx_buf += len;
503 return 0;
504}
505
/*
 * Send the command phase of a spi-mem operation: the opcode, the address
 * bytes (most significant first) and the dummy cycles, each queued as a
 * GENFIFO entry using the bus width requested for that phase.
 */
static void zynqmp_qspi_genfifo_cmd(struct zynqmp_qspi_priv *priv)
{
	const struct spi_mem_op *op = priv->op;
	u32 gen_fifo_cmd;
	u8 i, dummy_cycles, addr;

	/* Send opcode */
	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->cmd.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_TX;
	gen_fifo_cmd |= op->cmd.opcode;
	zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

	/* Send address, one byte per GENFIFO entry, MSB first */
	for (i = 0; i < op->addr.nbytes; i++) {
		addr = op->addr.val >> (8 * (op->addr.nbytes - i - 1));

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->addr.buswidth);
		gen_fifo_cmd |= GQSPI_GFIFO_TX;
		gen_fifo_cmd |= addr;

		debug("GFIFO_CMD_Cmd = 0x%x\n", gen_fifo_cmd);

		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}

	/* Send dummy: convert dummy bytes to clock cycles for this width */
	if (op->dummy.nbytes) {
		dummy_cycles = op->dummy.nbytes * 8 / op->dummy.buswidth;

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->dummy.buswidth);
		/* Neither TX nor RX: just clock out the dummy cycles */
		gen_fifo_cmd &= ~(GQSPI_GFIFO_TX | GQSPI_GFIFO_RX);
		gen_fifo_cmd |= GQSPI_GFIFO_DATA_XFR_MASK;
		gen_fifo_cmd |= dummy_cycles;
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}
}
545
546static u32 zynqmp_qspi_calc_exp(struct zynqmp_qspi_priv *priv,
547 u32 *gen_fifo_cmd)
548{
549 u32 expval = 8;
550 u32 len;
551
552 while (1) {
553 if (priv->len > 255) {
554 if (priv->len & (1 << expval)) {
555 *gen_fifo_cmd &= ~GQSPI_GFIFO_IMD_MASK;
556 *gen_fifo_cmd |= GQSPI_GFIFO_EXP_MASK;
557 *gen_fifo_cmd |= expval;
558 priv->len -= (1 << expval);
559 return expval;
560 }
561 expval++;
562 } else {
563 *gen_fifo_cmd &= ~(GQSPI_GFIFO_IMD_MASK |
564 GQSPI_GFIFO_EXP_MASK);
565 *gen_fifo_cmd |= (u8)priv->len;
566 len = (u8)priv->len;
567 priv->len = 0;
568 return len;
569 }
570 }
571}
572
573static int zynqmp_qspi_genfifo_fill_tx(struct zynqmp_qspi_priv *priv)
574{
575 u32 gen_fifo_cmd;
576 u32 len;
577 int ret = 0;
578
579 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
Brandon Maier4d9cce72021-01-20 10:39:46 -0600580 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530581 gen_fifo_cmd |= GQSPI_GFIFO_TX |
582 GQSPI_GFIFO_DATA_XFR_MASK;
583
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530584 while (priv->len) {
585 len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
586 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
587
588 debug("GFIFO_CMD_TX:0x%x\n", gen_fifo_cmd);
589
590 if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
591 ret = zynqmp_qspi_fill_tx_fifo(priv,
592 1 << len);
593 else
594 ret = zynqmp_qspi_fill_tx_fifo(priv,
595 len);
596
597 if (ret)
598 return ret;
599 }
600 return ret;
601}
602
/*
 * Receive data in polled IO mode: for each GENFIFO chunk, trigger a
 * manual start and drain the RX FIFO word by word into @buf.
 * Returns 0 on success, or -1 if the RX FIFO stays empty for
 * GQSPI_TIMEOUT consecutive 1 us polls.
 */
static int zynqmp_qspi_start_io(struct zynqmp_qspi_priv *priv,
				u32 gen_fifo_cmd, u32 *buf)
{
	u32 len;
	u32 actuallen = priv->len;
	u32 config_reg, ier, isr;
	u32 timeout = GQSPI_TIMEOUT;
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 last_bits;
	u32 *traverse = buf;

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		/* If exponent bit is set, reset immediate to be 2^len */
		if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
			priv->bytes_to_receive = (1 << len);
		else
			priv->bytes_to_receive = len;
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
		debug("GFIFO_CMD_RX:0x%x\n", gen_fifo_cmd);
		/* Manual start */
		config_reg = readl(&regs->confr);
		config_reg |= GQSPI_STRT_GEN_FIFO;
		writel(config_reg, &regs->confr);
		/* Enable RX interrupts for IO mode */
		ier = readl(&regs->ier);
		ier |= GQSPI_IXR_ALL_MASK;
		writel(ier, &regs->ier);
		while (priv->bytes_to_receive && timeout) {
			isr = readl(&regs->isr);
			if (isr & GQSPI_IXR_RXNEMTY_MASK) {
				if (priv->bytes_to_receive >= 4) {
					*traverse = readl(&regs->drxr);
					traverse++;
					priv->bytes_to_receive -= 4;
				} else {
					/* Tail word: keep only valid bytes */
					last_bits = readl(&regs->drxr);
					memcpy(traverse, &last_bits,
					       priv->bytes_to_receive);
					priv->bytes_to_receive = 0;
				}
				/* Progress made - rearm the poll timeout */
				timeout = GQSPI_TIMEOUT;
			} else {
				udelay(1);
				timeout--;
			}
		}

		debug("buf:0x%lx, rxbuf:0x%lx, *buf:0x%x len: 0x%x\n",
		      (unsigned long)buf, (unsigned long)priv->rx_buf,
		      *buf, actuallen);
		if (!timeout) {
			printf("IO timeout: %d\n", readl(&regs->isr));
			return -1;
		}
	}

	return 0;
}
662
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530663static int zynqmp_qspi_start_dma(struct zynqmp_qspi_priv *priv,
664 u32 gen_fifo_cmd, u32 *buf)
665{
666 u32 addr;
Ashok Reddy Soma822a2432021-08-20 07:43:17 -0600667 u32 size;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530668 u32 actuallen = priv->len;
669 int ret = 0;
670 struct zynqmp_qspi_dma_regs *dma_regs = priv->dma_regs;
671
672 writel((unsigned long)buf, &dma_regs->dmadst);
Ashok Reddy Soma822a2432021-08-20 07:43:17 -0600673 writel(roundup(priv->len, GQSPI_DMA_ALIGN), &dma_regs->dmasize);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530674 writel(GQSPI_DMA_DST_I_STS_MASK, &dma_regs->dmaier);
675 addr = (unsigned long)buf;
Ashok Reddy Soma822a2432021-08-20 07:43:17 -0600676 size = roundup(priv->len, GQSPI_DMA_ALIGN);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530677 flush_dcache_range(addr, addr + size);
678
679 while (priv->len) {
Ashok Reddy Soma822a2432021-08-20 07:43:17 -0600680 zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530681 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
682
683 debug("GFIFO_CMD_RX:0x%x\n", gen_fifo_cmd);
684 }
685
686 ret = wait_for_bit_le32(&dma_regs->dmaisr, GQSPI_DMA_DST_I_STS_DONE,
687 1, GQSPI_TIMEOUT, 1);
688 if (ret) {
689 printf("DMA Timeout:0x%x\n", readl(&dma_regs->dmaisr));
690 return -ETIMEDOUT;
691 }
692
693 writel(GQSPI_DMA_DST_I_STS_DONE, &dma_regs->dmaisr);
694
695 debug("buf:0x%lx, rxbuf:0x%lx, *buf:0x%x len: 0x%x\n",
696 (unsigned long)buf, (unsigned long)priv->rx_buf, *buf,
697 actuallen);
698
699 if (buf != priv->rx_buf)
700 memcpy(priv->rx_buf, buf, actuallen);
701
702 return 0;
703}
704
/*
 * Run the RX data phase. IO mode always reads straight into the caller
 * buffer. DMA requires a 4-byte-aligned destination and a length that
 * is a multiple of 4; otherwise receive into a cache-aligned bounce
 * buffer and let start_dma() copy the result out.
 */
static int zynqmp_qspi_genfifo_fill_rx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 *buf;
	u32 actuallen = priv->len;

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_RX |
			GQSPI_GFIFO_DATA_XFR_MASK;

	/*
	 * Check if receive buffer is aligned to 4 byte and length
	 * is multiples of four byte as we are using dma to receive.
	 */
	if ((!((unsigned long)priv->rx_buf & (GQSPI_DMA_ALIGN - 1)) &&
	     !(actuallen % GQSPI_DMA_ALIGN)) || priv->io_mode) {
		buf = (u32 *)priv->rx_buf;
		if (priv->io_mode)
			return zynqmp_qspi_start_io(priv, gen_fifo_cmd, buf);
		else
			return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
	}

	/* Unaligned caller buffer: DMA into a cache-aligned bounce buffer */
	ALLOC_CACHE_ALIGN_BUFFER(u8, tmp, roundup(priv->len,
						  GQSPI_DMA_ALIGN));
	buf = (u32 *)tmp;
	return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
}
734
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530735static int zynqmp_qspi_claim_bus(struct udevice *dev)
736{
737 struct udevice *bus = dev->parent;
738 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
739 struct zynqmp_qspi_regs *regs = priv->regs;
740
741 writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
742
743 return 0;
744}
745
746static int zynqmp_qspi_release_bus(struct udevice *dev)
747{
748 struct udevice *bus = dev->parent;
749 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
750 struct zynqmp_qspi_regs *regs = priv->regs;
751
752 writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
753
754 return 0;
755}
756
/*
 * spi-mem entry point: assert CS, send the opcode/address/dummy phases,
 * run the data phase in the requested direction, then de-assert CS.
 * Returns 0 or the data-phase error code.
 */
static int zynqmp_qspi_exec_op(struct spi_slave *slave,
			       const struct spi_mem_op *op)
{
	struct zynqmp_qspi_priv *priv = dev_get_priv(slave->dev->parent);
	int ret = 0;

	priv->op = op;
	priv->tx_buf = op->data.buf.out;
	priv->rx_buf = op->data.buf.in;
	priv->len = op->data.nbytes;

	zynqmp_qspi_chipselect(priv, 1);

	/* Send opcode, addr, dummy */
	zynqmp_qspi_genfifo_cmd(priv);

	/* Request the transfer */
	if (op->data.dir == SPI_MEM_DATA_IN)
		ret = zynqmp_qspi_genfifo_fill_rx(priv);
	else if (op->data.dir == SPI_MEM_DATA_OUT)
		ret = zynqmp_qspi_genfifo_fill_tx(priv);

	zynqmp_qspi_chipselect(priv, 0);

	return ret;
}
783
/* spi-mem operations: all transfers are funnelled through exec_op() */
static const struct spi_controller_mem_ops zynqmp_qspi_mem_ops = {
	.exec_op = zynqmp_qspi_exec_op,
};

/* DM SPI uclass operations */
static const struct dm_spi_ops zynqmp_qspi_ops = {
	.claim_bus = zynqmp_qspi_claim_bus,
	.release_bus = zynqmp_qspi_release_bus,
	.set_speed = zynqmp_qspi_set_speed,
	.set_mode = zynqmp_qspi_set_mode,
	.mem_ops = &zynqmp_qspi_mem_ops,
};

/* Matches both ZynqMP and Versal GQSPI instances */
static const struct udevice_id zynqmp_qspi_ids[] = {
	{ .compatible = "xlnx,zynqmp-qspi-1.0" },
	{ .compatible = "xlnx,versal-qspi-1.0" },
	{ }
};

U_BOOT_DRIVER(zynqmp_qspi) = {
	.name = "zynqmp_qspi",
	.id = UCLASS_SPI,
	.of_match = zynqmp_qspi_ids,
	.ops = &zynqmp_qspi_ops,
	.of_to_plat = zynqmp_qspi_of_to_plat,
	.plat_auto = sizeof(struct zynqmp_qspi_plat),
	.priv_auto = sizeof(struct zynqmp_qspi_priv),
	.probe = zynqmp_qspi_probe,
};