blob: 4251bf28cd31be360e7d6a0db08ee8f2ebb48656 [file] [log] [blame]
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +05301// SPDX-License-Identifier: GPL-2.0+
2/*
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +05303 * (C) Copyright 2013 - 2022, Xilinx, Inc.
4 * (C) Copyright 2023, Advanced Micro Devices, Inc.
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +05305 * Xilinx ZynqMP Generic Quad-SPI(QSPI) controller driver(master mode only)
6 */
7
Ibai Erkiaga78974fb2023-10-13 13:37:27 +01008#define LOG_CATEGORY UCLASS_SPI
9
Simon Glass63334482019-11-14 12:57:39 -070010#include <cpu_func.h>
Simon Glass0f2af882020-05-10 11:40:05 -060011#include <log.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053012#include <asm/arch/sys_proto.h>
Simon Glass274e0b02020-05-10 11:39:56 -060013#include <asm/cache.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053014#include <asm/io.h>
15#include <clk.h>
16#include <dm.h>
17#include <malloc.h>
18#include <memalign.h>
19#include <spi.h>
Brandon Maier4d9cce72021-01-20 10:39:46 -060020#include <spi-mem.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053021#include <ubi_uboot.h>
22#include <wait_bit.h>
Simon Glass9bc15642020-02-03 07:36:16 -070023#include <dm/device_compat.h>
Simon Glass4dcacfc2020-05-10 11:40:13 -060024#include <linux/bitops.h>
Simon Glassd66c5f72020-02-03 07:36:15 -070025#include <linux/err.h>
Ashok Reddy Soma2d322cc2022-08-25 06:59:04 -060026#include <linux/sizes.h>
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +053027#include <linux/mtd/spi-nor.h>
28#include "../mtd/spi/sf_internal.h"
Ashok Reddy Somae3c77a62022-08-25 06:59:01 -060029#include <zynqmp_firmware.h>
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +053030
/* Configuration register (confr) bit masks */
#define GQSPI_GFIFO_STRT_MODE_MASK	BIT(29)
#define GQSPI_CONFIG_MODE_EN_MASK	(3 << 30)
#define GQSPI_CONFIG_DMA_MODE		(2 << 30)
#define GQSPI_CONFIG_CPHA_MASK	BIT(2)
#define GQSPI_CONFIG_CPOL_MASK	BIT(1)

/*
 * QSPI Interrupt Registers bit Masks
 *
 * All the four interrupt registers (Status/Mask/Enable/Disable) have the same
 * bit definitions.
 */
#define GQSPI_IXR_TXNFULL_MASK	0x00000004 /* QSPI TX FIFO Overflow */
#define GQSPI_IXR_TXFULL_MASK	0x00000008 /* QSPI TX FIFO is full */
#define GQSPI_IXR_TXFIFOEMPTY_MASK	0x00000100 /* QSPI TX FIFO is Empty */
#define GQSPI_IXR_RXNEMTY_MASK	0x00000010 /* QSPI RX FIFO Not Empty */
#define GQSPI_IXR_GFEMTY_MASK	0x00000080 /* QSPI Generic FIFO Empty */
#define GQSPI_IXR_GFNFULL_MASK	0x00000200 /* QSPI GENFIFO not full */
#define GQSPI_IXR_ALL_MASK	(GQSPI_IXR_TXNFULL_MASK | \
				 GQSPI_IXR_RXNEMTY_MASK)

/*
 * QSPI Enable Register bit Masks
 *
 * This register is used to enable or disable the QSPI controller
 */
#define GQSPI_ENABLE_ENABLE_MASK	0x00000001 /* QSPI Enable Bit Mask */

/* GENFIFO entry fields: bus/CS selection, SPI mode, direction, immediate */
#define GQSPI_GFIFO_LOW_BUS		BIT(14)
#define GQSPI_GFIFO_CS_LOWER		BIT(12)
#define GQSPI_GFIFO_UP_BUS		BIT(15)
#define GQSPI_GFIFO_CS_UPPER		BIT(13)
#define GQSPI_SPI_MODE_QSPI		(3 << 10)
#define GQSPI_SPI_MODE_SPI		BIT(10)
#define GQSPI_SPI_MODE_DUAL_SPI		(2 << 10)
#define GQSPI_IMD_DATA_CS_ASSERT	5
#define GQSPI_IMD_DATA_CS_DEASSERT	5
#define GQSPI_GFIFO_TX			BIT(16)
#define GQSPI_GFIFO_RX			BIT(17)
#define GQSPI_GFIFO_STRIPE_MASK		BIT(18)
#define GQSPI_GFIFO_IMD_MASK		0xFF
#define GQSPI_GFIFO_EXP_MASK		BIT(9)
#define GQSPI_GFIFO_DATA_XFR_MASK	BIT(8)
#define GQSPI_STRT_GEN_FIFO		BIT(28)
#define GQSPI_GEN_FIFO_STRT_MOD		BIT(29)
#define GQSPI_GFIFO_WP_HOLD		BIT(19)
#define GQSPI_BAUD_DIV_MASK		(7 << 3)
#define GQSPI_DFLT_BAUD_RATE_DIV	BIT(3)
#define GQSPI_GFIFO_ALL_INT_MASK	0xFBE
#define GQSPI_DMA_DST_I_STS_DONE	BIT(1)
#define GQSPI_DMA_DST_I_STS_MASK	0xFE
#define MODEBITS			0x6

#define GQSPI_GFIFO_SELECT		BIT(0)
#define GQSPI_FIFO_THRESHOLD		1
#define GQSPI_GENFIFO_THRESHOLD		31

/* Bus selection values used by zynqmp_qspi_bus_select() */
#define SPI_XFER_ON_BOTH	0
#define SPI_XFER_ON_LOWER	1
#define SPI_XFER_ON_UPPER	2

#define GQSPI_SELECT_LOWER_CS	BIT(0)
#define GQSPI_SELECT_UPPER_CS	BIT(1)

#define GQSPI_DMA_ALIGN		0x4
#define GQSPI_MAX_BAUD_RATE_VAL	7
#define GQSPI_DFLT_BAUD_RATE_VAL	2

#define GQSPI_TIMEOUT		100000000

/* Loopback / data tap-delay tuning values (frequency dependent) */
#define GQSPI_BAUD_DIV_SHIFT		2
#define GQSPI_LPBK_DLY_ADJ_LPBK_SHIFT	5
#define GQSPI_LPBK_DLY_ADJ_DLY_1	0x1
#define GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT	3
#define GQSPI_LPBK_DLY_ADJ_DLY_0	0x3
#define GQSPI_USE_DATA_DLY		0x1
#define GQSPI_USE_DATA_DLY_SHIFT	31
#define GQSPI_DATA_DLY_ADJ_VALUE	0x2
#define GQSPI_DATA_DLY_ADJ_SHIFT	28
#define TAP_DLY_BYPASS_LQSPI_RX_VALUE	0x1
#define TAP_DLY_BYPASS_LQSPI_RX_SHIFT	2
#define GQSPI_DATA_DLY_ADJ_OFST		0x000001F8
/* Tap-delay bypass register lives at different addresses on ZynqMP vs Versal */
#define IOU_TAPDLY_BYPASS_OFST !(IS_ENABLED(CONFIG_ARCH_VERSAL) || \
				IS_ENABLED(CONFIG_ARCH_VERSAL_NET) || \
				IS_ENABLED(CONFIG_ARCH_VERSAL2)) ? \
				0xFF180390 : 0xF103003C
#define GQSPI_LPBK_DLY_ADJ_LPBK_MASK	0x00000020
#define GQSPI_FREQ_37_5MHZ	37500000
#define GQSPI_FREQ_40MHZ	40000000
#define GQSPI_FREQ_100MHZ	100000000
#define GQSPI_FREQ_150MHZ	150000000
#define IOU_TAPDLY_BYPASS_MASK	0x7

/* Offsets of the GQSPI and its DMA register blocks from the device base */
#define GQSPI_REG_OFFSET		0x100
#define GQSPI_DMA_REG_OFFSET		0x800
126
/*
 * QSPI register offsets
 *
 * Memory layout of the GQSPI register block; each comment gives the byte
 * offset from GQSPI_REG_OFFSET.  Field order must match the hardware.
 */
struct zynqmp_qspi_regs {
	u32 confr;	/* 0x00 */
	u32 isr;	/* 0x04 */
	u32 ier;	/* 0x08 */
	u32 idisr;	/* 0x0C */
	u32 imaskr;	/* 0x10 */
	u32 enbr;	/* 0x14 */
	u32 dr;		/* 0x18 */
	u32 txd0r;	/* 0x1C */
	u32 drxr;	/* 0x20 */
	u32 sicr;	/* 0x24 */
	u32 txftr;	/* 0x28 */
	u32 rxftr;	/* 0x2C */
	u32 gpior;	/* 0x30 */
	u32 reserved0;	/* 0x34 */
	u32 lpbkdly;	/* 0x38 */
	u32 reserved1;	/* 0x3C */
	u32 genfifo;	/* 0x40 */
	u32 gqspisel;	/* 0x44 */
	u32 reserved2;	/* 0x48 */
	u32 gqfifoctrl;	/* 0x4C */
	u32 gqfthr;	/* 0x50 */
	u32 gqpollcfg;	/* 0x54 */
	u32 gqpollto;	/* 0x58 */
	u32 gqxfersts;	/* 0x5C */
	u32 gqfifosnap;	/* 0x60 */
	u32 gqrxcpy;	/* 0x64 */
	u32 reserved3[36];	/* 0x68 */
	u32 gqspidlyadj;	/* 0xF8 */
};
158
/*
 * DMA destination-channel register layout, located at GQSPI_DMA_REG_OFFSET.
 * Offsets in the comments are from the start of this block.
 */
struct zynqmp_qspi_dma_regs {
	u32 dmadst;	/* 0x00 */
	u32 dmasize;	/* 0x04 */
	u32 dmasts;	/* 0x08 */
	u32 dmactrl;	/* 0x0C */
	u32 reserved0;	/* 0x10 */
	u32 dmaisr;	/* 0x14 */
	u32 dmaier;	/* 0x18 */
	u32 dmaidr;	/* 0x1C */
	u32 dmaimr;	/* 0x20 */
	u32 dmactrl2;	/* 0x24 */
	u32 dmadstmsb;	/* 0x28 */
};
172
/*
 * Platform data filled from the device tree before probe.
 *
 * @regs:      GQSPI register block (base + GQSPI_REG_OFFSET)
 * @dma_regs:  DMA register block (base + GQSPI_DMA_REG_OFFSET)
 * @frequency: reference clock rate in Hz, set in probe from the clk framework
 * @speed_hz:  currently programmed bus speed in Hz
 * @io_mode:   non-zero when DT has "has-io-mode" (polled I/O instead of DMA)
 */
struct zynqmp_qspi_plat {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	u32 frequency;
	u32 speed_hz;
	unsigned int io_mode;
};
180
/*
 * Per-device runtime state.
 *
 * @regs/@dma_regs: copies of the platform register pointers
 * @tx_buf/@rx_buf: current transfer buffers (advanced as data moves)
 * @len:            remaining bytes of the current data phase
 * @io_mode:        polled I/O mode flag (from plat)
 * @bytes_to_transfer/@bytes_to_receive: progress counters for FIFO loops
 * @op:             spi-mem operation currently being executed
 * @is_parallel:    dual-parallel (stacked lower+upper flash) configuration
 * @u_page:         non-zero when addressing the upper flash device
 * @bus:            which bus(es) to drive (SPI_XFER_ON_*)
 * @stripe:         stripe data across both flashes (parallel mode)
 * @flags:          transfer flags
 * @max_hz:         maximum slave frequency, cached in child_pre_probe
 */
struct zynqmp_qspi_priv {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	const void *tx_buf;
	void *rx_buf;
	unsigned int len;
	unsigned int io_mode;
	int bytes_to_transfer;
	int bytes_to_receive;
	const struct spi_mem_op *op;
	unsigned int is_parallel;
	unsigned int u_page;
	unsigned int bus;
	unsigned int stripe;
	unsigned int flags;
	u32 max_hz;
};
198
Simon Glassaad29ae2020-12-03 16:55:21 -0700199static int zynqmp_qspi_of_to_plat(struct udevice *bus)
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530200{
Simon Glass95588622020-12-22 19:30:28 -0700201 struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530202
Masahiro Yamadaa89b4de2020-07-17 14:36:48 +0900203 plat->regs = (struct zynqmp_qspi_regs *)(dev_read_addr(bus) +
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530204 GQSPI_REG_OFFSET);
205 plat->dma_regs = (struct zynqmp_qspi_dma_regs *)
Masahiro Yamadaa89b4de2020-07-17 14:36:48 +0900206 (dev_read_addr(bus) + GQSPI_DMA_REG_OFFSET);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530207
Ashok Reddy Soma96db8b62022-08-25 06:59:03 -0600208 plat->io_mode = dev_read_bool(bus, "has-io-mode");
209
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530210 return 0;
211}
212
/*
 * zynqmp_qspi_init_hw() - One-time controller initialization
 * @priv: driver private data
 *
 * Selects the generic-FIFO interface, masks and clears all interrupts,
 * programs FIFO thresholds, then reconfigures the controller (disabled
 * during the update): manual-start generic-FIFO mode, WP hold, default
 * baud divisor, and DMA mode unless polled I/O was requested via DT.
 */
static void zynqmp_qspi_init_hw(struct zynqmp_qspi_priv *priv)
{
	u32 config_reg;
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_GFIFO_SELECT, &regs->gqspisel);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->idisr);	/* disable all irqs */
	writel(GQSPI_FIFO_THRESHOLD, &regs->txftr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->rxftr);
	writel(GQSPI_GENFIFO_THRESHOLD, &regs->gqfthr);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->isr);	/* clear stale status */
	writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);	/* disable while configuring */

	config_reg = readl(&regs->confr);
	config_reg &= ~(GQSPI_GFIFO_STRT_MODE_MASK |
			GQSPI_CONFIG_MODE_EN_MASK);
	config_reg |= GQSPI_GFIFO_WP_HOLD | GQSPI_DFLT_BAUD_RATE_DIV;
	config_reg |= GQSPI_GFIFO_STRT_MODE_MASK;	/* manual start mode */
	if (!priv->io_mode)
		config_reg |= GQSPI_CONFIG_DMA_MODE;

	writel(config_reg, &regs->confr);

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);	/* re-enable */
}
238
239static u32 zynqmp_qspi_bus_select(struct zynqmp_qspi_priv *priv)
240{
241 u32 gqspi_fifo_reg = 0;
242
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +0530243 if (priv->is_parallel) {
244 if (priv->bus == SPI_XFER_ON_BOTH)
245 gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
246 GQSPI_GFIFO_UP_BUS |
247 GQSPI_GFIFO_CS_UPPER |
248 GQSPI_GFIFO_CS_LOWER;
249 else if (priv->bus == SPI_XFER_ON_LOWER)
250 gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
251 GQSPI_GFIFO_CS_UPPER |
252 GQSPI_GFIFO_CS_LOWER;
253 else if (priv->bus == SPI_XFER_ON_UPPER)
254 gqspi_fifo_reg = GQSPI_GFIFO_UP_BUS |
255 GQSPI_GFIFO_CS_LOWER |
256 GQSPI_GFIFO_CS_UPPER;
257 else
258 debug("Wrong Bus selection:0x%x\n", priv->bus);
259 } else {
260 if (priv->u_page)
261 gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
262 GQSPI_GFIFO_CS_UPPER;
263 else
264 gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
265 GQSPI_GFIFO_CS_LOWER;
266 }
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530267
268 return gqspi_fifo_reg;
269}
270
Brandon Maier4d9cce72021-01-20 10:39:46 -0600271static u32 zynqmp_qspi_genfifo_mode(u8 buswidth)
272{
273 switch (buswidth) {
274 case 1:
275 return GQSPI_SPI_MODE_SPI;
276 case 2:
277 return GQSPI_SPI_MODE_DUAL_SPI;
278 case 4:
279 return GQSPI_SPI_MODE_QSPI;
280 default:
Ibai Erkiaga78974fb2023-10-13 13:37:27 +0100281 log_warning("Unsupported bus width %u\n", buswidth);
Brandon Maier4d9cce72021-01-20 10:39:46 -0600282 return GQSPI_SPI_MODE_SPI;
283 }
284}
285
/*
 * zynqmp_qspi_fill_gen_fifo() - Push one entry into the generic FIFO
 * @priv:           driver private data
 * @gqspi_fifo_reg: fully-formed GENFIFO command word
 *
 * Writes the entry, triggers a manual start (the controller runs in
 * manual-start mode, see zynqmp_qspi_init_hw()), then blocks until the
 * generic FIFO drains so the next entry can be queued safely.
 */
static void zynqmp_qspi_fill_gen_fifo(struct zynqmp_qspi_priv *priv,
				      u32 gqspi_fifo_reg)
{
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 config_reg, ier;
	int ret = 0;

	log_content("%s, GFIFO_CMD: 0x%X\n", __func__, gqspi_fifo_reg);

	writel(gqspi_fifo_reg, &regs->genfifo);

	config_reg = readl(&regs->confr);
	/* Manual start if needed */
	config_reg |= GQSPI_STRT_GEN_FIFO;
	writel(config_reg, &regs->confr);

	/* Enable interrupts */
	ier = readl(&regs->ier);
	ier |= GQSPI_IXR_GFEMTY_MASK;
	writel(ier, &regs->ier);

	/* Wait until the gen fifo is empty to write the new command */
	ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_GFEMTY_MASK, 1,
				GQSPI_TIMEOUT, 1);
	if (ret)
		log_warning("%s, Timeout\n", __func__);
}
314
/*
 * zynqmp_qspi_chipselect() - Assert or deassert the chip-select
 * @priv:  driver private data
 * @is_on: non-zero to assert CS, zero to deassert
 *
 * Assertion reuses zynqmp_qspi_bus_select() for the bus/CS bits and holds
 * CS for GQSPI_IMD_DATA_CS_ASSERT cycles.  Deassertion drives the bus
 * lines with no CS bits set.
 *
 * NOTE(review): on deassert, the IMD delay (GQSPI_IMD_DATA_CS_DEASSERT)
 * is only added in the lower-bus single-flash branch, not for parallel or
 * upper-page configurations — confirm this asymmetry is intentional.
 */
static void zynqmp_qspi_chipselect(struct zynqmp_qspi_priv *priv, int is_on)
{
	u32 gqspi_fifo_reg = 0;

	log_debug("%s, assert: %d\r\n", __func__, is_on);

	if (is_on) {
		gqspi_fifo_reg = zynqmp_qspi_bus_select(priv);
		gqspi_fifo_reg |= GQSPI_SPI_MODE_SPI |
				  GQSPI_IMD_DATA_CS_ASSERT;
	} else {
		if (priv->is_parallel) {
			gqspi_fifo_reg = GQSPI_GFIFO_UP_BUS |
					 GQSPI_GFIFO_LOW_BUS;
		} else if (priv->u_page) {
			gqspi_fifo_reg = GQSPI_GFIFO_UP_BUS;
		} else {
			gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS;
			gqspi_fifo_reg |= GQSPI_IMD_DATA_CS_DEASSERT;
		}
	}

	zynqmp_qspi_fill_gen_fifo(priv, gqspi_fifo_reg);
}
339
/*
 * zynqmp_qspi_set_tapdelay() - Tune RX tap delays for the bus frequency
 * @bus:         QSPI controller device
 * @baudrateval: baud divisor exponent already programmed in confr
 *
 * Computes the resulting bus clock and programs the tap-delay bypass,
 * loopback-delay and data-delay registers with frequency-band values.
 * ZynqMP goes through the firmware MMIO interface for the (secured)
 * IOU_TAPDLY_BYPASS register; Versal-class parts write it directly and
 * use a slightly different frequency table (37.5 MHz low band).
 */
static void zynqmp_qspi_set_tapdelay(struct udevice *bus, u32 baudrateval)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 tapdlybypass = 0, lpbkdlyadj = 0, datadlyadj = 0, clk_rate;
	u32 reqhz = 0;

	/* Bus clock = reference clock / (2 << baudrateval) */
	clk_rate = plat->frequency;
	reqhz = (clk_rate / (GQSPI_BAUD_DIV_SHIFT << baudrateval));

	log_debug("%s, clk_rate:%d, baudrateval:%d, bus_clk: %d\n",
		  __func__, clk_rate, baudrateval, reqhz);

	if (!(IS_ENABLED(CONFIG_ARCH_VERSAL) ||
	      IS_ENABLED(CONFIG_ARCH_VERSAL_NET) ||
	      IS_ENABLED(CONFIG_ARCH_VERSAL2))) {
		/* ZynqMP: three frequency bands, register via firmware */
		if (reqhz <= GQSPI_FREQ_40MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
					TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
		} else if (reqhz <= GQSPI_FREQ_100MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
					TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK;
			datadlyadj = (GQSPI_USE_DATA_DLY <<
				      GQSPI_USE_DATA_DLY_SHIFT) |
				     (GQSPI_DATA_DLY_ADJ_VALUE <<
				      GQSPI_DATA_DLY_ADJ_SHIFT);
		} else if (reqhz <= GQSPI_FREQ_150MHZ) {
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK |
				      GQSPI_LPBK_DLY_ADJ_DLY_0;
		}
		zynqmp_mmio_write(IOU_TAPDLY_BYPASS_OFST,
				  IOU_TAPDLY_BYPASS_MASK, tapdlybypass);
	} else {
		/* Versal family: direct register access */
		if (reqhz <= GQSPI_FREQ_37_5MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
					TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
		} else if (reqhz <= GQSPI_FREQ_100MHZ) {
			tapdlybypass = TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
					TAP_DLY_BYPASS_LQSPI_RX_SHIFT;
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK;
			datadlyadj = GQSPI_USE_DATA_DLY <<
				     GQSPI_USE_DATA_DLY_SHIFT;
		} else if (reqhz <= GQSPI_FREQ_150MHZ) {
			lpbkdlyadj = GQSPI_LPBK_DLY_ADJ_LPBK_MASK |
				     (GQSPI_LPBK_DLY_ADJ_DLY_1 <<
				      GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT);
		}
		writel(tapdlybypass, IOU_TAPDLY_BYPASS_OFST);
	}
	writel(lpbkdlyadj, &regs->lpbkdly);
	writel(datadlyadj, &regs->gqspidlyadj);
}
394
/*
 * zynqmp_qspi_set_speed() - Program the bus clock divisor
 * @bus:   QSPI controller device
 * @speed: requested SCLK frequency in Hz (0 means "maximum")
 *
 * Finds the smallest divisor exponent whose resulting clock does not
 * exceed @speed (actual clock = frequency / (2 << exp)), programs it into
 * the configuration register and retunes the tap delays.  Skipped when
 * the requested speed is already in effect.
 *
 * Return: always 0.
 */
static int zynqmp_qspi_set_speed(struct udevice *bus, uint speed)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;
	u8 baud_rate_val = 0;

	log_debug("%s, Speed: %d, Max: %d\n", __func__, speed, plat->frequency);

	/*
	 * If speed == 0 or speed > max freq, then set speed to highest
	 */
	if (!speed || speed > priv->max_hz)
		speed = priv->max_hz;

	if (plat->speed_hz != speed) {
		/* Search exponents 0..7; 8 signals "no exact fit" below */
		while ((baud_rate_val < 8) &&
		       ((plat->frequency /
			(2 << baud_rate_val)) > speed))
			baud_rate_val++;

		/* Out of range: fall back to the default divisor */
		if (baud_rate_val > GQSPI_MAX_BAUD_RATE_VAL)
			baud_rate_val = GQSPI_DFLT_BAUD_RATE_VAL;

		plat->speed_hz = plat->frequency / (2 << baud_rate_val);

		confr = readl(&regs->confr);
		confr &= ~GQSPI_BAUD_DIV_MASK;
		confr |= (baud_rate_val << 3);
		writel(confr, &regs->confr);

		zynqmp_qspi_set_tapdelay(bus, baud_rate_val);
	}

	return 0;
}
432
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +0530433static int zynqmp_qspi_child_pre_probe(struct udevice *bus)
434{
435 struct spi_slave *slave = dev_get_parent_priv(bus);
436 struct zynqmp_qspi_priv *priv = dev_get_priv(bus->parent);
437
438 slave->multi_cs_cap = true;
439 slave->bytemode = SPI_4BYTE_MODE;
440 priv->max_hz = slave->max_hz;
441
442 return 0;
443}
444
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530445static int zynqmp_qspi_probe(struct udevice *bus)
446{
Simon Glassb75b15b2020-12-03 16:55:23 -0700447 struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530448 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
449 struct clk clk;
450 unsigned long clock;
451 int ret;
452
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530453 priv->regs = plat->regs;
454 priv->dma_regs = plat->dma_regs;
Ashok Reddy Soma96db8b62022-08-25 06:59:03 -0600455 priv->io_mode = plat->io_mode;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530456
457 ret = clk_get_by_index(bus, 0, &clk);
458 if (ret < 0) {
Sean Anderson241232a2020-09-15 10:45:12 -0400459 dev_err(bus, "failed to get clock\n");
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530460 return ret;
461 }
462
463 clock = clk_get_rate(&clk);
464 if (IS_ERR_VALUE(clock)) {
Sean Anderson241232a2020-09-15 10:45:12 -0400465 dev_err(bus, "failed to get rate\n");
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530466 return clock;
467 }
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530468
469 ret = clk_enable(&clk);
Michal Simek41710952021-02-09 15:28:15 +0100470 if (ret) {
Sean Anderson241232a2020-09-15 10:45:12 -0400471 dev_err(bus, "failed to enable clock\n");
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530472 return ret;
473 }
474 plat->frequency = clock;
475 plat->speed_hz = plat->frequency / 2;
476
477 /* init the zynq spi hw */
478 zynqmp_qspi_init_hw(priv);
479
Ibai Erkiaga78974fb2023-10-13 13:37:27 +0100480 log_debug("%s, Rerence clock frequency: %ld\n", __func__, clock);
481
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530482 return 0;
483}
484
485static int zynqmp_qspi_set_mode(struct udevice *bus, uint mode)
486{
487 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
488 struct zynqmp_qspi_regs *regs = priv->regs;
489 u32 confr;
490
Ibai Erkiaga78974fb2023-10-13 13:37:27 +0100491 log_debug("%s, 0x%X\n", __func__, mode);
492
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530493 /* Set the SPI Clock phase and polarities */
494 confr = readl(&regs->confr);
Ashok Reddy Soma7b4bded2022-08-25 06:59:05 -0600495 confr &= ~(GQSPI_CONFIG_CPHA_MASK | GQSPI_CONFIG_CPOL_MASK);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530496
497 if (mode & SPI_CPHA)
498 confr |= GQSPI_CONFIG_CPHA_MASK;
499 if (mode & SPI_CPOL)
500 confr |= GQSPI_CONFIG_CPOL_MASK;
501
502 writel(confr, &regs->confr);
503
504 return 0;
505}
506
/*
 * zynqmp_qspi_fill_tx_fifo() - Feed @size bytes from priv->tx_buf to TXD
 * @priv: driver private data
 * @size: number of bytes to push for this GENFIFO entry
 *
 * Writes whole 32-bit words while at least 4 bytes remain; a 1-3 byte
 * tail is packed into one word with the unused high bytes forced to 1s.
 * Waits for TX-not-full before each write and for TX-empty at the end,
 * then advances priv->tx_buf by the full length.
 *
 * NOTE(review): the 3-byte tail case reads a full 32-bit word (*buf), so
 * one byte past the payload may be read (its value is masked off) — and
 * the trailing buf increments are in u32 units, but buf is dead after the
 * tail store, so this has no effect. Confirm callers' buffers tolerate
 * the over-read.
 *
 * Return: 0 on success, negative error code on FIFO wait timeout.
 */
static int zynqmp_qspi_fill_tx_fifo(struct zynqmp_qspi_priv *priv, u32 size)
{
	u32 data, ier;
	int ret = 0;
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 *buf = (u32 *)priv->tx_buf;
	u32 len = size;

	/* Enable interrupts */
	ier = readl(&regs->ier);
	ier |= GQSPI_IXR_ALL_MASK | GQSPI_IXR_TXFIFOEMPTY_MASK;
	writel(ier, &regs->ier);

	while (size) {
		ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXNFULL_MASK, 1,
					GQSPI_TIMEOUT, 1);
		if (ret)
			return log_msg_ret("Timeout\n", ret);

		if (size >= 4) {
			writel(*buf, &regs->txd0r);
			buf++;
			size -= 4;
		} else {
			/* Pack the 1-3 byte tail; unused high bytes = 1s */
			switch (size) {
			case 1:
				data = *((u8 *)buf);
				buf += 1;
				data |= GENMASK(31, 8);
				break;
			case 2:
				data = *((u16 *)buf);
				buf += 2;
				data |= GENMASK(31, 16);
				break;
			case 3:
				data = *buf;
				buf += 3;
				data |= GENMASK(31, 24);
				break;
			}
			writel(data, &regs->txd0r);
			size = 0;
		}
	}

	/* Ensure everything left the FIFO before reporting success */
	ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXFIFOEMPTY_MASK, 1,
				GQSPI_TIMEOUT, 1);
	if (ret)
		return log_msg_ret("Timeout\n", ret);

	priv->tx_buf += len;
	return 0;
}
561
562static void zynqmp_qspi_genfifo_cmd(struct zynqmp_qspi_priv *priv)
563{
Brandon Maier4d9cce72021-01-20 10:39:46 -0600564 const struct spi_mem_op *op = priv->op;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530565 u32 gen_fifo_cmd;
Brandon Maier4d9cce72021-01-20 10:39:46 -0600566 u8 i, dummy_cycles, addr;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530567
Ibai Erkiaga78974fb2023-10-13 13:37:27 +0100568 log_debug("%s, opcode: 0x%0X, addr.nbytes: %d, dummy.mbytes: %d\r\n",
569 __func__, op->cmd.opcode, op->addr.nbytes, op->dummy.nbytes);
570
Brandon Maier4d9cce72021-01-20 10:39:46 -0600571 /* Send opcode */
572 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
573 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->cmd.buswidth);
574 gen_fifo_cmd |= GQSPI_GFIFO_TX;
575 gen_fifo_cmd |= op->cmd.opcode;
576 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
577
578 /* Send address */
579 for (i = 0; i < op->addr.nbytes; i++) {
580 addr = op->addr.val >> (8 * (op->addr.nbytes - i - 1));
581
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530582 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
Brandon Maier4d9cce72021-01-20 10:39:46 -0600583 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->addr.buswidth);
584 gen_fifo_cmd |= GQSPI_GFIFO_TX;
585 gen_fifo_cmd |= addr;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530586
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530587 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
588 }
Brandon Maier4d9cce72021-01-20 10:39:46 -0600589
590 /* Send dummy */
591 if (op->dummy.nbytes) {
592 dummy_cycles = op->dummy.nbytes * 8 / op->dummy.buswidth;
593
594 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
595 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->dummy.buswidth);
596 gen_fifo_cmd &= ~(GQSPI_GFIFO_TX | GQSPI_GFIFO_RX);
597 gen_fifo_cmd |= GQSPI_GFIFO_DATA_XFR_MASK;
598 gen_fifo_cmd |= dummy_cycles;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530599 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
600 }
601}
602
603static u32 zynqmp_qspi_calc_exp(struct zynqmp_qspi_priv *priv,
604 u32 *gen_fifo_cmd)
605{
606 u32 expval = 8;
607 u32 len;
608
609 while (1) {
610 if (priv->len > 255) {
611 if (priv->len & (1 << expval)) {
612 *gen_fifo_cmd &= ~GQSPI_GFIFO_IMD_MASK;
613 *gen_fifo_cmd |= GQSPI_GFIFO_EXP_MASK;
614 *gen_fifo_cmd |= expval;
615 priv->len -= (1 << expval);
616 return expval;
617 }
618 expval++;
619 } else {
620 *gen_fifo_cmd &= ~(GQSPI_GFIFO_IMD_MASK |
621 GQSPI_GFIFO_EXP_MASK);
622 *gen_fifo_cmd |= (u8)priv->len;
623 len = (u8)priv->len;
624 priv->len = 0;
625 return len;
626 }
627 }
628}
629
630static int zynqmp_qspi_genfifo_fill_tx(struct zynqmp_qspi_priv *priv)
631{
632 u32 gen_fifo_cmd;
633 u32 len;
634 int ret = 0;
635
Ibai Erkiaga78974fb2023-10-13 13:37:27 +0100636 log_debug("%s, length: %d\r\n", __func__, priv->len);
637
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530638 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
Brandon Maier4d9cce72021-01-20 10:39:46 -0600639 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
Ashok Reddy Soma7b4bded2022-08-25 06:59:05 -0600640 gen_fifo_cmd |= GQSPI_GFIFO_TX | GQSPI_GFIFO_DATA_XFR_MASK;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530641
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +0530642 if (priv->stripe)
643 gen_fifo_cmd |= GQSPI_GFIFO_STRIPE_MASK;
644
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530645 while (priv->len) {
646 len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
647 zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
648
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530649 if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
Ashok Reddy Soma7b4bded2022-08-25 06:59:05 -0600650 ret = zynqmp_qspi_fill_tx_fifo(priv, 1 << len);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530651 else
Ashok Reddy Soma7b4bded2022-08-25 06:59:05 -0600652 ret = zynqmp_qspi_fill_tx_fifo(priv, len);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530653
654 if (ret)
655 return ret;
656 }
657 return ret;
658}
659
/*
 * zynqmp_qspi_start_io() - Receive data in polled I/O mode (no DMA)
 * @priv:         driver private data (priv->len bytes remaining)
 * @gen_fifo_cmd: RX GENFIFO template word
 * @buf:          destination buffer (word-granular writes)
 *
 * For each length-encoded GENFIFO entry, manually starts the transfer and
 * polls the RX-not-empty status, draining the RX data register a word at
 * a time (a sub-word tail is copied via memcpy).  The timeout counter is
 * reset after every successful read and only expires after GQSPI_TIMEOUT
 * consecutive empty polls (1 us apart).
 *
 * Return: 0 on success, negative error code on RX timeout.
 */
static int zynqmp_qspi_start_io(struct zynqmp_qspi_priv *priv,
				u32 gen_fifo_cmd, u32 *buf)
{
	u32 len;
	u32 config_reg, ier, isr;
	u32 timeout = GQSPI_TIMEOUT;
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 last_bits;
	u32 *traverse = buf;

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		/* If exponent bit is set, reset immediate to be 2^len */
		if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
			priv->bytes_to_receive = (1 << len);
		else
			priv->bytes_to_receive = len;
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		/* Manual start */
		config_reg = readl(&regs->confr);
		config_reg |= GQSPI_STRT_GEN_FIFO;
		writel(config_reg, &regs->confr);
		/* Enable RX interrupts for IO mode */
		ier = readl(&regs->ier);
		ier |= GQSPI_IXR_ALL_MASK;
		writel(ier, &regs->ier);
		while (priv->bytes_to_receive && timeout) {
			isr = readl(&regs->isr);
			if (isr & GQSPI_IXR_RXNEMTY_MASK) {
				if (priv->bytes_to_receive >= 4) {
					*traverse = readl(&regs->drxr);
					traverse++;
					priv->bytes_to_receive -= 4;
				} else {
					/* Partial final word */
					last_bits = readl(&regs->drxr);
					memcpy(traverse, &last_bits,
					       priv->bytes_to_receive);
					priv->bytes_to_receive = 0;
				}
				timeout = GQSPI_TIMEOUT;
			} else {
				udelay(1);
				timeout--;
			}
		}

		if (!timeout)
			return log_msg_retz("Timeout\n", timeout);
	}

	return 0;
}
713
/*
 * zynqmp_qspi_start_dma() - Receive data via the destination DMA channel
 * @priv:         driver private data (priv->len bytes total)
 * @gen_fifo_cmd: RX GENFIFO template word
 * @buf:          DMA destination; may be a cache-aligned bounce buffer
 *
 * Splits the transfer into chunks of at most SZ_256M (the DMA size limit
 * is below SZ_512M), programs the 44-bit destination address and rounded
 * size, queues GENFIFO entries for the chunk and waits for the DMA done
 * status.  Caches are invalidated over the destination both before the
 * transfer (drop dirty lines) and after (drop speculatively fetched
 * lines) since the DMA writes memory behind the CPU's back.  If @buf is a
 * bounce buffer the chunk is copied out to priv->rx_buf, which is
 * advanced either way.
 *
 * Return: 0 on success, negative error code on DMA-done timeout.
 */
static int zynqmp_qspi_start_dma(struct zynqmp_qspi_priv *priv,
				 u32 gen_fifo_cmd, u32 *buf)
{
	unsigned long addr;
	u32 size;
	u32 actuallen = priv->len;
	u32 totallen = priv->len;
	int ret = 0;
	struct zynqmp_qspi_dma_regs *dma_regs = priv->dma_regs;

	while (totallen) {
		if (totallen >= SZ_512M)
			priv->len = SZ_256M;
		else
			priv->len = totallen;

		totallen -= priv->len; /* Save remaining bytes length to read */
		actuallen = priv->len; /* Actual number of bytes reading */

		writel(lower_32_bits((unsigned long)buf), &dma_regs->dmadst);
		writel(upper_32_bits((unsigned long)buf) & GENMASK(11, 0),
		       &dma_regs->dmadstmsb);
		writel(roundup(priv->len, GQSPI_DMA_ALIGN), &dma_regs->dmasize);
		writel(GQSPI_DMA_DST_I_STS_MASK, &dma_regs->dmaier);
		addr = (unsigned long)buf;
		size = roundup(priv->len, GQSPI_DMA_ALIGN);
		invalidate_dcache_range(addr, addr + size);

		while (priv->len) {
			zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
			zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
		}

		ret = wait_for_bit_le32(&dma_regs->dmaisr,
					GQSPI_DMA_DST_I_STS_DONE, 1,
					GQSPI_TIMEOUT, 1);
		if (ret)
			return log_msg_ret("Timeout:\n", ret);

		invalidate_dcache_range(addr, addr + size);

		writel(GQSPI_DMA_DST_I_STS_DONE, &dma_regs->dmaisr);

		/* Copy out of the bounce buffer if one was used */
		if (buf != priv->rx_buf)
			memcpy(priv->rx_buf, buf, actuallen);

		buf = (u32 *)((u8 *)buf + actuallen);
		priv->rx_buf = (u8 *)priv->rx_buf + actuallen;
	}

	return 0;
}
766
767static int zynqmp_qspi_genfifo_fill_rx(struct zynqmp_qspi_priv *priv)
768{
769 u32 gen_fifo_cmd;
770 u32 *buf;
771 u32 actuallen = priv->len;
772
Ibai Erkiaga78974fb2023-10-13 13:37:27 +0100773 log_debug("%s, length: %d\r\n", __func__, priv->len);
774
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530775 gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
Brandon Maier4d9cce72021-01-20 10:39:46 -0600776 gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
Ashok Reddy Soma7b4bded2022-08-25 06:59:05 -0600777 gen_fifo_cmd |= GQSPI_GFIFO_RX | GQSPI_GFIFO_DATA_XFR_MASK;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530778
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +0530779 if (priv->stripe)
780 gen_fifo_cmd |= GQSPI_GFIFO_STRIPE_MASK;
781
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530782 /*
783 * Check if receive buffer is aligned to 4 byte and length
784 * is multiples of four byte as we are using dma to receive.
785 */
Ashok Reddy Soma96db8b62022-08-25 06:59:03 -0600786 if ((!((unsigned long)priv->rx_buf & (GQSPI_DMA_ALIGN - 1)) &&
787 !(actuallen % GQSPI_DMA_ALIGN)) || priv->io_mode) {
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530788 buf = (u32 *)priv->rx_buf;
Ashok Reddy Soma96db8b62022-08-25 06:59:03 -0600789 if (priv->io_mode)
790 return zynqmp_qspi_start_io(priv, gen_fifo_cmd, buf);
791 else
792 return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530793 }
794
Ashok Reddy Soma7b4bded2022-08-25 06:59:05 -0600795 ALLOC_CACHE_ALIGN_BUFFER(u8, tmp, roundup(priv->len, GQSPI_DMA_ALIGN));
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530796 buf = (u32 *)tmp;
797 return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
798}
799
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530800static int zynqmp_qspi_claim_bus(struct udevice *dev)
801{
802 struct udevice *bus = dev->parent;
803 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
804 struct zynqmp_qspi_regs *regs = priv->regs;
805
806 writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
807
808 return 0;
809}
810
811static int zynqmp_qspi_release_bus(struct udevice *dev)
812{
813 struct udevice *bus = dev->parent;
814 struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
815 struct zynqmp_qspi_regs *regs = priv->regs;
816
817 writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
818
819 return 0;
820}
821
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +0530822static bool zynqmp_qspi_update_stripe(const struct spi_mem_op *op)
823{
824 /*
825 * This is a list of opcodes for which we must not use striped access
826 * even in dual parallel mode, but instead broadcast the same data to
827 * both chips. This is primarily erase commands and writing some
828 * registers.
829 */
830 switch (op->cmd.opcode) {
831 case SPINOR_OP_BE_4K:
832 case SPINOR_OP_BE_32K:
833 case SPINOR_OP_CHIP_ERASE:
834 case SPINOR_OP_SE:
835 case SPINOR_OP_BE_32K_4B:
836 case SPINOR_OP_SE_4B:
837 case SPINOR_OP_BE_4K_4B:
838 case SPINOR_OP_WRSR:
839 case SPINOR_OP_WREAR:
840 case SPINOR_OP_BRWR:
841 return false;
842 case SPINOR_OP_WRSR2:
843 return op->addr.nbytes != 0;
844 default:
845 return true;
846 }
847}
848
Brandon Maier4d9cce72021-01-20 10:39:46 -0600849static int zynqmp_qspi_exec_op(struct spi_slave *slave,
850 const struct spi_mem_op *op)
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530851{
Brandon Maier4d9cce72021-01-20 10:39:46 -0600852 struct zynqmp_qspi_priv *priv = dev_get_priv(slave->dev->parent);
853 int ret = 0;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530854
Brandon Maier4d9cce72021-01-20 10:39:46 -0600855 priv->op = op;
856 priv->tx_buf = op->data.buf.out;
857 priv->rx_buf = op->data.buf.in;
858 priv->len = op->data.nbytes;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530859
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +0530860 if (slave->flags & SPI_XFER_U_PAGE)
861 priv->u_page = 1;
862 else
863 priv->u_page = 0;
864
865 if ((slave->flags & GQSPI_SELECT_LOWER_CS) &&
866 (slave->flags & GQSPI_SELECT_UPPER_CS))
867 priv->is_parallel = true;
868
869 priv->stripe = 0;
870 priv->bus = 0;
871
872 if (priv->is_parallel) {
Marek Vasuta2f75002024-11-03 00:57:31 +0100873 if (slave->flags & SPI_XFER_LOWER)
874 priv->bus = 1;
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +0530875 if (zynqmp_qspi_update_stripe(op))
876 priv->stripe = 1;
877 }
878
Brandon Maier4d9cce72021-01-20 10:39:46 -0600879 zynqmp_qspi_chipselect(priv, 1);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530880
Brandon Maier4d9cce72021-01-20 10:39:46 -0600881 /* Send opcode, addr, dummy */
882 zynqmp_qspi_genfifo_cmd(priv);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530883
Brandon Maier4d9cce72021-01-20 10:39:46 -0600884 /* Request the transfer */
885 if (op->data.dir == SPI_MEM_DATA_IN)
886 ret = zynqmp_qspi_genfifo_fill_rx(priv);
887 else if (op->data.dir == SPI_MEM_DATA_OUT)
888 ret = zynqmp_qspi_genfifo_fill_tx(priv);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530889
Brandon Maier4d9cce72021-01-20 10:39:46 -0600890 zynqmp_qspi_chipselect(priv, 0);
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530891
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +0530892 priv->is_parallel = false;
Marek Vasuta2f75002024-11-03 00:57:31 +0100893 slave->flags &= ~SPI_XFER_LOWER;
Venkatesh Yadav Abbarapuf0b9d772024-09-26 10:25:06 +0530894
Brandon Maier4d9cce72021-01-20 10:39:46 -0600895 return ret;
Siva Durga Prasad Paladugu76597382018-07-04 17:31:23 +0530896}
897
/* spi-mem hooks: all flash operations are funnelled through exec_op */
static const struct spi_controller_mem_ops zynqmp_qspi_mem_ops = {
	.exec_op = zynqmp_qspi_exec_op,
};
901
/* Driver-model SPI uclass operations for the GQSPI controller */
static const struct dm_spi_ops zynqmp_qspi_ops = {
	.claim_bus = zynqmp_qspi_claim_bus,
	.release_bus = zynqmp_qspi_release_bus,
	.set_speed = zynqmp_qspi_set_speed,
	.set_mode = zynqmp_qspi_set_mode,
	.mem_ops = &zynqmp_qspi_mem_ops,
};
909
/* Device-tree compatibles handled by this driver (ZynqMP and Versal) */
static const struct udevice_id zynqmp_qspi_ids[] = {
	{ .compatible = "xlnx,zynqmp-qspi-1.0" },
	{ .compatible = "xlnx,versal-qspi-1.0" },
	{ }
};
915
/* Driver-model binding for the Xilinx ZynqMP/Versal Generic QSPI controller */
U_BOOT_DRIVER(zynqmp_qspi) = {
	.name = "zynqmp_qspi",
	.id = UCLASS_SPI,
	.of_match = zynqmp_qspi_ids,
	.ops = &zynqmp_qspi_ops,
	.of_to_plat = zynqmp_qspi_of_to_plat,
	.plat_auto = sizeof(struct zynqmp_qspi_plat),
	.priv_auto = sizeof(struct zynqmp_qspi_priv),
	.probe = zynqmp_qspi_probe,
	.child_pre_probe = zynqmp_qspi_child_pre_probe,
};