// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2018 Xilinx
 *
 * Xilinx ZynqMP Generic Quad-SPI (QSPI) controller driver (master mode only)
 */

#include <common.h>
#include <cpu_func.h>
#include <log.h>
#include <asm/arch/sys_proto.h>
#include <asm/cache.h>
#include <asm/io.h>
#include <clk.h>
#include <dm.h>
#include <malloc.h>
#include <memalign.h>
#include <spi.h>
#include <spi-mem.h>
#include <ubi_uboot.h>
#include <wait_bit.h>
#include <dm/device_compat.h>
#include <linux/bitops.h>
#include <linux/err.h>

#define GQSPI_GFIFO_STRT_MODE_MASK	BIT(29)
#define GQSPI_CONFIG_MODE_EN_MASK	(3 << 30)
#define GQSPI_CONFIG_DMA_MODE		(2 << 30)
#define GQSPI_CONFIG_CPHA_MASK		BIT(2)
#define GQSPI_CONFIG_CPOL_MASK		BIT(1)

/*
 * QSPI Interrupt Registers bit Masks
 *
 * All four interrupt registers (Status/Mask/Enable/Disable) have the same
 * bit definitions.
 */
#define GQSPI_IXR_TXNFULL_MASK		0x00000004 /* QSPI TX FIFO Not Full */
#define GQSPI_IXR_TXFULL_MASK		0x00000008 /* QSPI TX FIFO is full */
#define GQSPI_IXR_RXNEMTY_MASK		0x00000010 /* QSPI RX FIFO Not Empty */
#define GQSPI_IXR_GFEMTY_MASK		0x00000080 /* QSPI Generic FIFO Empty */
#define GQSPI_IXR_ALL_MASK		(GQSPI_IXR_TXNFULL_MASK | \
					 GQSPI_IXR_RXNEMTY_MASK)

/*
 * QSPI Enable Register bit Masks
 *
 * This register is used to enable or disable the QSPI controller
 */
#define GQSPI_ENABLE_ENABLE_MASK	0x00000001 /* QSPI Enable Bit Mask */

#define GQSPI_GFIFO_LOW_BUS		BIT(14)
#define GQSPI_GFIFO_CS_LOWER		BIT(12)
#define GQSPI_GFIFO_UP_BUS		BIT(15)
#define GQSPI_GFIFO_CS_UPPER		BIT(13)
#define GQSPI_SPI_MODE_QSPI		(3 << 10)
#define GQSPI_SPI_MODE_SPI		BIT(10)
#define GQSPI_SPI_MODE_DUAL_SPI		(2 << 10)
#define GQSPI_IMD_DATA_CS_ASSERT	5
#define GQSPI_IMD_DATA_CS_DEASSERT	5
#define GQSPI_GFIFO_TX			BIT(16)
#define GQSPI_GFIFO_RX			BIT(17)
#define GQSPI_GFIFO_STRIPE_MASK		BIT(18)
#define GQSPI_GFIFO_IMD_MASK		0xFF
#define GQSPI_GFIFO_EXP_MASK		BIT(9)
#define GQSPI_GFIFO_DATA_XFR_MASK	BIT(8)
#define GQSPI_STRT_GEN_FIFO		BIT(28)
#define GQSPI_GEN_FIFO_STRT_MOD		BIT(29)
#define GQSPI_GFIFO_WP_HOLD		BIT(19)
#define GQSPI_BAUD_DIV_MASK		(7 << 3)
#define GQSPI_DFLT_BAUD_RATE_DIV	BIT(3)
#define GQSPI_GFIFO_ALL_INT_MASK	0xFBE
#define GQSPI_DMA_DST_I_STS_DONE	BIT(1)
#define GQSPI_DMA_DST_I_STS_MASK	0xFE
#define MODEBITS			0x6

#define GQSPI_GFIFO_SELECT		BIT(0)
#define GQSPI_FIFO_THRESHOLD		1

#define SPI_XFER_ON_BOTH		0
#define SPI_XFER_ON_LOWER		1
#define SPI_XFER_ON_UPPER		2

#define GQSPI_DMA_ALIGN			0x4
#define GQSPI_MAX_BAUD_RATE_VAL		7
#define GQSPI_DFLT_BAUD_RATE_VAL	2

#define GQSPI_TIMEOUT			100000000

#define GQSPI_BAUD_DIV_SHIFT		2
#define GQSPI_LPBK_DLY_ADJ_LPBK_SHIFT	5
#define GQSPI_LPBK_DLY_ADJ_DLY_1	0x2
#define GQSPI_LPBK_DLY_ADJ_DLY_1_SHIFT	3
#define GQSPI_LPBK_DLY_ADJ_DLY_0	0x3
#define GQSPI_USE_DATA_DLY		0x1
#define GQSPI_USE_DATA_DLY_SHIFT	31
#define GQSPI_DATA_DLY_ADJ_VALUE	0x2
#define GQSPI_DATA_DLY_ADJ_SHIFT	28
#define TAP_DLY_BYPASS_LQSPI_RX_VALUE	0x1
#define TAP_DLY_BYPASS_LQSPI_RX_SHIFT	2
#define GQSPI_DATA_DLY_ADJ_OFST		0x000001F8
#define IOU_TAPDLY_BYPASS_OFST		0xFF180390
#define GQSPI_LPBK_DLY_ADJ_LPBK_MASK	0x00000020
#define GQSPI_FREQ_40MHZ		40000000
#define GQSPI_FREQ_100MHZ		100000000
#define GQSPI_FREQ_150MHZ		150000000
#define IOU_TAPDLY_BYPASS_MASK		0x7

#define GQSPI_REG_OFFSET		0x100
#define GQSPI_DMA_REG_OFFSET		0x800

/* QSPI register offsets */
struct zynqmp_qspi_regs {
	u32 confr;	/* 0x00 */
	u32 isr;	/* 0x04 */
	u32 ier;	/* 0x08 */
	u32 idisr;	/* 0x0C */
	u32 imaskr;	/* 0x10 */
	u32 enbr;	/* 0x14 */
	u32 dr;		/* 0x18 */
	u32 txd0r;	/* 0x1C */
	u32 drxr;	/* 0x20 */
	u32 sicr;	/* 0x24 */
	u32 txftr;	/* 0x28 */
	u32 rxftr;	/* 0x2C */
	u32 gpior;	/* 0x30 */
	u32 reserved0;	/* 0x34 */
	u32 lpbkdly;	/* 0x38 */
	u32 reserved1;	/* 0x3C */
	u32 genfifo;	/* 0x40 */
	u32 gqspisel;	/* 0x44 */
	u32 reserved2;	/* 0x48 */
	u32 gqfifoctrl;	/* 0x4C */
	u32 gqfthr;	/* 0x50 */
	u32 gqpollcfg;	/* 0x54 */
	u32 gqpollto;	/* 0x58 */
	u32 gqxfersts;	/* 0x5C */
	u32 gqfifosnap;	/* 0x60 */
	u32 gqrxcpy;	/* 0x64 */
	u32 reserved3[36];	/* 0x68 */
	u32 gqspidlyadj;	/* 0xF8 */
};

struct zynqmp_qspi_dma_regs {
	u32 dmadst;	/* 0x00 */
	u32 dmasize;	/* 0x04 */
	u32 dmasts;	/* 0x08 */
	u32 dmactrl;	/* 0x0C */
	u32 reserved0;	/* 0x10 */
	u32 dmaisr;	/* 0x14 */
	u32 dmaier;	/* 0x18 */
	u32 dmaidr;	/* 0x1C */
	u32 dmaimr;	/* 0x20 */
	u32 dmactrl2;	/* 0x24 */
	u32 dmadstmsb;	/* 0x28 */
};

struct zynqmp_qspi_plat {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	u32 frequency;
	u32 speed_hz;
};

struct zynqmp_qspi_priv {
	struct zynqmp_qspi_regs *regs;
	struct zynqmp_qspi_dma_regs *dma_regs;
	const void *tx_buf;
	void *rx_buf;
	unsigned int len;
	int bytes_to_transfer;
	int bytes_to_receive;
	const struct spi_mem_op *op;
};

static int zynqmp_qspi_of_to_plat(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);

	debug("%s\n", __func__);

	plat->regs = (struct zynqmp_qspi_regs *)(dev_read_addr(bus) +
						 GQSPI_REG_OFFSET);
	plat->dma_regs = (struct zynqmp_qspi_dma_regs *)
			  (dev_read_addr(bus) + GQSPI_DMA_REG_OFFSET);

	return 0;
}

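/*
 * zynqmp_qspi_init_hw - One-time controller setup
 *
 * Select the generic (GQSPI) FIFO interface, disable and clear all
 * interrupts, program the TX/RX FIFO thresholds, switch the controller to
 * DMA mode with WP/HOLD handling and the default baud-rate divisor, and
 * finally enable it.
 */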
static void zynqmp_qspi_init_hw(struct zynqmp_qspi_priv *priv)
{
	u32 config_reg;
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_GFIFO_SELECT, &regs->gqspisel);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->idisr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->txftr);
	writel(GQSPI_FIFO_THRESHOLD, &regs->rxftr);
	writel(GQSPI_GFIFO_ALL_INT_MASK, &regs->isr);

	config_reg = readl(&regs->confr);
	config_reg &= ~(GQSPI_GFIFO_STRT_MODE_MASK |
			GQSPI_CONFIG_MODE_EN_MASK);
	config_reg |= GQSPI_CONFIG_DMA_MODE |
		      GQSPI_GFIFO_WP_HOLD |
		      GQSPI_DFLT_BAUD_RATE_DIV;
	writel(config_reg, &regs->confr);

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);
}

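/*
 * zynqmp_qspi_bus_select - Bus/chip-select portion of a GENFIFO entry
 *
 * This driver always drives the lower data bus with the lower chip select;
 * upper-bus and dual-parallel (striped) configurations are not used here.
 */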
static u32 zynqmp_qspi_bus_select(struct zynqmp_qspi_priv *priv)
{
	u32 gqspi_fifo_reg = 0;

	gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS |
			 GQSPI_GFIFO_CS_LOWER;

	return gqspi_fifo_reg;
}

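/* Map an spi-mem buswidth (1/2/4 I/O lines) to the GENFIFO SPI-mode field */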
static u32 zynqmp_qspi_genfifo_mode(u8 buswidth)
{
	switch (buswidth) {
	case 1:
		return GQSPI_SPI_MODE_SPI;
	case 2:
		return GQSPI_SPI_MODE_DUAL_SPI;
	case 4:
		return GQSPI_SPI_MODE_QSPI;
	default:
		debug("Unsupported bus width %u\n", buswidth);
		return GQSPI_SPI_MODE_SPI;
	}
}

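/*
 * zynqmp_qspi_fill_gen_fifo - Queue one entry in the generic FIFO
 *
 * Wait (with a timeout) until the generic FIFO reports empty, i.e. the
 * previous entry has been consumed, before writing the new command word.
 */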
static void zynqmp_qspi_fill_gen_fifo(struct zynqmp_qspi_priv *priv,
				      u32 gqspi_fifo_reg)
{
	struct zynqmp_qspi_regs *regs = priv->regs;
	int ret = 0;

	ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_GFEMTY_MASK, 1,
				GQSPI_TIMEOUT, 1);
	if (ret)
		printf("%s Timeout\n", __func__);

	writel(gqspi_fifo_reg, &regs->genfifo);
}

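/*
 * zynqmp_qspi_chipselect - Assert or de-assert the lower chip select
 *
 * Chip-select changes are issued as GENFIFO entries without a data
 * transfer; the immediate field carries a small cycle count so the flash
 * sees some setup/hold time around the CS edge.
 */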
static void zynqmp_qspi_chipselect(struct zynqmp_qspi_priv *priv, int is_on)
{
	u32 gqspi_fifo_reg = 0;

	if (is_on) {
		gqspi_fifo_reg = zynqmp_qspi_bus_select(priv);
		gqspi_fifo_reg |= GQSPI_SPI_MODE_SPI |
				  GQSPI_IMD_DATA_CS_ASSERT;
	} else {
		gqspi_fifo_reg = GQSPI_GFIFO_LOW_BUS;
		gqspi_fifo_reg |= GQSPI_IMD_DATA_CS_DEASSERT;
	}

	debug("GFIFO_CMD_CS: 0x%x\n", gqspi_fifo_reg);

	zynqmp_qspi_fill_gen_fifo(priv, gqspi_fifo_reg);
}

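/*
 * zynqmp_qspi_set_tapdelay - Tune RX capture delays for the chosen divisor
 *
 * Based on the resulting SCLK band (below 40 MHz, up to 100 MHz, up to
 * 150 MHz) this bypasses the IOU RX tap delay and/or enables the loopback
 * clock and data delay adjustments so read data is sampled reliably.
 */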
void zynqmp_qspi_set_tapdelay(struct udevice *bus, u32 baudrateval)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 tapdlybypass = 0, lpbkdlyadj = 0, datadlyadj = 0, clk_rate;
	u32 reqhz = 0;

	clk_rate = plat->frequency;
	reqhz = (clk_rate / (GQSPI_BAUD_DIV_SHIFT << baudrateval));

	debug("%s, req_hz:%d, clk_rate:%d, baudrateval:%d\n",
	      __func__, reqhz, clk_rate, baudrateval);

	if (reqhz < GQSPI_FREQ_40MHZ) {
		zynqmp_mmio_read(IOU_TAPDLY_BYPASS_OFST, &tapdlybypass);
		tapdlybypass |= (TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				 TAP_DLY_BYPASS_LQSPI_RX_SHIFT);
	} else if (reqhz <= GQSPI_FREQ_100MHZ) {
		zynqmp_mmio_read(IOU_TAPDLY_BYPASS_OFST, &tapdlybypass);
		tapdlybypass |= (TAP_DLY_BYPASS_LQSPI_RX_VALUE <<
				 TAP_DLY_BYPASS_LQSPI_RX_SHIFT);
		lpbkdlyadj = readl(&regs->lpbkdly);
		lpbkdlyadj |= (GQSPI_LPBK_DLY_ADJ_LPBK_MASK);
		datadlyadj = readl(&regs->gqspidlyadj);
		datadlyadj |= ((GQSPI_USE_DATA_DLY << GQSPI_USE_DATA_DLY_SHIFT)
			       | (GQSPI_DATA_DLY_ADJ_VALUE <<
				  GQSPI_DATA_DLY_ADJ_SHIFT));
	} else if (reqhz <= GQSPI_FREQ_150MHZ) {
		lpbkdlyadj = readl(&regs->lpbkdly);
		lpbkdlyadj |= ((GQSPI_LPBK_DLY_ADJ_LPBK_MASK) |
			       GQSPI_LPBK_DLY_ADJ_DLY_0);
	}

	zynqmp_mmio_write(IOU_TAPDLY_BYPASS_OFST, IOU_TAPDLY_BYPASS_MASK,
			  tapdlybypass);
	writel(lpbkdlyadj, &regs->lpbkdly);
	writel(datadlyadj, &regs->gqspidlyadj);
}

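/*
 * zynqmp_qspi_set_speed - Program the baud-rate divisor
 *
 * SCLK is ref_clk / (2 << baud_rate_val); pick the smallest divisor that
 * does not exceed the requested speed, write it to the configuration
 * register and re-tune the tap delays to match.
 */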
static int zynqmp_qspi_set_speed(struct udevice *bus, uint speed)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;
	u8 baud_rate_val = 0;

	debug("%s\n", __func__);
	if (speed > plat->frequency)
		speed = plat->frequency;

	if (plat->speed_hz != speed) {
		/* Set the clock frequency */
		/* If speed == 0, default to lowest speed */
		while ((baud_rate_val < 8) &&
		       ((plat->frequency /
			(2 << baud_rate_val)) > speed))
			baud_rate_val++;

		if (baud_rate_val > GQSPI_MAX_BAUD_RATE_VAL)
			baud_rate_val = GQSPI_DFLT_BAUD_RATE_VAL;

		plat->speed_hz = plat->frequency / (2 << baud_rate_val);

		confr = readl(&regs->confr);
		confr &= ~GQSPI_BAUD_DIV_MASK;
		confr |= (baud_rate_val << 3);
		writel(confr, &regs->confr);
		zynqmp_qspi_set_tapdelay(bus, baud_rate_val);

		debug("regs=%p, speed=%d\n", priv->regs, plat->speed_hz);
	}

	return 0;
}

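/*
 * zynqmp_qspi_probe - Bring the controller up at probe time
 *
 * Take the register blocks from platdata, get and enable the reference
 * clock, record its rate for the baud-rate calculation, and put the
 * hardware into a known state.
 */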
static int zynqmp_qspi_probe(struct udevice *bus)
{
	struct zynqmp_qspi_plat *plat = dev_get_plat(bus);
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct clk clk;
	unsigned long clock;
	int ret;

	debug("%s: bus:%p, priv:%p\n", __func__, bus, priv);

	priv->regs = plat->regs;
	priv->dma_regs = plat->dma_regs;

	ret = clk_get_by_index(bus, 0, &clk);
	if (ret < 0) {
		dev_err(bus, "failed to get clock\n");
		return ret;
	}

	clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(clock)) {
		dev_err(bus, "failed to get rate\n");
		return clock;
	}
	debug("%s: CLK %ld\n", __func__, clock);

	ret = clk_enable(&clk);
	if (ret) {
		dev_err(bus, "failed to enable clock\n");
		return ret;
	}
	plat->frequency = clock;
	plat->speed_hz = plat->frequency / 2;

	/* Init the ZynqMP QSPI hw */
	zynqmp_qspi_init_hw(priv);

	return 0;
}

static int zynqmp_qspi_set_mode(struct udevice *bus, uint mode)
{
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 confr;

	debug("%s\n", __func__);
	/* Set the SPI Clock phase and polarities */
	confr = readl(&regs->confr);
	confr &= ~(GQSPI_CONFIG_CPHA_MASK |
		   GQSPI_CONFIG_CPOL_MASK);

	if (mode & SPI_CPHA)
		confr |= GQSPI_CONFIG_CPHA_MASK;
	if (mode & SPI_CPOL)
		confr |= GQSPI_CONFIG_CPOL_MASK;

	writel(confr, &regs->confr);

	return 0;
}

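/*
 * zynqmp_qspi_fill_tx_fifo - Feed the TX FIFO for an outgoing data phase
 *
 * Each write waits for the TX-FIFO-not-full status. Data is pushed one
 * 32-bit word at a time; a trailing 1-3 byte remainder is packed into a
 * final word with the unused upper bytes set to all ones.
 */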
static int zynqmp_qspi_fill_tx_fifo(struct zynqmp_qspi_priv *priv, u32 size)
{
	u32 data;
	int ret = 0;
	struct zynqmp_qspi_regs *regs = priv->regs;
	u32 *buf = (u32 *)priv->tx_buf;
	u32 len = size;

	debug("TxFIFO: 0x%x, size: 0x%x\n", readl(&regs->isr),
	      size);

	while (size) {
		ret = wait_for_bit_le32(&regs->isr, GQSPI_IXR_TXNFULL_MASK, 1,
					GQSPI_TIMEOUT, 1);
		if (ret) {
			printf("%s: Timeout\n", __func__);
			return ret;
		}

		if (size >= 4) {
			writel(*buf, &regs->txd0r);
			buf++;
			size -= 4;
		} else {
			switch (size) {
			case 1:
				data = *((u8 *)buf);
				buf += 1;
				data |= GENMASK(31, 8);
				break;
			case 2:
				data = *((u16 *)buf);
				buf += 2;
				data |= GENMASK(31, 16);
				break;
			case 3:
				data = *buf;
				buf += 3;
				data |= GENMASK(31, 24);
				break;
			}
			writel(data, &regs->txd0r);
			size = 0;
		}
	}

	priv->tx_buf += len;
	return 0;
}

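/*
 * zynqmp_qspi_genfifo_cmd - Queue the command phase of an spi-mem op
 *
 * Emits one GENFIFO TX entry for the opcode, one entry per address byte
 * (most significant byte first) and, if required, a dummy entry whose
 * immediate value is the number of dummy clock cycles.
 */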
static void zynqmp_qspi_genfifo_cmd(struct zynqmp_qspi_priv *priv)
{
	const struct spi_mem_op *op = priv->op;
	u32 gen_fifo_cmd;
	u8 i, dummy_cycles, addr;

	/* Send opcode */
	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->cmd.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_TX;
	gen_fifo_cmd |= op->cmd.opcode;
	zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

	/* Send address */
	for (i = 0; i < op->addr.nbytes; i++) {
		addr = op->addr.val >> (8 * (op->addr.nbytes - i - 1));

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->addr.buswidth);
		gen_fifo_cmd |= GQSPI_GFIFO_TX;
		gen_fifo_cmd |= addr;

		debug("GFIFO_CMD_Cmd = 0x%x\n", gen_fifo_cmd);

		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}

	/* Send dummy */
	if (op->dummy.nbytes) {
		dummy_cycles = op->dummy.nbytes * 8 / op->dummy.buswidth;

		gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
		gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(op->dummy.buswidth);
		gen_fifo_cmd &= ~(GQSPI_GFIFO_TX | GQSPI_GFIFO_RX);
		gen_fifo_cmd |= GQSPI_GFIFO_DATA_XFR_MASK;
		gen_fifo_cmd |= dummy_cycles;
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);
	}
}

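/*
 * zynqmp_qspi_calc_exp - Encode the next chunk length into a GENFIFO entry
 *
 * The GENFIFO immediate field is 8 bits wide: lengths up to 255 bytes go in
 * directly, larger transfers are split into power-of-two chunks using
 * exponent mode, where an immediate value of N means 2^N bytes. priv->len
 * is reduced by the encoded amount and the chunk length (or exponent) is
 * returned.
 */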
static u32 zynqmp_qspi_calc_exp(struct zynqmp_qspi_priv *priv,
				u32 *gen_fifo_cmd)
{
	u32 expval = 8;
	u32 len;

	while (1) {
		if (priv->len > 255) {
			if (priv->len & (1 << expval)) {
				*gen_fifo_cmd &= ~GQSPI_GFIFO_IMD_MASK;
				*gen_fifo_cmd |= GQSPI_GFIFO_EXP_MASK;
				*gen_fifo_cmd |= expval;
				priv->len -= (1 << expval);
				return expval;
			}
			expval++;
		} else {
			*gen_fifo_cmd &= ~(GQSPI_GFIFO_IMD_MASK |
					   GQSPI_GFIFO_EXP_MASK);
			*gen_fifo_cmd |= (u8)priv->len;
			len = (u8)priv->len;
			priv->len = 0;
			return len;
		}
	}
}

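/*
 * zynqmp_qspi_genfifo_fill_tx - Run the TX data phase
 *
 * Split priv->len into GENFIFO-sized chunks; after queueing each data entry
 * push the corresponding bytes into the TX FIFO.
 */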
static int zynqmp_qspi_genfifo_fill_tx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 len;
	int ret = 0;

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_TX |
			GQSPI_GFIFO_DATA_XFR_MASK;

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		debug("GFIFO_CMD_TX:0x%x\n", gen_fifo_cmd);

		if (gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK)
			ret = zynqmp_qspi_fill_tx_fifo(priv,
						       1 << len);
		else
			ret = zynqmp_qspi_fill_tx_fifo(priv,
						       len);

		if (ret)
			return ret;
	}
	return ret;
}

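/*
 * zynqmp_qspi_start_dma - Receive data through the QSPI destination DMA
 *
 * Program the DMA destination address and cache-line-rounded size, flush
 * the cache range covering the buffer, queue the RX GENFIFO entries and
 * wait for the DMA done status. If a bounce buffer was used, copy the
 * received bytes back to the caller's rx_buf.
 */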
static int zynqmp_qspi_start_dma(struct zynqmp_qspi_priv *priv,
				 u32 gen_fifo_cmd, u32 *buf)
{
	u32 addr;
	u32 size, len;
	u32 actuallen = priv->len;
	int ret = 0;
	struct zynqmp_qspi_dma_regs *dma_regs = priv->dma_regs;

	writel((unsigned long)buf, &dma_regs->dmadst);
	writel(roundup(priv->len, ARCH_DMA_MINALIGN), &dma_regs->dmasize);
	writel(GQSPI_DMA_DST_I_STS_MASK, &dma_regs->dmaier);
	addr = (unsigned long)buf;
	size = roundup(priv->len, ARCH_DMA_MINALIGN);
	flush_dcache_range(addr, addr + size);

	while (priv->len) {
		len = zynqmp_qspi_calc_exp(priv, &gen_fifo_cmd);
		if (!(gen_fifo_cmd & GQSPI_GFIFO_EXP_MASK) &&
		    (len % ARCH_DMA_MINALIGN)) {
			gen_fifo_cmd &= ~GENMASK(7, 0);
			gen_fifo_cmd |= roundup(len, ARCH_DMA_MINALIGN);
		}
		zynqmp_qspi_fill_gen_fifo(priv, gen_fifo_cmd);

		debug("GFIFO_CMD_RX:0x%x\n", gen_fifo_cmd);
	}

	ret = wait_for_bit_le32(&dma_regs->dmaisr, GQSPI_DMA_DST_I_STS_DONE,
				1, GQSPI_TIMEOUT, 1);
	if (ret) {
		printf("DMA Timeout:0x%x\n", readl(&dma_regs->dmaisr));
		return -ETIMEDOUT;
	}

	writel(GQSPI_DMA_DST_I_STS_DONE, &dma_regs->dmaisr);

	debug("buf:0x%lx, rxbuf:0x%lx, *buf:0x%x len: 0x%x\n",
	      (unsigned long)buf, (unsigned long)priv->rx_buf, *buf,
	      actuallen);

	if (buf != priv->rx_buf)
		memcpy(priv->rx_buf, buf, actuallen);

	return 0;
}

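/*
 * zynqmp_qspi_genfifo_fill_rx - Run the RX data phase
 *
 * The DMA needs a 4-byte aligned buffer and length: aligned requests are
 * received straight into the caller's buffer, everything else goes through
 * a cache-aligned bounce buffer.
 */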
static int zynqmp_qspi_genfifo_fill_rx(struct zynqmp_qspi_priv *priv)
{
	u32 gen_fifo_cmd;
	u32 *buf;
	u32 actuallen = priv->len;

	gen_fifo_cmd = zynqmp_qspi_bus_select(priv);
	gen_fifo_cmd |= zynqmp_qspi_genfifo_mode(priv->op->data.buswidth);
	gen_fifo_cmd |= GQSPI_GFIFO_RX |
			GQSPI_GFIFO_DATA_XFR_MASK;

	/*
	 * Check if the receive buffer is aligned to 4 bytes and the length
	 * is a multiple of 4 bytes, as we are using DMA to receive.
	 */
	if (!((unsigned long)priv->rx_buf & (GQSPI_DMA_ALIGN - 1)) &&
	    !(actuallen % GQSPI_DMA_ALIGN)) {
		buf = (u32 *)priv->rx_buf;
		return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
	}

	ALLOC_CACHE_ALIGN_BUFFER(u8, tmp, roundup(priv->len,
						  GQSPI_DMA_ALIGN));
	buf = (u32 *)tmp;
	return zynqmp_qspi_start_dma(priv, gen_fifo_cmd, buf);
}

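/* Enable the controller while the bus is claimed, disable it on release */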
static int zynqmp_qspi_claim_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	return 0;
}

static int zynqmp_qspi_release_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynqmp_qspi_priv *priv = dev_get_priv(bus);
	struct zynqmp_qspi_regs *regs = priv->regs;

	writel(~GQSPI_ENABLE_ENABLE_MASK, &regs->enbr);

	return 0;
}

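/*
 * zynqmp_qspi_exec_op - spi-mem exec_op hook
 *
 * Assert the chip select, queue the opcode/address/dummy phases, run the
 * data phase in the requested direction (if any) and de-assert the chip
 * select again.
 */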
static int zynqmp_qspi_exec_op(struct spi_slave *slave,
			       const struct spi_mem_op *op)
{
	struct zynqmp_qspi_priv *priv = dev_get_priv(slave->dev->parent);
	int ret = 0;

	priv->op = op;
	priv->tx_buf = op->data.buf.out;
	priv->rx_buf = op->data.buf.in;
	priv->len = op->data.nbytes;

	zynqmp_qspi_chipselect(priv, 1);

	/* Send opcode, addr, dummy */
	zynqmp_qspi_genfifo_cmd(priv);

	/* Request the transfer */
	if (op->data.dir == SPI_MEM_DATA_IN)
		ret = zynqmp_qspi_genfifo_fill_rx(priv);
	else if (op->data.dir == SPI_MEM_DATA_OUT)
		ret = zynqmp_qspi_genfifo_fill_tx(priv);

	zynqmp_qspi_chipselect(priv, 0);

	return ret;
}

static const struct spi_controller_mem_ops zynqmp_qspi_mem_ops = {
	.exec_op = zynqmp_qspi_exec_op,
};

static const struct dm_spi_ops zynqmp_qspi_ops = {
	.claim_bus = zynqmp_qspi_claim_bus,
	.release_bus = zynqmp_qspi_release_bus,
	.set_speed = zynqmp_qspi_set_speed,
	.set_mode = zynqmp_qspi_set_mode,
	.mem_ops = &zynqmp_qspi_mem_ops,
};

static const struct udevice_id zynqmp_qspi_ids[] = {
	{ .compatible = "xlnx,zynqmp-qspi-1.0" },
	{ .compatible = "xlnx,versal-qspi-1.0" },
	{ }
};

U_BOOT_DRIVER(zynqmp_qspi) = {
	.name = "zynqmp_qspi",
	.id = UCLASS_SPI,
	.of_match = zynqmp_qspi_ids,
	.ops = &zynqmp_qspi_ops,
	.of_to_plat = zynqmp_qspi_of_to_plat,
	.plat_auto = sizeof(struct zynqmp_qspi_plat),
	.priv_auto = sizeof(struct zynqmp_qspi_priv),
	.probe = zynqmp_qspi_probe,
};