// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2013 - 2022, Xilinx, Inc.
 * (C) Copyright 2015 Jagan Teki <jteki@openedev.com>
 * (C) Copyright 2023, Advanced Micro Devices, Inc.
 *
 * Xilinx Zynq Quad-SPI (QSPI) controller driver (master mode only)
 */

#include <clk.h>
#include <dm.h>
#include <dm/device_compat.h>
#include <log.h>
#include <malloc.h>
#include <spi.h>
#include <spi_flash.h>
#include <asm/global_data.h>
#include <asm/io.h>
#include <linux/bitops.h>
#include <spi-mem.h>
#include "../mtd/spi/sf_internal.h"

DECLARE_GLOBAL_DATA_PTR;

/* zynq qspi register bit masks ZYNQ_QSPI_<REG>_<BIT>_MASK */
#define ZYNQ_QSPI_CR_IFMODE_MASK	BIT(31)		/* Flash interface mode */
#define ZYNQ_QSPI_CR_MSA_MASK		BIT(15)		/* Manual start enable */
#define ZYNQ_QSPI_CR_MCS_MASK		BIT(14)		/* Manual chip select */
#define ZYNQ_QSPI_CR_PCS_MASK		BIT(10)		/* Peripheral chip select */
#define ZYNQ_QSPI_CR_FW_MASK		GENMASK(7, 6)	/* FIFO width */
#define ZYNQ_QSPI_CR_SS_MASK		GENMASK(13, 10)	/* Slave Select */
#define ZYNQ_QSPI_CR_BAUD_MASK		GENMASK(5, 3)	/* Baud rate div */
#define ZYNQ_QSPI_CR_CPHA_MASK		BIT(2)		/* Clock phase */
#define ZYNQ_QSPI_CR_CPOL_MASK		BIT(1)		/* Clock polarity */
#define ZYNQ_QSPI_CR_MSTREN_MASK	BIT(0)		/* Mode select */
#define ZYNQ_QSPI_IXR_RXNEMPTY_MASK	BIT(4)		/* RX_FIFO_not_empty */
#define ZYNQ_QSPI_IXR_TXOW_MASK		BIT(2)		/* TX_FIFO_not_full */
#define ZYNQ_QSPI_IXR_ALL_MASK		GENMASK(6, 0)	/* All IXR bits */
#define ZYNQ_QSPI_ENR_SPI_EN_MASK	BIT(0)		/* SPI Enable */
#define ZYNQ_QSPI_LQSPICFG_LQMODE_MASK	BIT(31)		/* Linear QSPI Mode */

/* zynq qspi Transmit Data Register */
#define ZYNQ_QSPI_TXD_00_00_OFFSET	0x1C	/* Transmit 4-byte inst */
#define ZYNQ_QSPI_TXD_00_01_OFFSET	0x80	/* Transmit 1-byte inst */
#define ZYNQ_QSPI_TXD_00_10_OFFSET	0x84	/* Transmit 2-byte inst */
#define ZYNQ_QSPI_TXD_00_11_OFFSET	0x88	/* Transmit 3-byte inst */
#define ZYNQ_QSPI_FR_QOUT_CODE		0x6B	/* Quad output read instruction code */

#define QSPI_SELECT_LOWER_CS		BIT(0)
#define QSPI_SELECT_UPPER_CS		BIT(1)

/*
 * QSPI Linear Configuration Register
 *
 * It is named the Linear Configuration register, but it also controls the
 * controller's other (non-linear) modes.
 */
#define ZYNQ_QSPI_LCFG_TWO_MEM_MASK	0x40000000 /* Two memories attached */
#define ZYNQ_QSPI_LCFG_SEP_BUS_MASK	0x20000000 /* Separate bus per memory */
#define ZYNQ_QSPI_LCFG_U_PAGE		0x10000000 /* QSPI Upper memory set */
#define ZYNQ_QSPI_LCFG_DUMMY_SHIFT	8

#define ZYNQ_QSPI_TXFIFO_THRESHOLD	1	/* Tx FIFO threshold level */
#define ZYNQ_QSPI_RXFIFO_THRESHOLD	32	/* Rx FIFO threshold level */

#define ZYNQ_QSPI_CR_BAUD_MAX		8	/* Baud rate divisor max val */
#define ZYNQ_QSPI_CR_BAUD_SHIFT		3	/* Baud rate divisor shift */
#define ZYNQ_QSPI_CR_SS_SHIFT		10	/* Slave select shift */

#define ZYNQ_QSPI_MAX_BAUD_RATE		0x7
#define ZYNQ_QSPI_DEFAULT_BAUD_RATE	0x2

#define ZYNQ_QSPI_FIFO_DEPTH		63
#define ZYNQ_QSPI_WAIT			(CONFIG_SYS_HZ / 100)	/* 10 ms */

/* zynq qspi register set */
struct zynq_qspi_regs {
	u32 cr;		/* 0x00 */
	u32 isr;	/* 0x04 */
	u32 ier;	/* 0x08 */
	u32 idr;	/* 0x0C */
	u32 imr;	/* 0x10 */
	u32 enr;	/* 0x14 */
	u32 dr;		/* 0x18 */
	u32 txd0r;	/* 0x1C */
	u32 drxr;	/* 0x20 */
	u32 sicr;	/* 0x24 */
	u32 txftr;	/* 0x28 */
	u32 rxftr;	/* 0x2C */
	u32 gpior;	/* 0x30 */
	u32 reserved0[19];
	u32 txd1r;	/* 0x80 */
	u32 txd2r;	/* 0x84 */
	u32 txd3r;	/* 0x88 */
	u32 reserved1[5];
	u32 lqspicfg;	/* 0xA0 */
	u32 lqspists;	/* 0xA4 */
};

/* zynq qspi platform data */
struct zynq_qspi_plat {
	struct zynq_qspi_regs *regs;
	u32 frequency;	/* input frequency */
	u32 speed_hz;
};

/* zynq qspi priv */
struct zynq_qspi_priv {
	struct zynq_qspi_regs *regs;
	u8 cs;
	u8 mode;
	u8 fifo_depth;
	u32 freq;	/* required frequency */
	u32 max_hz;
	const void *tx_buf;
	void *rx_buf;
	unsigned len;
	int bytes_to_transfer;
	int bytes_to_receive;
	unsigned int is_inst;
	unsigned int is_parallel;
	unsigned int is_stacked;
	unsigned int u_page;
	unsigned cs_change:1;
	unsigned is_strip:1;
};

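/**
 * zynq_qspi_of_to_plat - Fill in the platform data from the device tree
 * @bus: Pointer to the QSPI bus udevice
 *
 * Reads the controller register base address from the "reg" property.
 *
 * Return: 0 on success
 */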
static int zynq_qspi_of_to_plat(struct udevice *bus)
{
	struct zynq_qspi_plat *plat = dev_get_plat(bus);
	const void *blob = gd->fdt_blob;
	int node = dev_of_offset(bus);

	plat->regs = (struct zynq_qspi_regs *)fdtdec_get_addr(blob,
							      node, "reg");
	return 0;
}

/**
 * zynq_qspi_init_hw - Initialize the hardware
 * @priv: Pointer to the zynq_qspi_priv structure
 *
 * The default settings of the QSPI controller's configurable parameters on
 * reset are
 * - Master mode
 * - Baud rate divisor is set to 2
 * - Threshold value for TX FIFO not full interrupt is set to 1
 * - Flash memory interface mode enabled
 * - Size of the word to be transferred as 8 bit
 * This function performs the following actions
 * - Disable and clear all the interrupts
 * - Disable the linear QSPI mode possibly left enabled by the boot loader
 * - Enable manual slave select
 * - Enable auto start
 * - Deselect all the chip select lines
 * - Set the size of the word to be transferred as 32 bit
 * - Set the little endian mode of TX FIFO
 * - Enable the QSPI controller
 */
static void zynq_qspi_init_hw(struct zynq_qspi_priv *priv)
{
	struct zynq_qspi_regs *regs = priv->regs;
	u32 confr;

	/* Disable QSPI */
	writel(~ZYNQ_QSPI_ENR_SPI_EN_MASK, &regs->enr);

	/* Disable Interrupts */
	writel(ZYNQ_QSPI_IXR_ALL_MASK, &regs->idr);

	/* Disable linear mode as the boot loader may have used it */
	writel(0x0, &regs->lqspicfg);

	/* Set the TX and RX FIFO threshold levels */
	writel(ZYNQ_QSPI_TXFIFO_THRESHOLD, &regs->txftr);
	writel(ZYNQ_QSPI_RXFIFO_THRESHOLD, &regs->rxftr);

	/* Clear the RX FIFO */
	while (readl(&regs->isr) & ZYNQ_QSPI_IXR_RXNEMPTY_MASK)
		readl(&regs->drxr);

	/* Clear Interrupts */
	writel(ZYNQ_QSPI_IXR_ALL_MASK, &regs->isr);

	/* Manual slave select and Auto start */
	confr = readl(&regs->cr);
	confr &= ~ZYNQ_QSPI_CR_MSA_MASK;
	confr |= ZYNQ_QSPI_CR_IFMODE_MASK | ZYNQ_QSPI_CR_MCS_MASK |
		ZYNQ_QSPI_CR_PCS_MASK | ZYNQ_QSPI_CR_FW_MASK |
		ZYNQ_QSPI_CR_MSTREN_MASK;

	if (priv->is_stacked)
		confr |= 0x10;

	writel(confr, &regs->cr);

	/* Enable SPI */
	writel(ZYNQ_QSPI_ENR_SPI_EN_MASK, &regs->enr);
}

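/**
 * zynq_qspi_child_pre_probe - Per-child setup before the slave is probed
 * @bus: Pointer to the child (SPI slave) udevice
 *
 * Caches the slave's maximum frequency in the controller private data and
 * marks the slave as capable of using multiple chip selects.
 *
 * Return: 0 on success
 */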
static int zynq_qspi_child_pre_probe(struct udevice *bus)
{
	struct spi_slave *slave = dev_get_parent_priv(bus);
	struct zynq_qspi_priv *priv = dev_get_priv(bus->parent);

	priv->max_hz = slave->max_hz;
	slave->multi_cs_cap = true;

	return 0;
}

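/**
 * zynq_qspi_probe - Probe method for the QSPI driver
 * @bus: Pointer to the QSPI bus udevice
 *
 * Gets and enables the "ref_clk" clock, initializes the controller hardware
 * and derives the default bus frequency from the clock rate.
 *
 * Return: 0 on success, negative error code otherwise
 */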
static int zynq_qspi_probe(struct udevice *bus)
{
	struct zynq_qspi_plat *plat = dev_get_plat(bus);
	struct zynq_qspi_priv *priv = dev_get_priv(bus);
	struct clk clk;
	unsigned long clock;
	int ret;

	priv->regs = plat->regs;
	priv->fifo_depth = ZYNQ_QSPI_FIFO_DEPTH;

	ret = clk_get_by_name(bus, "ref_clk", &clk);
	if (ret < 0) {
		dev_err(bus, "failed to get clock\n");
		return ret;
	}

	clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(clock)) {
		dev_err(bus, "failed to get rate\n");
		return clock;
	}

	ret = clk_enable(&clk);
	if (ret) {
		dev_err(bus, "failed to enable clock\n");
		return ret;
	}

	/* init the zynq spi hw */
	zynq_qspi_init_hw(priv);

	plat->frequency = clock;
	plat->speed_hz = plat->frequency / 2;

	debug("%s: max-frequency=%d\n", __func__, plat->speed_hz);

	return 0;
}

/**
 * zynq_qspi_read_data - Copy data to RX buffer
 * @priv: Pointer to the zynq_qspi_priv structure
 * @data: The 32 bit variable where data is stored
 * @size: Number of bytes to be copied from data to RX buffer
 */
static void zynq_qspi_read_data(struct zynq_qspi_priv *priv, u32 data, u8 size)
{
	u8 byte3;

	debug("%s: data 0x%04x rx_buf addr: 0x%08x size %d\n", __func__,
	      data, (unsigned)(priv->rx_buf), size);

	if (priv->rx_buf) {
		switch (size) {
		case 1:
			*((u8 *)priv->rx_buf) = data;
			priv->rx_buf += 1;
			break;
		case 2:
			*((u8 *)priv->rx_buf) = data;
			priv->rx_buf += 1;
			*((u8 *)priv->rx_buf) = (u8)(data >> 8);
			priv->rx_buf += 1;
			break;
		case 3:
			*((u8 *)priv->rx_buf) = data;
			priv->rx_buf += 1;
			*((u8 *)priv->rx_buf) = (u8)(data >> 8);
			priv->rx_buf += 1;
			byte3 = (u8)(data >> 16);
			*((u8 *)priv->rx_buf) = byte3;
			priv->rx_buf += 1;
			break;
		case 4:
			/* Cannot assume a word-aligned buffer */
			memcpy(priv->rx_buf, &data, size);
			priv->rx_buf += 4;
			break;
		default:
			/* This will never execute */
			break;
		}
	}
	priv->bytes_to_receive -= size;
	if (priv->bytes_to_receive < 0)
		priv->bytes_to_receive = 0;
}

/**
 * zynq_qspi_write_data - Copy data from TX buffer
 * @priv: Pointer to the zynq_qspi_priv structure
 * @data: Pointer to the 32 bit variable where data is to be copied
 * @size: Number of bytes to be copied from TX buffer to data
 */
static void zynq_qspi_write_data(struct zynq_qspi_priv *priv,
				 u32 *data, u8 size)
{
	if (priv->tx_buf) {
		switch (size) {
		case 1:
			*data = *((u8 *)priv->tx_buf);
			priv->tx_buf += 1;
			*data |= 0xFFFFFF00;
			break;
		case 2:
			*data = *((u8 *)priv->tx_buf);
			priv->tx_buf += 1;
			*data |= (*((u8 *)priv->tx_buf) << 8);
			priv->tx_buf += 1;
			*data |= 0xFFFF0000;
			break;
		case 3:
			*data = *((u8 *)priv->tx_buf);
			priv->tx_buf += 1;
			*data |= (*((u8 *)priv->tx_buf) << 8);
			priv->tx_buf += 1;
			*data |= (*((u8 *)priv->tx_buf) << 16);
			priv->tx_buf += 1;
			*data |= 0xFF000000;
			break;
		case 4:
			/* Cannot assume a word-aligned buffer */
			memcpy(data, priv->tx_buf, size);
			priv->tx_buf += 4;
			break;
		default:
			/* This will never execute */
			break;
		}
	} else {
		*data = 0;
	}

	debug("%s: data 0x%08x tx_buf addr: 0x%08x size %d\n", __func__,
	      *data, (u32)priv->tx_buf, size);

	priv->bytes_to_transfer -= size;
	if (priv->bytes_to_transfer < 0)
		priv->bytes_to_transfer = 0;
}

/**
 * zynq_qspi_chipselect - Select or deselect the chip select line
 * @priv: Pointer to the zynq_qspi_priv structure
 * @is_on: Select (1) or deselect (0) the chip select line
 */
static void zynq_qspi_chipselect(struct zynq_qspi_priv *priv, int is_on)
{
	u32 confr;
	struct zynq_qspi_regs *regs = priv->regs;

	confr = readl(&regs->cr);

	if (is_on) {
		/* Select the slave */
		confr &= ~ZYNQ_QSPI_CR_SS_MASK;
		confr |= (~(1 << priv->cs) << ZYNQ_QSPI_CR_SS_SHIFT) &
			 ZYNQ_QSPI_CR_SS_MASK;
	} else {
		/* Deselect the slave */
		confr |= ZYNQ_QSPI_CR_SS_MASK;
	}

	writel(confr, &regs->cr);
}

/**
 * zynq_qspi_fill_tx_fifo - Fills the TX FIFO with as many bytes as possible
 * @priv: Pointer to the zynq_qspi_priv structure
 * @size: Number of bytes to be copied to fifo
 */
static void zynq_qspi_fill_tx_fifo(struct zynq_qspi_priv *priv, u32 size)
{
	u32 data = 0;
	u32 fifocount = 0;
	unsigned len, offset;
	struct zynq_qspi_regs *regs = priv->regs;
	static const unsigned offsets[4] = {
		ZYNQ_QSPI_TXD_00_01_OFFSET, ZYNQ_QSPI_TXD_00_10_OFFSET,
		ZYNQ_QSPI_TXD_00_11_OFFSET, ZYNQ_QSPI_TXD_00_00_OFFSET };

	while ((fifocount < size) &&
	       (priv->bytes_to_transfer > 0)) {
		if (priv->bytes_to_transfer >= 4) {
			if (priv->tx_buf) {
				memcpy(&data, priv->tx_buf, 4);
				priv->tx_buf += 4;
			} else {
				data = 0;
			}
			writel(data, &regs->txd0r);
			priv->bytes_to_transfer -= 4;
			fifocount++;
		} else {
			/* Write TXD1, TXD2, TXD3 only if TxFIFO is empty. */
			if (!(readl(&regs->isr)
					& ZYNQ_QSPI_IXR_TXOW_MASK) &&
					!priv->rx_buf)
				return;
			len = priv->bytes_to_transfer;
			zynq_qspi_write_data(priv, &data, len);
			if ((priv->is_parallel || priv->is_stacked) &&
			    !priv->is_inst && (len % 2))
				len++;
			offset = (priv->rx_buf) ?
				 offsets[3] : offsets[len - 1];
			writel(data, &regs->cr + (offset / 4));
		}
	}
}

/**
 * zynq_qspi_irq_poll - Poll the interrupt status of the QSPI controller
 * @priv: Pointer to the zynq_qspi structure
 *
 * This function polls the interrupt status register. When the TX FIFO
 * not-full or RX FIFO not-empty condition is signalled, it reads the received
 * data from the RX FIFO and refills the TX FIFO if there is any data
 * remaining to be transferred.
 *
 * returns: -ETIMEDOUT for poll timeout
 *	    1 transfer operation complete
 *	    0 otherwise
 */
static int zynq_qspi_irq_poll(struct zynq_qspi_priv *priv)
{
	struct zynq_qspi_regs *regs = priv->regs;
	u32 rxindex = 0;
	u32 rxcount;
	u32 status, timeout;

	/* Poll until any of the interrupt status bits are set */
	timeout = get_timer(0);
	do {
		status = readl(&regs->isr);
	} while ((status == 0) &&
		 (get_timer(timeout) < ZYNQ_QSPI_WAIT));

	if (status == 0) {
		printf("zynq_qspi_irq_poll: Timeout!\n");
		return -ETIMEDOUT;
	}

	writel(status, &regs->isr);

	/* Disable all interrupts */
	writel(ZYNQ_QSPI_IXR_ALL_MASK, &regs->idr);
	if ((status & ZYNQ_QSPI_IXR_TXOW_MASK) ||
	    (status & ZYNQ_QSPI_IXR_RXNEMPTY_MASK)) {
		/*
		 * This bit is set when Tx FIFO has < THRESHOLD entries. We have
		 * the THRESHOLD value set to 1, so this bit indicates Tx FIFO
		 * is empty
		 */
		rxcount = priv->bytes_to_receive - priv->bytes_to_transfer;
		rxcount = (rxcount % 4) ? ((rxcount / 4) + 1) : (rxcount / 4);
		while ((rxindex < rxcount) &&
		       (rxindex < ZYNQ_QSPI_RXFIFO_THRESHOLD)) {
			/* Read out the data from the RX FIFO */
			u32 data;
			data = readl(&regs->drxr);

			if (priv->bytes_to_receive >= 4) {
				if (priv->rx_buf) {
					memcpy(priv->rx_buf, &data, 4);
					priv->rx_buf += 4;
				}
				priv->bytes_to_receive -= 4;
			} else {
				zynq_qspi_read_data(priv, data,
						    priv->bytes_to_receive);
			}
			rxindex++;
		}

		if (priv->bytes_to_transfer) {
			/* There is more data to send */
			zynq_qspi_fill_tx_fifo(priv,
					       ZYNQ_QSPI_RXFIFO_THRESHOLD);

			writel(ZYNQ_QSPI_IXR_ALL_MASK, &regs->ier);
		} else {
			/*
			 * If transfer and receive is completed then only send
			 * complete signal
			 */
			if (!priv->bytes_to_receive) {
				/* return operation complete */
				writel(ZYNQ_QSPI_IXR_ALL_MASK,
				       &regs->idr);
				return 1;
			}
		}
	}

	return 0;
}

/**
 * zynq_qspi_start_transfer - Initiates the QSPI transfer
 * @priv: Pointer to the zynq_qspi_priv structure
 *
 * This function fills the TX FIFO, starts the QSPI transfer, and waits for the
 * transfer to be completed.
 *
 * returns: Number of bytes transferred in the last transfer
 */
static int zynq_qspi_start_transfer(struct zynq_qspi_priv *priv)
{
	static u8 current_u_page;
	u32 data = 0;
	struct zynq_qspi_regs *regs = priv->regs;

	debug("%s: qspi: 0x%08x transfer: 0x%08x len: %d\n", __func__,
	      (u32)priv, (u32)priv, priv->len);

	priv->bytes_to_transfer = priv->len;
	priv->bytes_to_receive = priv->len;

	if (priv->is_parallel)
		writel((ZYNQ_QSPI_LCFG_TWO_MEM_MASK |
			ZYNQ_QSPI_LCFG_SEP_BUS_MASK |
			(1 << ZYNQ_QSPI_LCFG_DUMMY_SHIFT) |
			ZYNQ_QSPI_FR_QOUT_CODE), &regs->lqspicfg);

	if (priv->is_inst && priv->is_stacked && current_u_page != priv->u_page) {
		if (priv->u_page) {
			/* Configure two memories on shared bus
			 * by enabling upper mem
			 */
			writel((ZYNQ_QSPI_LCFG_TWO_MEM_MASK |
				ZYNQ_QSPI_LCFG_U_PAGE |
				(1 << ZYNQ_QSPI_LCFG_DUMMY_SHIFT) |
				ZYNQ_QSPI_FR_QOUT_CODE),
			       &regs->lqspicfg);
		} else {
			/* Configure two memories on shared bus
			 * by enabling lower mem
			 */
			writel((ZYNQ_QSPI_LCFG_TWO_MEM_MASK |
				(1 << ZYNQ_QSPI_LCFG_DUMMY_SHIFT) |
				ZYNQ_QSPI_FR_QOUT_CODE),
			       &regs->lqspicfg);
		}
		current_u_page = priv->u_page;
	}

	if (priv->len < 4)
		zynq_qspi_fill_tx_fifo(priv, priv->len);
	else
		zynq_qspi_fill_tx_fifo(priv, priv->fifo_depth);

	writel(ZYNQ_QSPI_IXR_ALL_MASK, &regs->ier);

	/* wait for completion */
	do {
		data = zynq_qspi_irq_poll(priv);
	} while (data == 0);

	return (priv->len) - (priv->bytes_to_transfer);
}

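/**
 * zynq_qspi_transfer - Perform a single QSPI transfer
 * @priv: Pointer to the zynq_qspi_priv structure
 *
 * Asserts the chip select if required, starts the transfer, checks that the
 * expected number of bytes was transferred and deasserts the chip select
 * when the transfer ends the message.
 *
 * Return: 0 on success, negative error code otherwise
 */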
static int zynq_qspi_transfer(struct zynq_qspi_priv *priv)
{
	unsigned cs_change = 1;
	int status = 0;

	while (1) {
		/* Select the chip if required */
		if (cs_change)
			zynq_qspi_chipselect(priv, 1);

		cs_change = priv->cs_change;

		if (!priv->tx_buf && !priv->rx_buf && priv->len) {
			status = -1;
			break;
		}

		/* Request the transfer */
		if (priv->len) {
			status = zynq_qspi_start_transfer(priv);
			priv->is_inst = 0;
		}

		if (status != priv->len) {
			if (status > 0)
				status = -EMSGSIZE;
			debug("zynq_qspi_transfer:%d len:%d\n",
			      status, priv->len);
			break;
		}
		status = 0;

		if (cs_change)
			/* Deselect the chip */
			zynq_qspi_chipselect(priv, 0);

		break;
	}

	return status;
}

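/**
 * zynq_qspi_claim_bus - Claim the QSPI bus
 * @dev: Pointer to the SPI slave udevice
 *
 * Enables the QSPI controller.
 *
 * Return: 0 on success
 */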
static int zynq_qspi_claim_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynq_qspi_priv *priv = dev_get_priv(bus);
	struct zynq_qspi_regs *regs = priv->regs;

	writel(ZYNQ_QSPI_ENR_SPI_EN_MASK, &regs->enr);

	return 0;
}

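/**
 * zynq_qspi_release_bus - Release the QSPI bus
 * @dev: Pointer to the SPI slave udevice
 *
 * Disables the QSPI controller.
 *
 * Return: 0 on success
 */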
static int zynq_qspi_release_bus(struct udevice *dev)
{
	struct udevice *bus = dev->parent;
	struct zynq_qspi_priv *priv = dev_get_priv(bus);
	struct zynq_qspi_regs *regs = priv->regs;

	writel(~ZYNQ_QSPI_ENR_SPI_EN_MASK, &regs->enr);

	return 0;
}

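/**
 * zynq_qspi_xfer - Perform a SPI transfer on behalf of a slave device
 * @dev: Pointer to the SPI slave udevice
 * @bitlen: Number of bits to transfer
 * @dout: TX buffer, may be NULL
 * @din: RX buffer, may be NULL
 * @flags: Transfer flags (SPI_XFER_BEGIN/END, SPI_XFER_U_PAGE)
 *
 * Sets up the private transfer state (chip select, buffers, length and the
 * instruction/cs_change/u_page flags) and runs the transfer.
 *
 * Return: 0 on success
 */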
static int zynq_qspi_xfer(struct udevice *dev, unsigned int bitlen,
			  const void *dout, void *din, unsigned long flags)
{
	struct udevice *bus = dev->parent;
	struct zynq_qspi_priv *priv = dev_get_priv(bus);
	struct dm_spi_slave_plat *slave_plat = dev_get_parent_plat(dev);

	priv->cs = slave_plat->cs[0];
	priv->tx_buf = dout;
	priv->rx_buf = din;
	priv->len = bitlen / 8;

	debug("zynq_qspi_xfer: bus:%i cs[0]:%i bitlen:%i len:%i flags:%lx\n",
	      dev_seq(bus), slave_plat->cs[0], bitlen, priv->len, flags);

	/*
	 * Festering sore.
	 * Assume that the beginning of a transfer with bits to
	 * transmit must contain a device command.
	 */
	if ((dout && flags & SPI_XFER_BEGIN) ||
	    (flags & SPI_XFER_END && !priv->is_strip))
		priv->is_inst = 1;
	else
		priv->is_inst = 0;

	if (flags & SPI_XFER_END)
		priv->cs_change = 1;
	else
		priv->cs_change = 0;

	if (flags & SPI_XFER_U_PAGE)
		priv->u_page = 1;
	else
		priv->u_page = 0;

	zynq_qspi_transfer(priv);

	return 0;
}

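/**
 * zynq_qspi_set_speed - Set the requested bus clock speed
 * @bus: Pointer to the QSPI bus udevice
 * @speed: Requested speed in Hz
 *
 * Clamps the speed to the slave's maximum, computes the smallest baud rate
 * divisor whose resulting clock does not exceed the requested speed and
 * programs it into the configuration register.
 *
 * Return: 0 on success
 */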
static int zynq_qspi_set_speed(struct udevice *bus, uint speed)
{
	struct zynq_qspi_plat *plat = dev_get_plat(bus);
	struct zynq_qspi_priv *priv = dev_get_priv(bus);
	struct zynq_qspi_regs *regs = priv->regs;
	uint32_t confr;
	u8 baud_rate_val = 0;

	if (!speed || speed > priv->max_hz)
		speed = priv->max_hz;

	/* Set the clock frequency */
	confr = readl(&regs->cr);
	if (plat->speed_hz != speed) {
		while ((baud_rate_val < ZYNQ_QSPI_CR_BAUD_MAX) &&
		       ((plat->frequency /
			(2 << baud_rate_val)) > speed))
			baud_rate_val++;

		if (baud_rate_val > ZYNQ_QSPI_MAX_BAUD_RATE)
			baud_rate_val = ZYNQ_QSPI_DEFAULT_BAUD_RATE;

		plat->speed_hz = speed / (2 << baud_rate_val);
	}
	confr &= ~ZYNQ_QSPI_CR_BAUD_MASK;
	confr |= (baud_rate_val << ZYNQ_QSPI_CR_BAUD_SHIFT);

	writel(confr, &regs->cr);
	priv->freq = speed;

	debug("%s: regs=%p, speed=%d\n", __func__, priv->regs, priv->freq);

	return 0;
}

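/**
 * zynq_qspi_set_mode - Set the SPI mode (clock polarity and phase)
 * @bus: Pointer to the QSPI bus udevice
 * @mode: SPI mode flags
 *
 * Programs the CPOL and CPHA bits of the configuration register according to
 * the requested mode.
 *
 * Return: 0 on success
 */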
static int zynq_qspi_set_mode(struct udevice *bus, uint mode)
{
	struct zynq_qspi_priv *priv = dev_get_priv(bus);
	struct zynq_qspi_regs *regs = priv->regs;
	uint32_t confr;

	/* Set the SPI Clock phase and polarities */
	confr = readl(&regs->cr);
	confr &= ~(ZYNQ_QSPI_CR_CPHA_MASK | ZYNQ_QSPI_CR_CPOL_MASK);

	if (mode & SPI_CPHA)
		confr |= ZYNQ_QSPI_CR_CPHA_MASK;
	if (mode & SPI_CPOL)
		confr |= ZYNQ_QSPI_CR_CPOL_MASK;

	writel(confr, &regs->cr);
	priv->mode = mode;

	debug("%s: regs=%p, mode=%d\n", __func__, priv->regs, priv->mode);

	return 0;
}

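/**
 * update_stripe - Check whether the data of an operation should be striped
 * @op: Pointer to the spi_mem operation
 *
 * In dual parallel mode most data is split ("striped") across the two flash
 * devices. Erase and write status/extended-address register commands operate
 * on registers or whole regions and must be sent to both devices unmodified,
 * so their data is not striped.
 *
 * Return: true if the data should be striped, false otherwise
 */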
static bool update_stripe(const struct spi_mem_op *op)
{
	if (op->cmd.opcode == SPINOR_OP_BE_4K ||
	    op->cmd.opcode == SPINOR_OP_CHIP_ERASE ||
	    op->cmd.opcode == SPINOR_OP_SE ||
	    op->cmd.opcode == SPINOR_OP_WREAR ||
	    op->cmd.opcode == SPINOR_OP_WRSR)
		return false;

	return true;
}

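/**
 * zynq_qspi_exec_op - Execute a spi-mem operation
 * @slave: Pointer to the SPI slave
 * @op: Pointer to the spi_mem operation
 *
 * Sends the command, address and dummy bytes as a first transfer and the
 * data phase, if any, as a second transfer. The dual parallel and stacked
 * configurations are selected through the slave flags.
 *
 * Return: 0 on success, negative error code otherwise
 */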
static int zynq_qspi_exec_op(struct spi_slave *slave,
			     const struct spi_mem_op *op)
{
	struct udevice *bus = slave->dev->parent;
	struct zynq_qspi_priv *priv = dev_get_priv(bus);
	int op_len, pos = 0, ret, i;
	unsigned int flag = 0;
	const u8 *tx_buf = NULL;
	u8 *rx_buf = NULL;

	if ((slave->flags & QSPI_SELECT_LOWER_CS) &&
	    (slave->flags & QSPI_SELECT_UPPER_CS))
		priv->is_parallel = true;
	if (slave->flags & SPI_XFER_STACKED)
		priv->is_stacked = true;

	if (op->data.nbytes) {
		if (op->data.dir == SPI_MEM_DATA_IN)
			rx_buf = op->data.buf.in;
		else
			tx_buf = op->data.buf.out;
	}

	op_len = op->cmd.nbytes + op->addr.nbytes + op->dummy.nbytes;

	u8 op_buf[op_len];

	op_buf[pos++] = op->cmd.opcode;

	if (op->addr.nbytes) {
		for (i = 0; i < op->addr.nbytes; i++)
			op_buf[pos + i] = op->addr.val >>
					  (8 * (op->addr.nbytes - i - 1));

		pos += op->addr.nbytes;
	}

	if (op->dummy.nbytes)
		memset(op_buf + pos, 0xff, op->dummy.nbytes);

	if (slave->flags & SPI_XFER_U_PAGE)
		flag |= SPI_XFER_U_PAGE;

	/* 1st transfer: opcode + address + dummy cycles */
	/* Make sure to set END bit if no tx or rx data messages follow */
	if (!tx_buf && !rx_buf)
		flag |= SPI_XFER_END;

	ret = zynq_qspi_xfer(slave->dev, op_len * 8, op_buf, NULL,
			     flag | SPI_XFER_BEGIN);
	if (ret)
		return ret;

	if (priv->is_parallel)
		priv->is_strip = update_stripe(op);

	/* 2nd transfer: rx or tx data path */
	if (tx_buf || rx_buf) {
		ret = zynq_qspi_xfer(slave->dev, op->data.nbytes * 8, tx_buf,
				     rx_buf, flag | SPI_XFER_END);
		if (ret)
			return ret;
	}

	priv->is_parallel = false;
	priv->is_stacked = false;
	slave->flags &= ~SPI_XFER_LOWER;
	spi_release_bus(slave);

	return 0;
}

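/**
 * zynq_qspi_check_buswidth - Check if a bus width is supported
 * @slave: Pointer to the SPI slave
 * @width: Bus width to check (1, 2 or 4)
 *
 * Return: 0 if the width is supported by the slave mode, -EOPNOTSUPP otherwise
 */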
static int zynq_qspi_check_buswidth(struct spi_slave *slave, u8 width)
{
	u32 mode = slave->mode;

	switch (width) {
	case 1:
		return 0;
	case 2:
		if (mode & SPI_RX_DUAL)
			return 0;
		break;
	case 4:
		if (mode & SPI_RX_QUAD)
			return 0;
		break;
	}

	return -EOPNOTSUPP;
}

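/**
 * zynq_qspi_mem_exec_op - supports_op callback for the spi-mem framework
 * @slave: Pointer to the SPI slave
 * @op: Pointer to the spi_mem operation
 *
 * Checks that the bus width of every phase of the operation (command,
 * address, dummy and data) is supported.
 *
 * Return: true if the operation is supported, false otherwise
 */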
static bool zynq_qspi_mem_exec_op(struct spi_slave *slave,
				  const struct spi_mem_op *op)
{
	if (zynq_qspi_check_buswidth(slave, op->cmd.buswidth))
		return false;

	if (op->addr.nbytes &&
	    zynq_qspi_check_buswidth(slave, op->addr.buswidth))
		return false;

	if (op->dummy.nbytes &&
	    zynq_qspi_check_buswidth(slave, op->dummy.buswidth))
		return false;

	if (op->data.dir != SPI_MEM_NO_DATA &&
	    zynq_qspi_check_buswidth(slave, op->data.buswidth))
		return false;

	return true;
}

static const struct spi_controller_mem_ops zynq_qspi_mem_ops = {
	.exec_op = zynq_qspi_exec_op,
	.supports_op = zynq_qspi_mem_exec_op,
};

static const struct dm_spi_ops zynq_qspi_ops = {
	.claim_bus = zynq_qspi_claim_bus,
	.release_bus = zynq_qspi_release_bus,
	.xfer = zynq_qspi_xfer,
	.set_speed = zynq_qspi_set_speed,
	.set_mode = zynq_qspi_set_mode,
	.mem_ops = &zynq_qspi_mem_ops,
};

static const struct udevice_id zynq_qspi_ids[] = {
	{ .compatible = "xlnx,zynq-qspi-1.0" },
	{ }
};

U_BOOT_DRIVER(zynq_qspi) = {
	.name = "zynq_qspi",
	.id = UCLASS_SPI,
	.of_match = zynq_qspi_ids,
	.ops = &zynq_qspi_ops,
	.of_to_plat = zynq_qspi_of_to_plat,
	.plat_auto = sizeof(struct zynq_qspi_plat),
	.priv_auto = sizeof(struct zynq_qspi_priv),
	.probe = zynq_qspi_probe,
	.child_pre_probe = zynq_qspi_child_pre_probe,
};