// SPDX-License-Identifier: GPL-2.0
/*
 * Designware master SPI core controller driver
 *
 * Copyright (C) 2014 Stefan Roese <sr@denx.de>
 *
 * Very loosely based on the Linux driver:
 * drivers/spi/spi-dw.c, which is:
 * Copyright (c) 2009, Intel Corporation.
 */

#define LOG_CATEGORY UCLASS_SPI
#include <common.h>
#include <log.h>
#include <asm-generic/gpio.h>
#include <clk.h>
#include <dm.h>
#include <errno.h>
#include <malloc.h>
#include <spi.h>
#include <fdtdec.h>
#include <reset.h>
#include <dm/device_compat.h>
#include <linux/bitops.h>
#include <linux/compat.h>
#include <linux/iopoll.h>
#include <asm/io.h>

/* Register offsets */
#define DW_SPI_CTRL0		0x00
#define DW_SPI_CTRL1		0x04
#define DW_SPI_SSIENR		0x08
#define DW_SPI_MWCR		0x0c
#define DW_SPI_SER		0x10
#define DW_SPI_BAUDR		0x14
#define DW_SPI_TXFLTR		0x18
#define DW_SPI_RXFLTR		0x1c
#define DW_SPI_TXFLR		0x20
#define DW_SPI_RXFLR		0x24
#define DW_SPI_SR		0x28
#define DW_SPI_IMR		0x2c
#define DW_SPI_ISR		0x30
#define DW_SPI_RISR		0x34
#define DW_SPI_TXOICR		0x38
#define DW_SPI_RXOICR		0x3c
#define DW_SPI_RXUICR		0x40
#define DW_SPI_MSTICR		0x44
#define DW_SPI_ICR		0x48
#define DW_SPI_DMACR		0x4c
#define DW_SPI_DMATDLR		0x50
#define DW_SPI_DMARDLR		0x54
#define DW_SPI_IDR		0x58
#define DW_SPI_VERSION		0x5c
#define DW_SPI_DR		0x60

/* Bit fields in CTRLR0 */
#define SPI_DFS_OFFSET		0

#define SPI_FRF_OFFSET		4
#define SPI_FRF_SPI		0x0
#define SPI_FRF_SSP		0x1
#define SPI_FRF_MICROWIRE	0x2
#define SPI_FRF_RESV		0x3

#define SPI_MODE_OFFSET		6
#define SPI_SCPH_OFFSET		6
#define SPI_SCOL_OFFSET		7

#define SPI_TMOD_OFFSET		8
#define SPI_TMOD_MASK		(0x3 << SPI_TMOD_OFFSET)
#define SPI_TMOD_TR		0x0		/* xmit & recv */
#define SPI_TMOD_TO		0x1		/* xmit only */
#define SPI_TMOD_RO		0x2		/* recv only */
#define SPI_TMOD_EPROMREAD	0x3		/* eeprom read mode */

#define SPI_SLVOE_OFFSET	10
#define SPI_SRL_OFFSET		11
#define SPI_CFS_OFFSET		12

/* Bit fields in SR, 7 bits */
#define SR_MASK			GENMASK(6, 0)	/* cover 7 bits */
#define SR_BUSY			BIT(0)
#define SR_TF_NOT_FULL		BIT(1)
#define SR_TF_EMPT		BIT(2)
#define SR_RF_NOT_EMPT		BIT(3)
#define SR_RF_FULL		BIT(4)
#define SR_TX_ERR		BIT(5)
#define SR_DCOL			BIT(6)

#define RX_TIMEOUT		1000		/* timeout in ms */

struct dw_spi_platdata {
	s32 frequency;		/* Default clock frequency, -1 for none */
	void __iomem *regs;
};

struct dw_spi_priv {
	void __iomem *regs;
	unsigned int freq;	/* Default frequency */
	unsigned int mode;
	struct clk clk;
	unsigned long bus_clk_rate;

	struct gpio_desc cs_gpio;	/* External chip-select gpio */

	int bits_per_word;
	u8 cs;			/* chip select pin */
	u8 tmode;		/* TR/TO/RO/EEPROM */
	u8 type;		/* SPI/SSP/MicroWire */
	int len;

	u32 fifo_len;		/* depth of the FIFO buffer */
	void *tx;
	void *tx_end;
	void *rx;
	void *rx_end;

	struct reset_ctl_bulk resets;
};

static inline u32 dw_read(struct dw_spi_priv *priv, u32 offset)
{
	return __raw_readl(priv->regs + offset);
}

static inline void dw_write(struct dw_spi_priv *priv, u32 offset, u32 val)
{
	__raw_writel(val, priv->regs + offset);
}

static int request_gpio_cs(struct udevice *bus)
{
#if CONFIG_IS_ENABLED(DM_GPIO) && !defined(CONFIG_SPL_BUILD)
	struct dw_spi_priv *priv = dev_get_priv(bus);
	int ret;

	/* External chip select gpio line is optional */
	ret = gpio_request_by_name(bus, "cs-gpios", 0, &priv->cs_gpio,
				   GPIOD_IS_OUT | GPIOD_IS_OUT_ACTIVE);
	if (ret == -ENOENT)
		return 0;

	if (ret < 0) {
		dev_err(bus, "Couldn't request gpio! (error %d)\n", ret);
		return ret;
	}

	if (dm_gpio_is_valid(&priv->cs_gpio)) {
		dm_gpio_set_dir_flags(&priv->cs_gpio,
				      GPIOD_IS_OUT | GPIOD_IS_OUT_ACTIVE);
	}

	dev_dbg(bus, "Using external gpio for CS management\n");
#endif
	return 0;
}

static int dw_spi_ofdata_to_platdata(struct udevice *bus)
{
	struct dw_spi_platdata *plat = bus->platdata;

	plat->regs = dev_read_addr_ptr(bus);
	if (!plat->regs)
		return -EINVAL;

	/* Use 500KHz as a suitable default */
	plat->frequency = dev_read_u32_default(bus, "spi-max-frequency",
					       500000);
	dev_info(bus, "max-frequency=%d\n", plat->frequency);

	return request_gpio_cs(bus);
}

static inline void spi_enable_chip(struct dw_spi_priv *priv, int enable)
{
	dw_write(priv, DW_SPI_SSIENR, (enable ? 1 : 0));
}

/* Restart the controller, disable all interrupts, clean rx fifo */
static void spi_hw_init(struct udevice *bus, struct dw_spi_priv *priv)
{
	spi_enable_chip(priv, 0);
	dw_write(priv, DW_SPI_IMR, 0xff);
	spi_enable_chip(priv, 1);

	/*
	 * Try to detect the FIFO depth if not set by interface driver,
	 * the depth could be from 2 to 256 from HW spec
	 */
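	/*
	 * Detection note: TXFLTR only accepts threshold values smaller than
	 * the FIFO depth, so the first value that does not read back
	 * unchanged reveals the depth (same approach as the Linux spi-dw.c
	 * driver this code is loosely based on).
	 */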
	if (!priv->fifo_len) {
		u32 fifo;

		for (fifo = 1; fifo < 256; fifo++) {
			dw_write(priv, DW_SPI_TXFLTR, fifo);
			if (fifo != dw_read(priv, DW_SPI_TXFLTR))
				break;
		}

		priv->fifo_len = (fifo == 1) ? 0 : fifo;
		dw_write(priv, DW_SPI_TXFLTR, 0);
	}
	dev_dbg(bus, "fifo_len=%d\n", priv->fifo_len);
}

/*
 * We define dw_spi_get_clk function as 'weak' as some targets
 * (like SOCFPGA_GEN5 and SOCFPGA_ARRIA10) don't use standard clock API
 * and implement dw_spi_get_clk their own way in their clock manager.
 */
__weak int dw_spi_get_clk(struct udevice *bus, ulong *rate)
{
	struct dw_spi_priv *priv = dev_get_priv(bus);
	int ret;

	ret = clk_get_by_index(bus, 0, &priv->clk);
	if (ret)
		return ret;

	ret = clk_enable(&priv->clk);
	if (ret && ret != -ENOSYS && ret != -ENOTSUPP)
		return ret;

	*rate = clk_get_rate(&priv->clk);
	if (!*rate)
		goto err_rate;

	dev_dbg(bus, "Got clock via device tree: %lu Hz\n", *rate);

	return 0;

err_rate:
	clk_disable(&priv->clk);
	clk_free(&priv->clk);

	return -EINVAL;
}

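/*
 * Illustrative sketch of such an override in board/SoC code (the helper
 * name below is a placeholder, not something provided by this file):
 *
 *	int dw_spi_get_clk(struct udevice *bus, ulong *rate)
 *	{
 *		*rate = board_spi_clk_hz();	// platform clock manager query
 *		return 0;
 *	}
 */
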
static int dw_spi_reset(struct udevice *bus)
{
	int ret;
	struct dw_spi_priv *priv = dev_get_priv(bus);

	ret = reset_get_bulk(bus, &priv->resets);
	if (ret) {
		/*
		 * Return 0 if error due to !CONFIG_DM_RESET and reset
		 * DT property is not present.
		 */
		if (ret == -ENOENT || ret == -ENOTSUPP)
			return 0;

		dev_warn(bus, "Couldn't find/assert reset device (error %d)\n",
			 ret);
		return ret;
	}

	ret = reset_deassert_bulk(&priv->resets);
	if (ret) {
		reset_release_bulk(&priv->resets);
		dev_err(bus, "Failed to de-assert reset for SPI (error %d)\n",
			ret);
		return ret;
	}

	return 0;
}

static int dw_spi_probe(struct udevice *bus)
{
	struct dw_spi_platdata *plat = dev_get_platdata(bus);
	struct dw_spi_priv *priv = dev_get_priv(bus);
	int ret;

	priv->regs = plat->regs;
	priv->freq = plat->frequency;

	ret = dw_spi_get_clk(bus, &priv->bus_clk_rate);
	if (ret)
		return ret;

	ret = dw_spi_reset(bus);
	if (ret)
		return ret;

	/* Currently only bits_per_word == 8 supported */
	priv->bits_per_word = 8;

	priv->tmode = 0; /* Tx & Rx */

	/* Basic HW init */
	spi_hw_init(bus, priv);

	return 0;
}

/* Return the max entries we can fill into tx fifo */
static inline u32 tx_max(struct dw_spi_priv *priv)
{
	u32 tx_left, tx_room, rxtx_gap;

	tx_left = (priv->tx_end - priv->tx) / (priv->bits_per_word >> 3);
	tx_room = priv->fifo_len - dw_read(priv, DW_SPI_TXFLR);

	/*
	 * We also have to account for the tx/rx mismatch. Using
	 * (priv->fifo_len - rxflr - txflr) as the TX limit was considered,
	 * but it misses data that has already left the TX FIFO and sits in
	 * the shift register without having reached the RX FIFO yet. So the
	 * limit is enforced in software instead: rxtx_gap is the number of
	 * words already sent but not yet read back, and TX is capped so that
	 * this gap never exceeds the FIFO depth, which keeps the RX FIFO
	 * from overflowing.
	 */
	rxtx_gap = ((priv->rx_end - priv->rx) - (priv->tx_end - priv->tx)) /
		(priv->bits_per_word >> 3);

	return min3(tx_left, tx_room, (u32)(priv->fifo_len - rxtx_gap));
}

/* Return the max entries we should read out of rx fifo */
static inline u32 rx_max(struct dw_spi_priv *priv)
{
	u32 rx_left = (priv->rx_end - priv->rx) / (priv->bits_per_word >> 3);

	return min_t(u32, rx_left, dw_read(priv, DW_SPI_RXFLR));
}

static void dw_writer(struct dw_spi_priv *priv)
{
	u32 max = tx_max(priv);
	u16 txw = 0xFFFF;

	while (max--) {
		/* Set the tx word if the transfer's original "tx" is not null */
		if (priv->tx_end - priv->len) {
			if (priv->bits_per_word == 8)
				txw = *(u8 *)(priv->tx);
			else
				txw = *(u16 *)(priv->tx);
		}
		dw_write(priv, DW_SPI_DR, txw);
		log_content("tx=0x%02x\n", txw);
		priv->tx += priv->bits_per_word >> 3;
	}
}

static void dw_reader(struct dw_spi_priv *priv)
{
	u32 max = rx_max(priv);
	u16 rxw;

	while (max--) {
		rxw = dw_read(priv, DW_SPI_DR);
		log_content("rx=0x%02x\n", rxw);

		/* Care about rx if the transfer's original "rx" is not null */
		if (priv->rx_end - priv->len) {
			if (priv->bits_per_word == 8)
				*(u8 *)(priv->rx) = rxw;
			else
				*(u16 *)(priv->rx) = rxw;
		}
		priv->rx += priv->bits_per_word >> 3;
	}
}

static int poll_transfer(struct dw_spi_priv *priv)
{
	do {
		dw_writer(priv);
		dw_reader(priv);
	} while (priv->rx_end > priv->rx);

	return 0;
}

/*
 * external_cs_manage() is defined 'weak' because some targets (like MSCC
 * Ocelot) don't control the external CS pin through a GPIO controller.
 * Those SoCs drive the SPI pins (and especially CS) in software through
 * dedicated registers.
 */
__weak void external_cs_manage(struct udevice *dev, bool on)
{
#if CONFIG_IS_ENABLED(DM_GPIO) && !defined(CONFIG_SPL_BUILD)
	struct dw_spi_priv *priv = dev_get_priv(dev->parent);

	if (!dm_gpio_is_valid(&priv->cs_gpio))
		return;

	dm_gpio_set_value(&priv->cs_gpio, on ? 1 : 0);
#endif
}

static int dw_spi_xfer(struct udevice *dev, unsigned int bitlen,
		       const void *dout, void *din, unsigned long flags)
{
	struct udevice *bus = dev->parent;
	struct dw_spi_priv *priv = dev_get_priv(bus);
	const u8 *tx = dout;
	u8 *rx = din;
	int ret = 0;
	u32 cr0 = 0;
	u32 val;
	u32 cs;

	/* The SPI core is configured to do 8 bit transfers */
	if (bitlen % 8) {
		dev_err(dev, "Non byte aligned SPI transfer.\n");
		return -1;
	}

	/* Start the transaction if necessary. */
	if (flags & SPI_XFER_BEGIN)
		external_cs_manage(dev, false);

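	/*
	 * Assemble CTRL0: data frame size, frame format, clock phase/polarity
	 * (the SPI_CPHA/SPI_CPOL mode bits land on the SCPH/SCOL fields) and
	 * the transfer mode, which is fixed up below once we know whether
	 * this transfer is tx-only, rx-only or both.
	 */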
	cr0 = (priv->bits_per_word - 1) | (priv->type << SPI_FRF_OFFSET) |
		(priv->mode << SPI_MODE_OFFSET) |
		(priv->tmode << SPI_TMOD_OFFSET);

	if (rx && tx)
		priv->tmode = SPI_TMOD_TR;
	else if (rx)
		priv->tmode = SPI_TMOD_RO;
	else
		/*
		 * In transmit only mode (SPI_TMOD_TO) the input FIFO never
		 * gets any data, which breaks our logic in poll_transfer()
		 * above.
		 */
		priv->tmode = SPI_TMOD_TR;

	cr0 &= ~SPI_TMOD_MASK;
	cr0 |= (priv->tmode << SPI_TMOD_OFFSET);

	priv->len = bitlen >> 3;

	priv->tx = (void *)tx;
	priv->tx_end = priv->tx + priv->len;
	priv->rx = rx;
	priv->rx_end = priv->rx + priv->len;

	/* Disable controller before writing control registers */
	spi_enable_chip(priv, 0);

	dev_dbg(dev, "cr0=%08x rx=%p tx=%p len=%d [bytes]\n", cr0, rx, tx,
		priv->len);
	/* Reprogram cr0 only if changed */
	if (dw_read(priv, DW_SPI_CTRL0) != cr0)
		dw_write(priv, DW_SPI_CTRL0, cr0);

	/*
	 * Configure the desired SS (slave select 0...3) in the controller.
	 * The DW SPI controller will activate and deactivate this CS
	 * automatically, so no cs_activate() etc. is needed in this driver.
	 */
	cs = spi_chip_select(dev);
	dw_write(priv, DW_SPI_SER, 1 << cs);

	/* Enable controller after writing control registers */
	spi_enable_chip(priv, 1);

	/* Start transfer in a polling loop */
	ret = poll_transfer(priv);

	/*
	 * Wait for the current transmit operation to complete. Otherwise,
	 * data still sitting in the TX FIFO can be silently flushed, i.e.
	 * dropped, when the controller is disabled by writing 0 to
	 * DW_SPI_SSIENR at the beginning of the next transfer.
	 */
	if (readl_poll_timeout(priv->regs + DW_SPI_SR, val,
			       (val & SR_TF_EMPT) && !(val & SR_BUSY),
			       RX_TIMEOUT * 1000)) {
		ret = -ETIMEDOUT;
	}

	/* Stop the transaction if necessary */
	if (flags & SPI_XFER_END)
		external_cs_manage(dev, true);

	return ret;
}

static int dw_spi_set_speed(struct udevice *bus, uint speed)
{
	struct dw_spi_platdata *plat = bus->platdata;
	struct dw_spi_priv *priv = dev_get_priv(bus);
	u16 clk_div;

	if (speed > plat->frequency)
		speed = plat->frequency;

	/* Disable controller before writing control registers */
	spi_enable_chip(priv, 0);

	/* BAUDR only supports even dividers, so round an odd divider up */
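	/*
	 * Worked example (illustrative numbers): with a 50 MHz bus clock and
	 * a 2 MHz request, the raw divider is 25, which rounds up to 26, so
	 * the actual SCLK is 50 MHz / 26, roughly 1.92 MHz.
	 */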
	clk_div = priv->bus_clk_rate / speed;
	clk_div = (clk_div + 1) & 0xfffe;
	dw_write(priv, DW_SPI_BAUDR, clk_div);

	/* Enable controller after writing control registers */
	spi_enable_chip(priv, 1);

	priv->freq = speed;
	dev_dbg(bus, "speed=%d clk_div=%d\n", priv->freq, clk_div);

	return 0;
}

static int dw_spi_set_mode(struct udevice *bus, uint mode)
{
	struct dw_spi_priv *priv = dev_get_priv(bus);

	/*
	 * We can't program the mode yet: the final CTRL0 value also depends
	 * on whether rx, tx, or both are requested, so this is deferred to
	 * the actual transfer function.
	 */
	priv->mode = mode;
	dev_dbg(bus, "mode=%d\n", priv->mode);

	return 0;
}

static int dw_spi_remove(struct udevice *bus)
{
	struct dw_spi_priv *priv = dev_get_priv(bus);
	int ret;

	ret = reset_release_bulk(&priv->resets);
	if (ret)
		return ret;

#if CONFIG_IS_ENABLED(CLK)
	ret = clk_disable(&priv->clk);
	if (ret)
		return ret;

	ret = clk_free(&priv->clk);
	if (ret)
		return ret;
#endif
	return 0;
}

static const struct dm_spi_ops dw_spi_ops = {
	.xfer		= dw_spi_xfer,
	.set_speed	= dw_spi_set_speed,
	.set_mode	= dw_spi_set_mode,
	/*
	 * cs_info is not needed, since we require all chip selects to be
	 * in the device tree explicitly
	 */
};

static const struct udevice_id dw_spi_ids[] = {
	{ .compatible = "snps,dw-apb-ssi" },
	{ }
};

U_BOOT_DRIVER(dw_spi) = {
	.name = "dw_spi",
	.id = UCLASS_SPI,
	.of_match = dw_spi_ids,
	.ops = &dw_spi_ops,
	.ofdata_to_platdata = dw_spi_ofdata_to_platdata,
	.platdata_auto_alloc_size = sizeof(struct dw_spi_platdata),
	.priv_auto_alloc_size = sizeof(struct dw_spi_priv),
	.probe = dw_spi_probe,
	.remove = dw_spi_remove,
};
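
/*
 * Illustrative device tree node served by this driver (addresses, clock
 * phandle, reset specifier and GPIO values below are made up for the
 * example):
 *
 *	spi0: spi@f0000000 {
 *		compatible = "snps,dw-apb-ssi";
 *		reg = <0xf0000000 0x100>;
 *		#address-cells = <1>;
 *		#size-cells = <0>;
 *		clocks = <&spi_clk>;
 *		spi-max-frequency = <25000000>;
 *		cs-gpios = <&gpio0 5 GPIO_ACTIVE_LOW>;
 *	};
 *
 * spi-max-frequency and cs-gpios are read from the controller node itself,
 * matching dw_spi_ofdata_to_platdata() and request_gpio_cs() above; the
 * clock is taken from index 0, matching dw_spi_get_clk().
 */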