// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2020 Amit Singh Tomar <amittomer25@gmail.com>
 *
 * Driver for the SD/MMC controller present on Actions Semi S700/S900 SoCs,
 * based on the Linux driver "drivers/mmc/host/owl-mmc.c".
 *
 * There is a bit (BSEL, Bus or DMA Special Channel Selection) that selects
 * whether data in the SDx_DAT register is moved over the CPU AHB bus or by
 * a DMA channel, but the transfer only appears to work correctly through an
 * external DMA channel. The special-channel bits used in this driver are
 * taken from the vendor source specifically for MMC/SD.
 */
#include <clk.h>
#include <cpu_func.h>
#include <dm.h>
#include <errno.h>
#include <log.h>
#include <mmc.h>
#include <asm/io.h>
#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/iopoll.h>

/*
 * SDC registers
 */
#define OWL_REG_SD_EN			0x0000
#define OWL_REG_SD_CTL			0x0004
#define OWL_REG_SD_STATE		0x0008
#define OWL_REG_SD_CMD			0x000c
#define OWL_REG_SD_ARG			0x0010
#define OWL_REG_SD_RSPBUF0		0x0014
#define OWL_REG_SD_RSPBUF1		0x0018
#define OWL_REG_SD_RSPBUF2		0x001c
#define OWL_REG_SD_RSPBUF3		0x0020
#define OWL_REG_SD_RSPBUF4		0x0024
#define OWL_REG_SD_DAT			0x0028
#define OWL_REG_SD_BLK_SIZE		0x002c
#define OWL_REG_SD_BLK_NUM		0x0030
#define OWL_REG_SD_BUF_SIZE		0x0034

/* SD_EN Bits */
#define OWL_SD_EN_RANE			BIT(31)
#define OWL_SD_EN_RESE			BIT(10)
#define OWL_SD_ENABLE			BIT(7)
#define OWL_SD_EN_BSEL			BIT(6)
#define OWL_SD_EN_DATAWID(x)		(((x) & 0x3) << 0)
#define OWL_SD_EN_DATAWID_MASK		0x03

/* SD_CTL Bits */
#define OWL_SD_CTL_TOUTEN		BIT(31)
#define OWL_SD_CTL_DELAY_MSK		GENMASK(23, 16)
#define OWL_SD_CTL_RDELAY(x)		(((x) & 0xf) << 20)
#define OWL_SD_CTL_WDELAY(x)		(((x) & 0xf) << 16)
#define OWL_SD_CTL_TS			BIT(7)
#define OWL_SD_CTL_LBE			BIT(6)
#define OWL_SD_CTL_TM(x)		(((x) & 0xf) << 0)

#define OWL_SD_DELAY_LOW_CLK		0x0f
#define OWL_SD_DELAY_MID_CLK		0x0a
#define OWL_SD_RDELAY_HIGH		0x08
#define OWL_SD_WDELAY_HIGH		0x09

/* SD_STATE Bits */
#define OWL_SD_STATE_DAT0S		BIT(7)
#define OWL_SD_STATE_CLNR		BIT(4)
#define OWL_SD_STATE_CRC7ER		BIT(0)

#define OWL_MMC_OCR			(MMC_VDD_32_33 | MMC_VDD_33_34 | \
					 MMC_VDD_165_195)

#define DATA_TRANSFER_TIMEOUT		3000000
#define DMA_TRANSFER_TIMEOUT		5000000

/*
 * Simple DMA transfer operation defines for MMC/SD card
 */
#define SD_DMA_CHANNEL(base, channel)	((base) + 0x100 + 0x100 * (channel))

#define DMA_MODE			0x0000
#define DMA_SOURCE			0x0004
#define DMA_DESTINATION			0x0008
#define DMA_FRAME_LEN			0x000C
#define DMA_FRAME_CNT			0x0010
#define DMA_START			0x0024

/* DMAx_MODE */
#define DMA_MODE_ST(x)			(((x) & 0x3) << 8)
#define DMA_MODE_ST_DEV			DMA_MODE_ST(0)
#define DMA_MODE_DT(x)			(((x) & 0x3) << 10)
#define DMA_MODE_DT_DCU			DMA_MODE_DT(2)
#define DMA_MODE_SAM(x)			(((x) & 0x3) << 16)
#define DMA_MODE_SAM_CONST		DMA_MODE_SAM(0)
#define DMA_MODE_DAM(x)			(((x) & 0x3) << 18)
#define DMA_MODE_DAM_INC		DMA_MODE_DAM(1)

#define DMA_ENABLE			0x1

struct owl_mmc_plat {
	struct mmc_config cfg;
	struct mmc mmc;
};

struct owl_mmc_priv {
	void *reg_base;
	void *dma_channel;
	struct clk clk;
	unsigned int clock;		/* Current clock */
	unsigned int dma_drq;		/* Trigger Source */
};

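/*
 * Program channel 0 of the external DMA controller for a single frame of
 * 'len' bytes between 'src' and 'dst'. The trigger source in priv->dma_drq
 * (taken from the "dmas" DT property in owl_mmc_probe()) is OR'd into the
 * mode word; the channel is started later from owl_mmc_prepare_data().
 */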
static void owl_dma_config(struct owl_mmc_priv *priv, unsigned int src,
			   unsigned int dst, unsigned int len)
{
	unsigned int mode = priv->dma_drq;

	/* Set source and destination address mode */
	mode |= (DMA_MODE_ST_DEV | DMA_MODE_SAM_CONST | DMA_MODE_DT_DCU |
		 DMA_MODE_DAM_INC);

	writel(mode, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_MODE);
	writel(src, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_SOURCE);
	writel(dst, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_DESTINATION);
	writel(len, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_FRAME_LEN);
	writel(0x1, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_FRAME_CNT);
}

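/*
 * Set up block count/size and the internal buffer size, point the DMA
 * channel at the data buffer (keeping the cache coherent with
 * flush/invalidate), and start the DMA engine. BSEL is set so the
 * controller moves data through the external DMA channel rather than
 * the CPU AHB bus.
 */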
static void owl_mmc_prepare_data(struct owl_mmc_priv *priv,
				 struct mmc_data *data)
{
	unsigned int total;
	u32 buf = 0;

	setbits_le32(priv->reg_base + OWL_REG_SD_EN, OWL_SD_EN_BSEL);

	writel(data->blocks, priv->reg_base + OWL_REG_SD_BLK_NUM);
	writel(data->blocksize, priv->reg_base + OWL_REG_SD_BLK_SIZE);
	total = data->blocksize * data->blocks;

	if (total < 512)
		writel(total, priv->reg_base + OWL_REG_SD_BUF_SIZE);
	else
		writel(512, priv->reg_base + OWL_REG_SD_BUF_SIZE);

	/* DMA STOP */
	writel(0x0, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START);

	if (data) {
		if (data->flags == MMC_DATA_READ) {
			buf = (ulong) (data->dest);
			owl_dma_config(priv, (ulong) priv->reg_base +
				       OWL_REG_SD_DAT, buf, total);
			invalidate_dcache_range(buf, buf + total);
		} else {
			buf = (ulong) (data->src);
			owl_dma_config(priv, buf, (ulong) priv->reg_base +
				       OWL_REG_SD_DAT, total);
			flush_dcache_range(buf, buf + total);
		}
		/* DMA START */
		writel(0x1, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START);
	}
}

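/*
 * Issue a single command (with an optional data transfer). The transfer
 * mode (TM) field is derived from the expected response type, the command
 * index and argument are written, and the transfer is kicked off by setting
 * TS. Completion is detected by polling for TS to clear; the response
 * buffers are then read back and, for data commands, DMA completion is
 * polled before the channel and the transfer are stopped.
 */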
static int owl_mmc_send_cmd(struct udevice *dev, struct mmc_cmd *cmd,
			    struct mmc_data *data)
{
	struct owl_mmc_priv *priv = dev_get_priv(dev);
	unsigned int cmd_rsp_mask = 0, mode, reg;
	int ret;

	setbits_le32(priv->reg_base + OWL_REG_SD_EN, OWL_SD_ENABLE);

	/* setup response */
	mode = 0;
	if (cmd->resp_type != MMC_RSP_NONE)
		cmd_rsp_mask = OWL_SD_STATE_CLNR | OWL_SD_STATE_CRC7ER;
	if (cmd->resp_type == MMC_RSP_R1) {
		if (data) {
			if (data->flags == MMC_DATA_READ)
				mode |= OWL_SD_CTL_TM(4);
			else
				mode |= OWL_SD_CTL_TM(5);
		} else
			mode |= OWL_SD_CTL_TM(1);
	} else if (cmd->resp_type == MMC_RSP_R2) {
		mode = OWL_SD_CTL_TM(2);
	} else if (cmd->resp_type == MMC_RSP_R1b) {
		mode = OWL_SD_CTL_TM(3);
	} else if (cmd->resp_type == MMC_RSP_R3) {
		cmd_rsp_mask = OWL_SD_STATE_CLNR;
		mode = OWL_SD_CTL_TM(1);
	}

	mode |= (readl(priv->reg_base + OWL_REG_SD_CTL) & (0xff << 16));

	/* setup command */
	writel(cmd->cmdidx, priv->reg_base + OWL_REG_SD_CMD);
	writel(cmd->cmdarg, priv->reg_base + OWL_REG_SD_ARG);

	/* Set LBE to send clk at the end of last read block */
	if (data)
		mode |= (OWL_SD_CTL_TS | OWL_SD_CTL_LBE | 0xE4000000);
	else
		mode |= OWL_SD_CTL_TS;

	if (data)
		owl_mmc_prepare_data(priv, data);

	/* Start transfer */
	writel(mode, priv->reg_base + OWL_REG_SD_CTL);

	ret = readl_poll_timeout(priv->reg_base + OWL_REG_SD_CTL, reg,
				 !(reg & OWL_SD_CTL_TS), DATA_TRANSFER_TIMEOUT);

	if (ret == -ETIMEDOUT) {
		debug("error: data transfer timeout\n");
		return ret;
	}

	reg = readl(priv->reg_base + OWL_REG_SD_STATE) & cmd_rsp_mask;
	if (cmd->resp_type & MMC_RSP_PRESENT) {
		if (reg & OWL_SD_STATE_CLNR) {
			printf("Error CMD_NO_RSP\n");
			return -1;
		}

		if (reg & OWL_SD_STATE_CRC7ER) {
			printf("Error CMD_RSP_CRC\n");
			return -1;
		}

		if (cmd->resp_type & MMC_RSP_136) {
			cmd->response[3] = readl(priv->reg_base + OWL_REG_SD_RSPBUF0);
			cmd->response[2] = readl(priv->reg_base + OWL_REG_SD_RSPBUF1);
			cmd->response[1] = readl(priv->reg_base + OWL_REG_SD_RSPBUF2);
			cmd->response[0] = readl(priv->reg_base + OWL_REG_SD_RSPBUF3);
		} else {
			u32 rsp[2];

			rsp[0] = readl(priv->reg_base + OWL_REG_SD_RSPBUF0);
			rsp[1] = readl(priv->reg_base + OWL_REG_SD_RSPBUF1);
			cmd->response[0] = rsp[1] << 24 | rsp[0] >> 8;
			cmd->response[1] = rsp[1] >> 8;
		}
	}

	if (data) {
		ret = readl_poll_timeout(SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START,
					 reg, !(reg & DMA_ENABLE), DMA_TRANSFER_TIMEOUT);

		if (ret == -ETIMEDOUT) {
			debug("error: DMA transfer timeout\n");
			return ret;
		}

		/* DMA STOP */
		writel(0x0, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START);
		/* Transmission STOP */
		while (readl(priv->reg_base + OWL_REG_SD_CTL) & OWL_SD_CTL_TS)
			clrbits_le32(priv->reg_base + OWL_REG_SD_CTL,
				     OWL_SD_CTL_TS);
	}

	return 0;
}

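/*
 * Pick the read/write delay chain values for the requested clock rate and
 * program them into SD_CTL. The rate thresholds (1 MHz / 26 MHz / 52 MHz)
 * and delay values follow the Linux owl-mmc driver this code is based on.
 */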
static int owl_mmc_clk_set(struct owl_mmc_priv *priv, int rate)
{
	u32 reg, wdelay, rdelay;

	reg = readl(priv->reg_base + OWL_REG_SD_CTL);
	reg &= ~OWL_SD_CTL_DELAY_MSK;

	/* Set RDELAY and WDELAY based on the clock */
	if (rate <= 1000000)
		rdelay = wdelay = OWL_SD_DELAY_LOW_CLK;
	else if ((rate > 1000000) && (rate <= 26000000))
		rdelay = wdelay = OWL_SD_DELAY_MID_CLK;
	else if ((rate > 26000000) && (rate <= 52000000)) {
		rdelay = OWL_SD_RDELAY_HIGH;
		wdelay = OWL_SD_WDELAY_HIGH;
	} else {
		debug("SD clock rate not supported\n");
		return -EINVAL;
	}

	writel(reg | OWL_SD_CTL_RDELAY(rdelay) | OWL_SD_CTL_WDELAY(wdelay),
	       priv->reg_base + OWL_REG_SD_CTL);

	return 0;
}

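/*
 * Apply the bus settings requested by the MMC core: update the controller
 * delay chains and module clock rate when the clock changes, gate/ungate
 * the clock as requested, and program the bus width into SD_EN.
 */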
static int owl_mmc_set_ios(struct udevice *dev)
{
	struct owl_mmc_priv *priv = dev_get_priv(dev);
	struct owl_mmc_plat *plat = dev_get_plat(dev);
	struct mmc *mmc = &plat->mmc;
	u32 reg, ret;

	if (mmc->clock != priv->clock) {
		priv->clock = mmc->clock;
		ret = owl_mmc_clk_set(priv, mmc->clock);
		if (IS_ERR_VALUE(ret))
			return ret;

		ret = clk_set_rate(&priv->clk, mmc->clock);
		if (IS_ERR_VALUE(ret))
			return ret;
	}

	if (mmc->clk_disable)
		ret = clk_disable(&priv->clk);
	else
		ret = clk_enable(&priv->clk);
	if (ret)
		return ret;

	/* Set the bus width */
	reg = readl(priv->reg_base + OWL_REG_SD_EN);
	reg &= ~OWL_SD_EN_DATAWID_MASK;
	if (mmc->bus_width == 8)
		reg |= OWL_SD_EN_DATAWID(2);
	else if (mmc->bus_width == 4)
		reg |= OWL_SD_EN_DATAWID(1);

	writel(reg, priv->reg_base + OWL_REG_SD_EN);

	return 0;
}

static const struct dm_mmc_ops owl_mmc_ops = {
	.send_cmd	= owl_mmc_send_cmd,
	.set_ios	= owl_mmc_set_ios,
};

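/*
 * Fill in the mmc_config, map the controller registers, resolve the DMA
 * channel base and trigger source from the "dmas" phandle, and get the
 * module clock. Host capabilities from the device tree are merged in via
 * mmc_of_parse().
 */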
static int owl_mmc_probe(struct udevice *dev)
{
	struct mmc_uclass_priv *upriv = dev_get_uclass_priv(dev);
	struct owl_mmc_plat *plat = dev_get_plat(dev);
	struct owl_mmc_priv *priv = dev_get_priv(dev);
	struct mmc_config *cfg = &plat->cfg;
	struct ofnode_phandle_args args;
	int ret;
	fdt_addr_t addr;

	cfg->name = dev->name;
	cfg->voltages = OWL_MMC_OCR;
	cfg->f_min = 400000;
	cfg->f_max = 52000000;
	cfg->b_max = 512;
	cfg->host_caps = MMC_MODE_HS | MMC_MODE_HS_52MHz;

	ret = mmc_of_parse(dev, cfg);
	if (ret)
		return ret;

	addr = dev_read_addr(dev);
	if (addr == FDT_ADDR_T_NONE)
		return -EINVAL;

	priv->reg_base = (void *)addr;

	ret = dev_read_phandle_with_args(dev, "dmas", "#dma-cells", 0, 0,
					 &args);
	if (ret)
		return ret;

	priv->dma_channel = (void *)ofnode_get_addr(args.node);
	priv->dma_drq = args.args[0];

	ret = clk_get_by_index(dev, 0, &priv->clk);
	if (ret) {
		debug("clk_get_by_index() failed: %d\n", ret);
		return ret;
	}

	upriv->mmc = &plat->mmc;

	return 0;
}

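/* Bind the MMC device using the config and mmc structs allocated via plat_auto. */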
static int owl_mmc_bind(struct udevice *dev)
{
	struct owl_mmc_plat *plat = dev_get_plat(dev);

	return mmc_bind(dev, &plat->mmc, &plat->cfg);
}

static const struct udevice_id owl_mmc_ids[] = {
	{ .compatible = "actions,s700-mmc" },
	{ .compatible = "actions,owl-mmc" },
	{ }
};

U_BOOT_DRIVER(owl_mmc_drv) = {
	.name		= "owl_mmc",
	.id		= UCLASS_MMC,
	.of_match	= owl_mmc_ids,
	.bind		= owl_mmc_bind,
	.probe		= owl_mmc_probe,
	.ops		= &owl_mmc_ops,
	.plat_auto	= sizeof(struct owl_mmc_plat),
	.priv_auto	= sizeof(struct owl_mmc_priv),
};