// SPDX-License-Identifier: GPL-2.0+
/*
 * Direct Memory Access U-Class Simulation driver
 *
 * Copyright (C) 2018 Texas Instruments Incorporated <www.ti.com>
 *
 * Author: Grygorii Strashko <grygorii.strashko@ti.com>
 */
9
10#include <common.h>
11#include <dm.h>
Simon Glass9bc15642020-02-03 07:36:16 -070012#include <malloc.h>
Grygorii Strashko19ebf0b2018-11-28 19:17:51 +010013#include <dm/read.h>
14#include <dma-uclass.h>
15#include <dt-structs.h>
16#include <errno.h>
17
/* Number of simulated channels: 0 = mem-to-mem, 1 = TX, 2 = RX */
#define SANDBOX_DMA_CH_CNT	3
/* Size of the internal loopback buffer shared by send and receive */
#define SANDBOX_DMA_BUF_SIZE	1024
20
21struct sandbox_dma_chan {
22 struct sandbox_dma_dev *ud;
23 char name[20];
24 u32 id;
25 enum dma_direction dir;
26 bool in_use;
27 bool enabled;
28};
29
30struct sandbox_dma_dev {
31 struct device *dev;
32 u32 ch_count;
33 struct sandbox_dma_chan channels[SANDBOX_DMA_CH_CNT];
34 uchar buf[SANDBOX_DMA_BUF_SIZE];
35 uchar *buf_rx;
36 size_t data_len;
37 u32 meta;
38};
39
/**
 * sandbox_dma_transfer() - simulate a one-shot DMA transfer
 * @dev:       DMA device (unused in the simulation)
 * @direction: transfer direction (unused; a plain copy models all of them)
 * @dst:       destination buffer, must hold at least @len bytes
 * @src:       source buffer
 * @len:       number of bytes to copy
 *
 * A memcpy() stands in for the hardware transfer.
 *
 * Return: always 0
 */
static int sandbox_dma_transfer(struct udevice *dev, int direction,
				void *dst, void *src, size_t len)
{
	memcpy(dst, src, len);

	return 0;
}
47
48static int sandbox_dma_of_xlate(struct dma *dma,
49 struct ofnode_phandle_args *args)
50{
51 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
52 struct sandbox_dma_chan *uc;
53
54 debug("%s(dma id=%u)\n", __func__, args->args[0]);
55
56 if (args->args[0] >= SANDBOX_DMA_CH_CNT)
57 return -EINVAL;
58
59 dma->id = args->args[0];
60
61 uc = &ud->channels[dma->id];
62
63 if (dma->id == 1)
64 uc->dir = DMA_MEM_TO_DEV;
65 else if (dma->id == 2)
66 uc->dir = DMA_DEV_TO_MEM;
67 else
68 uc->dir = DMA_MEM_TO_MEM;
69 debug("%s(dma id=%lu dir=%d)\n", __func__, dma->id, uc->dir);
70
71 return 0;
72}
73
74static int sandbox_dma_request(struct dma *dma)
75{
76 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
77 struct sandbox_dma_chan *uc;
78
79 if (dma->id >= SANDBOX_DMA_CH_CNT)
80 return -EINVAL;
81
82 uc = &ud->channels[dma->id];
83 if (uc->in_use)
84 return -EBUSY;
85
86 uc->in_use = true;
87 debug("%s(dma id=%lu in_use=%d)\n", __func__, dma->id, uc->in_use);
88
89 return 0;
90}
91
Simon Glass75c0ad62020-02-03 07:35:55 -070092static int sandbox_dma_rfree(struct dma *dma)
Grygorii Strashko19ebf0b2018-11-28 19:17:51 +010093{
94 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
95 struct sandbox_dma_chan *uc;
96
97 if (dma->id >= SANDBOX_DMA_CH_CNT)
98 return -EINVAL;
99
100 uc = &ud->channels[dma->id];
101 if (!uc->in_use)
102 return -EINVAL;
103
104 uc->in_use = false;
105 ud->buf_rx = NULL;
106 ud->data_len = 0;
107 debug("%s(dma id=%lu in_use=%d)\n", __func__, dma->id, uc->in_use);
108
109 return 0;
110}
111
112static int sandbox_dma_enable(struct dma *dma)
113{
114 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
115 struct sandbox_dma_chan *uc;
116
117 if (dma->id >= SANDBOX_DMA_CH_CNT)
118 return -EINVAL;
119
120 uc = &ud->channels[dma->id];
121 if (!uc->in_use)
122 return -EINVAL;
123 if (uc->enabled)
124 return -EINVAL;
125
126 uc->enabled = true;
127 debug("%s(dma id=%lu enabled=%d)\n", __func__, dma->id, uc->enabled);
128
129 return 0;
130}
131
132static int sandbox_dma_disable(struct dma *dma)
133{
134 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
135 struct sandbox_dma_chan *uc;
136
137 if (dma->id >= SANDBOX_DMA_CH_CNT)
138 return -EINVAL;
139
140 uc = &ud->channels[dma->id];
141 if (!uc->in_use)
142 return -EINVAL;
143 if (!uc->enabled)
144 return -EINVAL;
145
146 uc->enabled = false;
147 debug("%s(dma id=%lu enabled=%d)\n", __func__, dma->id, uc->enabled);
148
149 return 0;
150}
151
152static int sandbox_dma_send(struct dma *dma,
153 void *src, size_t len, void *metadata)
154{
155 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
156 struct sandbox_dma_chan *uc;
157
158 if (dma->id >= SANDBOX_DMA_CH_CNT)
159 return -EINVAL;
160 if (!src || !metadata)
161 return -EINVAL;
162
163 debug("%s(dma id=%lu)\n", __func__, dma->id);
164
165 uc = &ud->channels[dma->id];
166 if (uc->dir != DMA_MEM_TO_DEV)
167 return -EINVAL;
168 if (!uc->in_use)
169 return -EINVAL;
170 if (!uc->enabled)
171 return -EINVAL;
172 if (len >= SANDBOX_DMA_BUF_SIZE)
173 return -EINVAL;
174
175 memcpy(ud->buf, src, len);
176 ud->data_len = len;
177 ud->meta = *((u32 *)metadata);
178
179 debug("%s(dma id=%lu len=%zu meta=%08x)\n",
180 __func__, dma->id, len, ud->meta);
181
182 return 0;
183}
184
185static int sandbox_dma_receive(struct dma *dma, void **dst, void *metadata)
186{
187 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
188 struct sandbox_dma_chan *uc;
189
190 if (dma->id >= SANDBOX_DMA_CH_CNT)
191 return -EINVAL;
192 if (!dst || !metadata)
193 return -EINVAL;
194
195 uc = &ud->channels[dma->id];
196 if (uc->dir != DMA_DEV_TO_MEM)
197 return -EINVAL;
198 if (!uc->in_use)
199 return -EINVAL;
200 if (!uc->enabled)
201 return -EINVAL;
202 if (!ud->data_len)
203 return 0;
204
205 if (ud->buf_rx) {
206 memcpy(ud->buf_rx, ud->buf, ud->data_len);
207 *dst = ud->buf_rx;
208 } else {
209 memcpy(*dst, ud->buf, ud->data_len);
210 }
211
212 *((u32 *)metadata) = ud->meta;
213
214 debug("%s(dma id=%lu len=%zu meta=%08x %p)\n",
215 __func__, dma->id, ud->data_len, ud->meta, *dst);
216
217 return ud->data_len;
218}
219
220static int sandbox_dma_prepare_rcv_buf(struct dma *dma, void *dst, size_t size)
221{
222 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
223
224 ud->buf_rx = dst;
225
226 return 0;
227}
228
229static const struct dma_ops sandbox_dma_ops = {
230 .transfer = sandbox_dma_transfer,
231 .of_xlate = sandbox_dma_of_xlate,
232 .request = sandbox_dma_request,
Simon Glass75c0ad62020-02-03 07:35:55 -0700233 .rfree = sandbox_dma_rfree,
Grygorii Strashko19ebf0b2018-11-28 19:17:51 +0100234 .enable = sandbox_dma_enable,
235 .disable = sandbox_dma_disable,
236 .send = sandbox_dma_send,
237 .receive = sandbox_dma_receive,
238 .prepare_rcv_buf = sandbox_dma_prepare_rcv_buf,
239};
240
241static int sandbox_dma_probe(struct udevice *dev)
242{
243 struct dma_dev_priv *uc_priv = dev_get_uclass_priv(dev);
244 struct sandbox_dma_dev *ud = dev_get_priv(dev);
245 int i, ret = 0;
246
247 uc_priv->supported = DMA_SUPPORTS_MEM_TO_MEM |
248 DMA_SUPPORTS_MEM_TO_DEV |
249 DMA_SUPPORTS_DEV_TO_MEM;
250
251 ud->ch_count = SANDBOX_DMA_CH_CNT;
252 ud->buf_rx = NULL;
253 ud->meta = 0;
254 ud->data_len = 0;
255
256 pr_err("Number of channels: %u\n", ud->ch_count);
257
258 for (i = 0; i < ud->ch_count; i++) {
259 struct sandbox_dma_chan *uc = &ud->channels[i];
260
261 uc->ud = ud;
262 uc->id = i;
263 sprintf(uc->name, "DMA chan%d\n", i);
264 uc->in_use = false;
265 uc->enabled = false;
266 }
267
268 return ret;
269}
270
271static const struct udevice_id sandbox_dma_ids[] = {
272 { .compatible = "sandbox,dma" },
273 { }
274};
275
276U_BOOT_DRIVER(sandbox_dma) = {
277 .name = "sandbox-dma",
278 .id = UCLASS_DMA,
279 .of_match = sandbox_dma_ids,
280 .ops = &sandbox_dma_ops,
281 .probe = sandbox_dma_probe,
282 .priv_auto_alloc_size = sizeof(struct sandbox_dma_dev),
283};