blob: a19e5e37fb9d68886782faae027819b771f0267f [file] [log] [blame]
Grygorii Strashko19ebf0b2018-11-28 19:17:51 +01001// SPDX-License-Identifier: GPL-2.0+
2/*
3 * Direct Memory Access U-Class Simulation driver
4 *
5 * Copyright (C) 2018 Texas Instruments Incorporated <www.ti.com>
6 *
7 * Author: Grygorii Strashko <grygorii.strashko@ti.com>
8 */
9
10#include <common.h>
11#include <dm.h>
Simon Glass0f2af882020-05-10 11:40:05 -060012#include <log.h>
Simon Glass9bc15642020-02-03 07:36:16 -070013#include <malloc.h>
Grygorii Strashko19ebf0b2018-11-28 19:17:51 +010014#include <dm/read.h>
15#include <dma-uclass.h>
16#include <dt-structs.h>
17#include <errno.h>
Simon Glassbdd5f812023-09-14 18:21:46 -060018#include <linux/printk.h>
Grygorii Strashko19ebf0b2018-11-28 19:17:51 +010019
20#define SANDBOX_DMA_CH_CNT 3
21#define SANDBOX_DMA_BUF_SIZE 1024
22
/* Per-channel state for the simulated DMA controller. */
struct sandbox_dma_chan {
	struct sandbox_dma_dev *ud;	/* parent device this channel belongs to */
	char name[20];			/* human-readable channel name (set at probe) */
	u32 id;				/* channel index within the device */
	enum dma_direction dir;		/* transfer direction, assigned in of_xlate() */
	bool in_use;			/* set by request(), cleared by rfree() */
	bool enabled;			/* set by enable(), cleared by disable() */
};
31
/* Device-wide state: the channel table plus one shared staging buffer. */
struct sandbox_dma_dev {
	struct device *dev;	/* NOTE(review): not referenced in this file -- confirm it is needed */
	u32 ch_count;		/* number of channels (SANDBOX_DMA_CH_CNT) */
	struct sandbox_dma_chan channels[SANDBOX_DMA_CH_CNT];
	uchar buf[SANDBOX_DMA_BUF_SIZE];	/* staging buffer shared by send()/receive() */
	uchar *buf_rx;		/* client rx buffer registered via prepare_rcv_buf(), or NULL */
	size_t data_len;	/* number of bytes currently staged in buf */
	u32 meta;		/* metadata word captured on send(), returned by receive() */
};
41
42static int sandbox_dma_transfer(struct udevice *dev, int direction,
Andrew Davisd2da2842022-10-07 12:11:13 -050043 dma_addr_t dst, dma_addr_t src, size_t len)
Grygorii Strashko19ebf0b2018-11-28 19:17:51 +010044{
Andrew Davisd2da2842022-10-07 12:11:13 -050045 memcpy((void *)dst, (void *)src, len);
Grygorii Strashko19ebf0b2018-11-28 19:17:51 +010046
47 return 0;
48}
49
50static int sandbox_dma_of_xlate(struct dma *dma,
51 struct ofnode_phandle_args *args)
52{
53 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
54 struct sandbox_dma_chan *uc;
55
56 debug("%s(dma id=%u)\n", __func__, args->args[0]);
57
58 if (args->args[0] >= SANDBOX_DMA_CH_CNT)
59 return -EINVAL;
60
61 dma->id = args->args[0];
62
63 uc = &ud->channels[dma->id];
64
65 if (dma->id == 1)
66 uc->dir = DMA_MEM_TO_DEV;
67 else if (dma->id == 2)
68 uc->dir = DMA_DEV_TO_MEM;
69 else
70 uc->dir = DMA_MEM_TO_MEM;
71 debug("%s(dma id=%lu dir=%d)\n", __func__, dma->id, uc->dir);
72
73 return 0;
74}
75
76static int sandbox_dma_request(struct dma *dma)
77{
78 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
79 struct sandbox_dma_chan *uc;
80
81 if (dma->id >= SANDBOX_DMA_CH_CNT)
82 return -EINVAL;
83
84 uc = &ud->channels[dma->id];
85 if (uc->in_use)
86 return -EBUSY;
87
88 uc->in_use = true;
89 debug("%s(dma id=%lu in_use=%d)\n", __func__, dma->id, uc->in_use);
90
91 return 0;
92}
93
Simon Glass75c0ad62020-02-03 07:35:55 -070094static int sandbox_dma_rfree(struct dma *dma)
Grygorii Strashko19ebf0b2018-11-28 19:17:51 +010095{
96 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
97 struct sandbox_dma_chan *uc;
98
99 if (dma->id >= SANDBOX_DMA_CH_CNT)
100 return -EINVAL;
101
102 uc = &ud->channels[dma->id];
103 if (!uc->in_use)
104 return -EINVAL;
105
106 uc->in_use = false;
107 ud->buf_rx = NULL;
108 ud->data_len = 0;
109 debug("%s(dma id=%lu in_use=%d)\n", __func__, dma->id, uc->in_use);
110
111 return 0;
112}
113
114static int sandbox_dma_enable(struct dma *dma)
115{
116 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
117 struct sandbox_dma_chan *uc;
118
119 if (dma->id >= SANDBOX_DMA_CH_CNT)
120 return -EINVAL;
121
122 uc = &ud->channels[dma->id];
123 if (!uc->in_use)
124 return -EINVAL;
125 if (uc->enabled)
126 return -EINVAL;
127
128 uc->enabled = true;
129 debug("%s(dma id=%lu enabled=%d)\n", __func__, dma->id, uc->enabled);
130
131 return 0;
132}
133
134static int sandbox_dma_disable(struct dma *dma)
135{
136 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
137 struct sandbox_dma_chan *uc;
138
139 if (dma->id >= SANDBOX_DMA_CH_CNT)
140 return -EINVAL;
141
142 uc = &ud->channels[dma->id];
143 if (!uc->in_use)
144 return -EINVAL;
145 if (!uc->enabled)
146 return -EINVAL;
147
148 uc->enabled = false;
149 debug("%s(dma id=%lu enabled=%d)\n", __func__, dma->id, uc->enabled);
150
151 return 0;
152}
153
154static int sandbox_dma_send(struct dma *dma,
155 void *src, size_t len, void *metadata)
156{
157 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
158 struct sandbox_dma_chan *uc;
159
160 if (dma->id >= SANDBOX_DMA_CH_CNT)
161 return -EINVAL;
162 if (!src || !metadata)
163 return -EINVAL;
164
165 debug("%s(dma id=%lu)\n", __func__, dma->id);
166
167 uc = &ud->channels[dma->id];
168 if (uc->dir != DMA_MEM_TO_DEV)
169 return -EINVAL;
170 if (!uc->in_use)
171 return -EINVAL;
172 if (!uc->enabled)
173 return -EINVAL;
174 if (len >= SANDBOX_DMA_BUF_SIZE)
175 return -EINVAL;
176
177 memcpy(ud->buf, src, len);
178 ud->data_len = len;
179 ud->meta = *((u32 *)metadata);
180
181 debug("%s(dma id=%lu len=%zu meta=%08x)\n",
182 __func__, dma->id, len, ud->meta);
183
184 return 0;
185}
186
187static int sandbox_dma_receive(struct dma *dma, void **dst, void *metadata)
188{
189 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
190 struct sandbox_dma_chan *uc;
191
192 if (dma->id >= SANDBOX_DMA_CH_CNT)
193 return -EINVAL;
194 if (!dst || !metadata)
195 return -EINVAL;
196
197 uc = &ud->channels[dma->id];
198 if (uc->dir != DMA_DEV_TO_MEM)
199 return -EINVAL;
200 if (!uc->in_use)
201 return -EINVAL;
202 if (!uc->enabled)
203 return -EINVAL;
204 if (!ud->data_len)
205 return 0;
206
207 if (ud->buf_rx) {
208 memcpy(ud->buf_rx, ud->buf, ud->data_len);
209 *dst = ud->buf_rx;
210 } else {
211 memcpy(*dst, ud->buf, ud->data_len);
212 }
213
214 *((u32 *)metadata) = ud->meta;
215
216 debug("%s(dma id=%lu len=%zu meta=%08x %p)\n",
217 __func__, dma->id, ud->data_len, ud->meta, *dst);
218
219 return ud->data_len;
220}
221
222static int sandbox_dma_prepare_rcv_buf(struct dma *dma, void *dst, size_t size)
223{
224 struct sandbox_dma_dev *ud = dev_get_priv(dma->dev);
225
226 ud->buf_rx = dst;
227
228 return 0;
229}
230
/* DMA uclass operations implemented by this simulation driver */
static const struct dma_ops sandbox_dma_ops = {
	.transfer = sandbox_dma_transfer,
	.of_xlate = sandbox_dma_of_xlate,
	.request = sandbox_dma_request,
	.rfree = sandbox_dma_rfree,
	.enable = sandbox_dma_enable,
	.disable = sandbox_dma_disable,
	.send = sandbox_dma_send,
	.receive = sandbox_dma_receive,
	.prepare_rcv_buf = sandbox_dma_prepare_rcv_buf,
};
242
243static int sandbox_dma_probe(struct udevice *dev)
244{
245 struct dma_dev_priv *uc_priv = dev_get_uclass_priv(dev);
246 struct sandbox_dma_dev *ud = dev_get_priv(dev);
247 int i, ret = 0;
248
249 uc_priv->supported = DMA_SUPPORTS_MEM_TO_MEM |
250 DMA_SUPPORTS_MEM_TO_DEV |
251 DMA_SUPPORTS_DEV_TO_MEM;
252
253 ud->ch_count = SANDBOX_DMA_CH_CNT;
254 ud->buf_rx = NULL;
255 ud->meta = 0;
256 ud->data_len = 0;
257
258 pr_err("Number of channels: %u\n", ud->ch_count);
259
260 for (i = 0; i < ud->ch_count; i++) {
261 struct sandbox_dma_chan *uc = &ud->channels[i];
262
263 uc->ud = ud;
264 uc->id = i;
265 sprintf(uc->name, "DMA chan%d\n", i);
266 uc->in_use = false;
267 uc->enabled = false;
268 }
269
270 return ret;
271}
272
/* Device-tree match table; terminated by an empty sentinel entry */
static const struct udevice_id sandbox_dma_ids[] = {
	{ .compatible = "sandbox,dma" },
	{ }
};
277
/* Driver registration: binds "sandbox,dma" nodes to UCLASS_DMA */
U_BOOT_DRIVER(sandbox_dma) = {
	.name = "sandbox-dma",
	.id = UCLASS_DMA,
	.of_match = sandbox_dma_ids,
	.ops = &sandbox_dma_ops,
	.probe = sandbox_dma_probe,
	/* driver-model allocates this much private data per device */
	.priv_auto = sizeof(struct sandbox_dma_dev),
};
Grygorii Strashko19ebf0b2018-11-28 19:17:51 +0100285};