// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2008 by NXP Semiconductors
 * @Author: Kevin Wells
 * @Descr: LPC3250 DMA controller interface support functions
 *
 * Copyright (c) 2015 Tyco Fire Protection Products.
 */

#include <errno.h>
#include <init.h>
#include <time.h>
#include <asm/arch/dma.h>
#include <asm/arch/cpu.h>
#include <asm/arch/clk.h>
#include <asm/arch/sys_proto.h>
#include <asm/io.h>
#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/printk.h>

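/*
 * A minimal usage sketch: a driver allocates a channel, starts a transfer
 * described by a caller-built struct lpc32xx_dmac_ll descriptor plus a
 * channel configuration word, then polls for completion. "desc", "config"
 * and "ret" below are placeholders for values the caller prepares:
 *
 *	int ch = lpc32xx_dma_get_channel();
 *	if (ch < 0)
 *		return ch;
 *	lpc32xx_dma_start_xfer(ch, &desc, config);
 *	ret = lpc32xx_dma_wait_status(ch);
 */
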
/* DMA controller channel register structure */
struct dmac_chan_reg {
	u32 src_addr;
	u32 dest_addr;
	u32 lli;
	u32 control;
	u32 config_ch;
	u32 reserved[3];
};

/* DMA controller register structures */
struct dma_reg {
	u32 int_stat;
	u32 int_tc_stat;
	u32 int_tc_clear;
	u32 int_err_stat;
	u32 int_err_clear;
	u32 raw_tc_stat;
	u32 raw_err_stat;
	u32 chan_enable;
	u32 sw_burst_req;
	u32 sw_single_req;
	u32 sw_last_burst_req;
	u32 sw_last_single_req;
	u32 config;
	u32 sync;
	u32 reserved[50];
	struct dmac_chan_reg dma_chan[8];
};

#define DMA_NO_OF_CHANNELS	8

/* config register definitions */
#define DMAC_CTRL_ENABLE	(1 << 0)	/* For enabling the DMA controller */

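/* Bitmap of allocated channels: bit n set means DMA channel n is in use */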
static u32 alloc_ch;

static struct dma_reg *dma = (struct dma_reg *)DMA_BASE;

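/*
 * Allocate a free DMA channel.
 *
 * On the first call this also brings the controller to a known state
 * (controller and sync signals disabled, all statuses cleared) and then
 * enables it; the DMA clock itself must already have been enabled by
 * lpc32xx_dma_init().
 *
 * Returns the allocated channel number (0..7), or -1 if all channels are
 * already in use.
 */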
int lpc32xx_dma_get_channel(void)
{
	int i;

	if (!alloc_ch) { /* First time caller */
		/*
		 * DMA clocks are enabled by lpc32xx_dma_init(), which should
		 * be called from the board's board_early_init_f() function.
		 */

		/*
		 * Make sure DMA controller and all channels are disabled.
		 * Controller is in little-endian mode. Disable sync signals.
		 */
		writel(0, &dma->config);
		writel(0, &dma->sync);

		/* Clear interrupt and error statuses */
		writel(0xFF, &dma->int_tc_clear);
		writel(0xFF, &dma->raw_tc_stat);
		writel(0xFF, &dma->int_err_clear);
		writel(0xFF, &dma->raw_err_stat);

		/* Enable DMA controller */
		writel(DMAC_CTRL_ENABLE, &dma->config);
	}

	i = ffz(alloc_ch);

	/* Check if all the available channels are busy */
	if (unlikely(i == DMA_NO_OF_CHANNELS))
		return -1;
	alloc_ch |= BIT_MASK(i);
	return i;
}

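/*
 * Start a transfer on an allocated channel: clear the channel's pending
 * status bits, program its registers from the first linked-list descriptor
 * and write the given channel configuration word to kick off the transfer.
 *
 * Returns 0 on success, or -1 if the channel is out of range or was not
 * allocated with lpc32xx_dma_get_channel().
 */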
int lpc32xx_dma_start_xfer(unsigned int channel,
			   const struct lpc32xx_dmac_ll *desc, u32 config)
{
	if (unlikely(((BIT_MASK(channel) & alloc_ch) == 0) ||
		     (channel >= DMA_NO_OF_CHANNELS))) {
		pr_err("Request for xfer on unallocated channel %d\n", channel);
		return -1;
	}
	writel(BIT_MASK(channel), &dma->int_tc_clear);
	writel(BIT_MASK(channel), &dma->int_err_clear);
	writel(desc->dma_src, &dma->dma_chan[channel].src_addr);
	writel(desc->dma_dest, &dma->dma_chan[channel].dest_addr);
	writel(desc->next_lli, &dma->dma_chan[channel].lli);
	writel(desc->next_ctrl, &dma->dma_chan[channel].control);
	writel(config, &dma->dma_chan[channel].config_ch);

	return 0;
}

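/*
 * Busy-wait until the given channel reports terminal count or an error,
 * giving up after CONFIG_SYS_HZ timer ticks (nominally about one second).
 *
 * Returns 0 on successful completion, -ETIMEDOUT on timeout, or -1 if the
 * controller flagged a DMA error on the channel.
 */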
int lpc32xx_dma_wait_status(unsigned int channel)
{
	unsigned long start;
	u32 reg;

	/* Check if given channel is valid */
	if (unlikely(channel >= DMA_NO_OF_CHANNELS)) {
		pr_err("Request for status on unallocated channel %d\n", channel);
		return -1;
	}

	start = get_timer(0);
	while (1) {
		reg = readl(&dma->raw_tc_stat);
		reg |= readl(&dma->raw_err_stat);
		if (reg & BIT_MASK(channel))
			break;

		if (get_timer(start) > CONFIG_SYS_HZ) {
			pr_err("DMA status timeout channel %d\n", channel);
			return -ETIMEDOUT;
		}
		udelay(1);
	}

	if (unlikely(readl(&dma->raw_err_stat) & BIT_MASK(channel))) {
		setbits_le32(&dma->int_err_clear, BIT_MASK(channel));
		setbits_le32(&dma->raw_err_stat, BIT_MASK(channel));
		pr_err("DMA error on channel %d\n", channel);
		return -1;
	}
	setbits_le32(&dma->int_tc_clear, BIT_MASK(channel));
	setbits_le32(&dma->raw_tc_stat, BIT_MASK(channel));
	return 0;
}