// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 MediaTek Inc. */

#include "mt7921.h"
#include "../dma.h"
#include "mac.h"

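/* Allocate the band-0 data TX ring and point every software TX queue up to
 * MT_TXQ_PSD at that single hardware ring.
 */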
static int mt7921_init_tx_queues(struct mt7921_phy *phy, int idx, int n_desc)
{
	int i, err;

	err = mt76_init_tx_queue(phy->mt76, 0, idx, n_desc, MT_TX_RING_BASE, 0);
	if (err < 0)
		return err;

	for (i = 0; i <= MT_TXQ_PSD; i++)
		phy->mt76->q_tx[i] = phy->mt76->q_tx[0];

	return 0;
}

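/* NAPI TX completion poll: if the device cannot be referenced because it is
 * in runtime-PM sleep, schedule the wake worker and bail out; otherwise run
 * the MCU TX cleanup and re-arm the TX-done interrupt once polling completes.
 */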
static int mt7921_poll_tx(struct napi_struct *napi, int budget)
{
	struct mt7921_dev *dev;

	dev = container_of(napi, struct mt7921_dev, mt76.tx_napi);

	if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) {
		napi_complete(napi);
		queue_work(dev->mt76.wq, &dev->pm.wake_work);
		return 0;
	}

	mt7921_mcu_tx_cleanup(dev);
	if (napi_complete(napi))
		mt7921_irq_enable(dev, MT_INT_TX_DONE_ALL);
	mt76_connac_pm_unref(&dev->mphy, &dev->pm);

	return 0;
}

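/* NAPI RX poll: schedule the wake worker and bail out while the device is
 * asleep, otherwise hand the budget to the generic mt76 DMA RX poll loop.
 */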
static int mt7921_poll_rx(struct napi_struct *napi, int budget)
{
	struct mt7921_dev *dev;
	int done;

	dev = container_of(napi->dev, struct mt7921_dev, mt76.napi_dev);

	if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) {
		napi_complete(napi);
		queue_work(dev->mt76.wq, &dev->pm.wake_work);
		return 0;
	}
	done = mt76_dma_rx_poll(napi, budget);
	mt76_connac_pm_unref(&dev->mphy, &dev->pm);

	return done;
}

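/* Program the per-ring descriptor prefetch windows: each EXT_CTRL register
 * takes the prefetch base offset in its upper 16 bits and the prefetch
 * depth in its lower 16 bits (see the PREFETCH() helper below).
 */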
static void mt7921_dma_prefetch(struct mt7921_dev *dev)
{
#define PREFETCH(base, depth)	((base) << 16 | (depth))

	mt76_wr(dev, MT_WFDMA0_RX_RING0_EXT_CTRL, PREFETCH(0x0, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING2_EXT_CTRL, PREFETCH(0x40, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING3_EXT_CTRL, PREFETCH(0x80, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING4_EXT_CTRL, PREFETCH(0xc0, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING5_EXT_CTRL, PREFETCH(0x100, 0x4));

	mt76_wr(dev, MT_WFDMA0_TX_RING0_EXT_CTRL, PREFETCH(0x140, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING1_EXT_CTRL, PREFETCH(0x180, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING2_EXT_CTRL, PREFETCH(0x1c0, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING3_EXT_CTRL, PREFETCH(0x200, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING4_EXT_CTRL, PREFETCH(0x240, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING5_EXT_CTRL, PREFETCH(0x280, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING6_EXT_CTRL, PREFETCH(0x2c0, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING16_EXT_CTRL, PREFETCH(0x340, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING17_EXT_CTRL, PREFETCH(0x380, 0x4));
}

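/* Stop the WFDMA0 engine: optionally pulse the DMA logic/DMASHDL reset,
 * bypass the DMA scheduler, clear the TX/RX DMA enable bits and wait for
 * the engine to go idle.
 */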
static int mt7921_dma_disable(struct mt7921_dev *dev, bool force)
{
	if (force) {
		/* reset */
		mt76_clear(dev, MT_WFDMA0_RST,
			   MT_WFDMA0_RST_DMASHDL_ALL_RST |
			   MT_WFDMA0_RST_LOGIC_RST);

		mt76_set(dev, MT_WFDMA0_RST,
			 MT_WFDMA0_RST_DMASHDL_ALL_RST |
			 MT_WFDMA0_RST_LOGIC_RST);
	}

	/* disable dmashdl */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG_EXT0,
		   MT_WFDMA0_CSR_TX_DMASHDL_ENABLE);
	mt76_set(dev, MT_DMASHDL_SW_CONTROL, MT_DMASHDL_DMASHDL_BYPASS);

	/* disable WFDMA0 */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG,
		   MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
		   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (!mt76_poll(dev, MT_WFDMA0_GLO_CFG,
		       MT_WFDMA0_GLO_CFG_TX_DMA_BUSY |
		       MT_WFDMA0_GLO_CFG_RX_DMA_BUSY, 0, 1000))
		return -ETIMEDOUT;

	return 0;
}

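/* Bring WFDMA0 back up: restore the prefetch configuration, reset the DMA
 * index pointers, re-enable TX/RX DMA and unmask the TX/RX/MCU interrupts.
 */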
static int mt7921_dma_enable(struct mt7921_dev *dev)
{
	/* configure prefetch settings */
	mt7921_dma_prefetch(dev);

	/* reset dma idx */
	mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR, ~0);

	/* configure delay interrupt */
	mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0, 0);

	mt76_set(dev, MT_WFDMA0_GLO_CFG,
		 MT_WFDMA0_GLO_CFG_TX_WB_DDONE |
		 MT_WFDMA0_GLO_CFG_FIFO_LITTLE_ENDIAN |
		 MT_WFDMA0_GLO_CFG_CLK_GAT_DIS |
		 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		 MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
		 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	mt76_set(dev, MT_WFDMA0_GLO_CFG,
		 MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN);

	mt76_set(dev, MT_WFDMA_DUMMY_CR, MT_WFDMA_NEED_REINIT);

	/* enable interrupts for TX/RX rings */
	mt7921_irq_enable(dev,
			  MT_INT_RX_DONE_ALL | MT_INT_TX_DONE_ALL |
			  MT_INT_MCU_CMD);
	mt76_set(dev, MT_MCU2HOST_SW_INT_ENA, MT_MCU_CMD_WAKE_RX_PCIE);

	return 0;
}

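/* Full DMA reset: stop the engine, reset all TX, MCU and RX hardware
 * queues, flush pending TX status entries and re-enable DMA.
 */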
static int mt7921_dma_reset(struct mt7921_dev *dev, bool force)
{
	int i, err;

	err = mt7921_dma_disable(dev, force);
	if (err)
		return err;

	/* reset hw queues */
	for (i = 0; i < __MT_TXQ_MAX; i++)
		mt76_queue_reset(dev, dev->mphy.q_tx[i]);

	for (i = 0; i < __MT_MCUQ_MAX; i++)
		mt76_queue_reset(dev, dev->mt76.q_mcu[i]);

	mt76_for_each_q_rx(&dev->mt76, i)
		mt76_queue_reset(dev, &dev->mt76.q_rx[i]);

	mt76_tx_status_check(&dev->mt76, true);

	return mt7921_dma_enable(dev);
}

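/* Toggle the WiFi subsystem software reset and wait up to 500ms for the
 * init-done flag to be reported.
 */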
int mt7921_wfsys_reset(struct mt7921_dev *dev)
{
	mt76_clear(dev, MT_WFSYS_SW_RST_B, WFSYS_SW_RST_B);
	msleep(50);
	mt76_set(dev, MT_WFSYS_SW_RST_B, WFSYS_SW_RST_B);

	if (!__mt76_poll_msec(&dev->mt76, MT_WFSYS_SW_RST_B,
			      WFSYS_SW_INIT_DONE, WFSYS_SW_INIT_DONE, 500))
		return -ETIMEDOUT;

	return 0;
}

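/* WPDMA reset: flush everything still queued on the TX, MCU and RX rings,
 * optionally reset the whole WiFi subsystem first, then reset the DMA
 * engine and the RX queues.
 */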
int mt7921_wpdma_reset(struct mt7921_dev *dev, bool force)
{
	int i, err;

	/* clean up hw queues */
	for (i = 0; i < ARRAY_SIZE(dev->mt76.phy.q_tx); i++)
		mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true);

	for (i = 0; i < ARRAY_SIZE(dev->mt76.q_mcu); i++)
		mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true);

	mt76_for_each_q_rx(&dev->mt76, i)
		mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]);

	if (force) {
		err = mt7921_wfsys_reset(dev);
		if (err)
			return err;
	}
	err = mt7921_dma_reset(dev, force);
	if (err)
		return err;

	mt76_for_each_q_rx(&dev->mt76, i)
		mt76_queue_rx_reset(dev, i);

	return 0;
}

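/* Re-initialize WPDMA when the hardware reports that a reinit is required
 * (e.g. after a low-power wake): keep interrupts masked across the reset
 * and account the event in the power-management statistics.
 */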
int mt7921_wpdma_reinit_cond(struct mt7921_dev *dev)
{
	struct mt76_connac_pm *pm = &dev->pm;
	int err;

	/* check if the wpdma must be reinitialized */
	if (mt7921_dma_need_reinit(dev)) {
		/* disable interrupts */
		mt76_wr(dev, MT_WFDMA0_HOST_INT_ENA, 0);
		mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0x0);

		err = mt7921_wpdma_reset(dev, false);
		if (err) {
			dev_err(dev->mt76.dev, "wpdma reset failed\n");
			return err;
		}

		/* enable interrupts */
		mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0xff);
		pm->stats.lp_wake++;
	}

	return 0;
}

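/* One-time DMA bring-up: attach the generic mt76 DMA ops, reset the engine
 * and the WiFi subsystem, allocate the data/MCU/firmware-download TX rings
 * and the event/data RX rings, register the NAPI handlers and finally
 * re-enable DMA.
 */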
int mt7921_dma_init(struct mt7921_dev *dev)
{
	int ret;

	mt76_dma_attach(&dev->mt76);

	ret = mt7921_dma_disable(dev, true);
	if (ret)
		return ret;

	ret = mt7921_wfsys_reset(dev);
	if (ret)
		return ret;

	/* init tx queue */
	ret = mt7921_init_tx_queues(&dev->phy, MT7921_TXQ_BAND0,
				    MT7921_TX_RING_SIZE);
	if (ret)
		return ret;

	mt76_wr(dev, MT_WFDMA0_TX_RING0_EXT_CTRL, 0x4);

	/* command to WM */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM, MT7921_TXQ_MCU_WM,
				  MT7921_TX_MCU_RING_SIZE, MT_TX_RING_BASE);
	if (ret)
		return ret;

	/* firmware download */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL, MT7921_TXQ_FWDL,
				  MT7921_TX_FWDL_RING_SIZE, MT_TX_RING_BASE);
	if (ret)
		return ret;

	/* event from WM before firmware download */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU],
			       MT7921_RXQ_MCU_WM,
			       MT7921_RX_MCU_RING_SIZE,
			       MT_RX_BUF_SIZE, MT_RX_EVENT_RING_BASE);
	if (ret)
		return ret;

	/* Change mcu queue after firmware download */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA],
			       MT7921_RXQ_MCU_WM,
			       MT7921_RX_MCU_RING_SIZE,
			       MT_RX_BUF_SIZE, MT_WFDMA0(0x540));
	if (ret)
		return ret;

	/* rx data */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN],
			       MT7921_RXQ_BAND0, MT7921_RX_RING_SIZE,
			       MT_RX_BUF_SIZE, MT_RX_DATA_RING_BASE);
	if (ret)
		return ret;

	ret = mt76_init_queues(dev, mt7921_poll_rx);
	if (ret < 0)
		return ret;

	netif_tx_napi_add(&dev->mt76.tx_napi_dev, &dev->mt76.tx_napi,
			  mt7921_poll_tx, NAPI_POLL_WEIGHT);
	napi_enable(&dev->mt76.tx_napi);

	return mt7921_dma_enable(dev);
}

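/* Tear down DMA: disable the WFDMA0 engine, pulse the DMA reset and release
 * all rings through the generic mt76 DMA cleanup.
 */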
void mt7921_dma_cleanup(struct mt7921_dev *dev)
{
	/* disable */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG,
		   MT_WFDMA0_GLO_CFG_TX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
		   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	/* reset */
	mt76_clear(dev, MT_WFDMA0_RST,
		   MT_WFDMA0_RST_DMASHDL_ALL_RST |
		   MT_WFDMA0_RST_LOGIC_RST);

	mt76_set(dev, MT_WFDMA0_RST,
		 MT_WFDMA0_RST_DMASHDL_ALL_RST |
		 MT_WFDMA0_RST_LOGIC_RST);

	mt76_dma_cleanup(&dev->mt76);
}