blob: 7423a6e327462442193998d75eb6271d88dc1e74 [file] [log] [blame]
developer69bcd592024-03-25 14:26:39 +08001From 67a20e07982e9c43d299679f75fd81638271fc63 Mon Sep 17 00:00:00 2001
2From: mtk27745 <rex.lu@mediatek.com>
3Date: Mon, 18 Sep 2023 13:22:44 +0800
4Subject: [PATCH 2/6] mtk wed add wed3 ser support
5
6---
7 drivers/net/ethernet/mediatek/mtk_wed.c | 339 ++++++++++++++++---
8 drivers/net/ethernet/mediatek/mtk_wed_regs.h | 68 +++-
9 include/linux/soc/mediatek/mtk_wed.h | 8 +-
10 3 files changed, 367 insertions(+), 48 deletions(-)
11
12diff --git a/drivers/net/ethernet/mediatek/mtk_wed.c b/drivers/net/ethernet/mediatek/mtk_wed.c
13index 4b32a82..02c156a 100644
14--- a/drivers/net/ethernet/mediatek/mtk_wed.c
15+++ b/drivers/net/ethernet/mediatek/mtk_wed.c
16@@ -110,24 +110,88 @@ mtk_wdma_read_reset(struct mtk_wed_device *dev)
17 return wdma_r32(dev, MTK_WDMA_GLO_CFG);
18 }
19
20-static u32
21-mtk_wed_check_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
22+static void
23+mtk_wdma_v3_rx_reset(struct mtk_wed_device *dev)
24 {
25- if (wed_r32(dev, reg) & mask)
26- return true;
27-
28- return false;
29-}
30+ u32 status;
31
32-static int
33-mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
34-{
35- int sleep = 1000;
36- int timeout = 100 * sleep;
37- u32 val;
38+ if (!mtk_wed_is_v3_or_greater(dev->hw))
39+ return;
40
41- return read_poll_timeout(mtk_wed_check_busy, val, !val, sleep,
42- timeout, false, dev, reg, mask);
43+ wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN);
44+ wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN);
45+
46+ if (read_poll_timeout(wdma_r32, status,
47+ !(status & MTK_WDMA_PREF_TX_CFG_PREF_BUSY),
48+ 0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG))
49+ dev_err(dev->hw->dev, "rx reset failed\n");
50+
51+ if (read_poll_timeout(wdma_r32, status,
52+ !(status & MTK_WDMA_PREF_RX_CFG_PREF_BUSY),
53+ 0, 10000, false, dev, MTK_WDMA_PREF_RX_CFG))
54+ dev_err(dev->hw->dev, "rx reset failed\n");
55+
56+ wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN);
57+ wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN);
58+
59+ if (read_poll_timeout(wdma_r32, status,
60+ !(status & MTK_WDMA_WRBK_TX_CFG_WRBK_BUSY),
61+ 0, 10000, false, dev, MTK_WDMA_WRBK_TX_CFG))
62+ dev_err(dev->hw->dev, "rx reset failed\n");
63+
64+ if (read_poll_timeout(wdma_r32, status,
65+ !(status & MTK_WDMA_WRBK_RX_CFG_WRBK_BUSY),
66+ 0, 10000, false, dev, MTK_WDMA_WRBK_RX_CFG))
67+ dev_err(dev->hw->dev, "rx reset failed\n");
68+
69+ /* prefetch FIFO */
70+ wdma_w32(dev, MTK_WDMA_PREF_RX_FIFO_CFG,
71+ MTK_WDMA_PREF_RX_FIFO_CFG_RING0_CLEAR |
72+ MTK_WDMA_PREF_RX_FIFO_CFG_RING1_CLEAR);
73+ wdma_clr(dev, MTK_WDMA_PREF_RX_FIFO_CFG,
74+ MTK_WDMA_PREF_RX_FIFO_CFG_RING0_CLEAR |
75+ MTK_WDMA_PREF_RX_FIFO_CFG_RING1_CLEAR);
76+
77+ /* core FIFO */
78+ wdma_w32(dev, MTK_WDMA_XDMA_RX_FIFO_CFG,
79+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_PAR_FIFO_CLEAR |
80+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_CMD_FIFO_CLEAR |
81+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_DMAD_FIFO_CLEAR |
82+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_ARR_FIFO_CLEAR |
83+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_LEN_FIFO_CLEAR |
84+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_WID_FIFO_CLEAR |
85+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_BID_FIFO_CLEAR);
86+ wdma_clr(dev, MTK_WDMA_XDMA_RX_FIFO_CFG,
87+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_PAR_FIFO_CLEAR |
88+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_CMD_FIFO_CLEAR |
89+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_DMAD_FIFO_CLEAR |
90+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_ARR_FIFO_CLEAR |
91+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_LEN_FIFO_CLEAR |
92+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_WID_FIFO_CLEAR |
93+ MTK_WDMA_XDMA_RX_FIFO_CFG_RX_BID_FIFO_CLEAR);
94+
95+ /* writeback FIFO */
96+ wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0),
97+ MTK_WDMA_WRBK_RX_FIFO_CFG_RING_CLEAR);
98+ wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1),
99+ MTK_WDMA_WRBK_RX_FIFO_CFG_RING_CLEAR);
100+
101+ wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0),
102+ MTK_WDMA_WRBK_RX_FIFO_CFG_RING_CLEAR);
103+ wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1),
104+ MTK_WDMA_WRBK_RX_FIFO_CFG_RING_CLEAR);
105+
106+ /* prefetch ring status */
107+ wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG,
108+ MTK_WDMA_PREF_SIDX_CFG_RX_RING_CLEAR);
109+ wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG,
110+ MTK_WDMA_PREF_SIDX_CFG_RX_RING_CLEAR);
111+
112+ /* writeback ring status */
113+ wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG,
114+ MTK_WDMA_WRBK_SIDX_CFG_RX_RING_CLEAR);
115+ wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG,
116+ MTK_WDMA_WRBK_SIDX_CFG_RX_RING_CLEAR);
117 }
118
119 static int
120@@ -142,6 +206,7 @@ mtk_wdma_rx_reset(struct mtk_wed_device *dev)
121 if (ret)
122 dev_err(dev->hw->dev, "rx reset failed \n");
123
124+ mtk_wdma_v3_rx_reset(dev);
125 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_RX);
126 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0);
127
128@@ -156,6 +221,101 @@ mtk_wdma_rx_reset(struct mtk_wed_device *dev)
129 return ret;
130 }
131
132+static u32
133+mtk_wed_check_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
134+{
135+ return !!(wed_r32(dev, reg) & mask);
136+}
137+
138+static int
139+mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
140+{
141+ int sleep = 15000;
142+ int timeout = 100 * sleep;
143+ u32 val;
144+
145+ return read_poll_timeout(mtk_wed_check_busy, val, !val, sleep,
146+ timeout, false, dev, reg, mask);
147+}
148+
149+static void
150+mtk_wdma_v3_tx_reset(struct mtk_wed_device *dev)
151+{
152+ u32 status;
153+
154+ if (!mtk_wed_is_v3_or_greater(dev->hw))
155+ return;
156+
157+ wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN);
158+ wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN);
159+
160+ if (read_poll_timeout(wdma_r32, status,
161+ !(status & MTK_WDMA_PREF_TX_CFG_PREF_BUSY),
162+ 0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG))
163+ dev_err(dev->hw->dev, "tx reset failed\n");
164+
165+ if (read_poll_timeout(wdma_r32, status,
166+ !(status & MTK_WDMA_PREF_RX_CFG_PREF_BUSY),
167+ 0, 10000, false, dev, MTK_WDMA_PREF_RX_CFG))
168+ dev_err(dev->hw->dev, "tx reset failed\n");
169+
170+ wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN);
171+ wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN);
172+
173+ if (read_poll_timeout(wdma_r32, status,
174+ !(status & MTK_WDMA_WRBK_TX_CFG_WRBK_BUSY),
175+ 0, 10000, false, dev, MTK_WDMA_WRBK_TX_CFG))
176+ dev_err(dev->hw->dev, "tx reset failed\n");
177+
178+ if (read_poll_timeout(wdma_r32, status,
179+ !(status & MTK_WDMA_WRBK_RX_CFG_WRBK_BUSY),
180+ 0, 10000, false, dev, MTK_WDMA_WRBK_RX_CFG))
181+ dev_err(dev->hw->dev, "tx reset failed\n");
182+
183+ /* prefetch FIFO */
184+ wdma_w32(dev, MTK_WDMA_PREF_TX_FIFO_CFG,
185+ MTK_WDMA_PREF_TX_FIFO_CFG_RING0_CLEAR |
186+ MTK_WDMA_PREF_TX_FIFO_CFG_RING1_CLEAR);
187+ wdma_clr(dev, MTK_WDMA_PREF_TX_FIFO_CFG,
188+ MTK_WDMA_PREF_TX_FIFO_CFG_RING0_CLEAR |
189+ MTK_WDMA_PREF_TX_FIFO_CFG_RING1_CLEAR);
190+
191+ /* core FIFO */
192+ wdma_w32(dev, MTK_WDMA_XDMA_TX_FIFO_CFG,
193+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_PAR_FIFO_CLEAR |
194+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_CMD_FIFO_CLEAR |
195+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_DMAD_FIFO_CLEAR |
196+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_ARR_FIFO_CLEAR);
197+ wdma_clr(dev, MTK_WDMA_XDMA_TX_FIFO_CFG,
198+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_PAR_FIFO_CLEAR |
199+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_CMD_FIFO_CLEAR |
200+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_DMAD_FIFO_CLEAR |
201+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_ARR_FIFO_CLEAR);
202+
203+ /* writeback FIFO */
204+ wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0),
205+ MTK_WDMA_WRBK_TX_FIFO_CFG_RING_CLEAR);
206+ wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1),
207+ MTK_WDMA_WRBK_TX_FIFO_CFG_RING_CLEAR);
208+
209+ wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0),
210+ MTK_WDMA_WRBK_TX_FIFO_CFG_RING_CLEAR);
211+ wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1),
212+ MTK_WDMA_WRBK_TX_FIFO_CFG_RING_CLEAR);
213+
214+ /* prefetch ring status */
215+ wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG,
216+ MTK_WDMA_PREF_SIDX_CFG_TX_RING_CLEAR);
217+ wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG,
218+ MTK_WDMA_PREF_SIDX_CFG_TX_RING_CLEAR);
219+
220+ /* writeback ring status */
221+ wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG,
222+ MTK_WDMA_WRBK_SIDX_CFG_TX_RING_CLEAR);
223+ wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG,
224+ MTK_WDMA_WRBK_SIDX_CFG_TX_RING_CLEAR);
225+}
226+
227 static void
228 mtk_wdma_tx_reset(struct mtk_wed_device *dev)
229 {
230@@ -167,6 +327,7 @@ mtk_wdma_tx_reset(struct mtk_wed_device *dev)
231 !(status & mask), 0, 10000))
232 dev_err(dev->hw->dev, "tx reset failed \n");
233
234+ mtk_wdma_v3_tx_reset(dev);
235 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_TX);
236 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0);
237
238@@ -1389,25 +1550,6 @@ mtk_wed_ring_reset(struct mtk_wed_ring *ring, int size, bool tx)
239 }
240 }
241
242-static u32
243-mtk_wed_check_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
244-{
245- if (wed_r32(dev, reg) & mask)
246- return true;
247-
248- return false;
249-}
250-
251-static int
252-mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
253-{
254- int sleep = 1000;
255- int timeout = 100 * sleep;
256- u32 val;
257-
258- return read_poll_timeout(mtk_wed_check_busy, val, !val, sleep,
259- timeout, false, dev, reg, mask);
260-}
261
262 static int
263 mtk_wed_rx_reset(struct mtk_wed_device *dev)
264@@ -1423,13 +1565,32 @@ mtk_wed_rx_reset(struct mtk_wed_device *dev)
265 if (ret)
266 return ret;
267
268+ if (dev->wlan.hw_rro) {
269+ wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_IND_CMD_EN);
270+ mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_HW_STS,
271+ MTK_WED_RX_IND_CMD_BUSY);
272+ mtk_wed_reset(dev, MTK_WED_RESET_RRO_RX_TO_PG);
273+ }
274+
275 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RX_DRV_EN);
276 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
277 MTK_WED_WPDMA_RX_D_RX_DRV_BUSY);
278+ if (!ret && mtk_wed_is_v3_or_greater(dev->hw))
279+ ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG,
280+ MTK_WED_WPDMA_RX_D_PREF_BUSY);
281 if (ret) {
282 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT);
283 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_D_DRV);
284 } else {
285+ if (mtk_wed_is_v3_or_greater(dev->hw)) {
286+ /* 1.a. Disable Prefetch HW */
287+ wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, MTK_WED_WPDMA_RX_D_PREF_EN);
288+ mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG,
289+ MTK_WED_WPDMA_RX_D_PREF_BUSY);
290+ wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX,
291+ MTK_WED_WPDMA_RX_D_RST_DRV_IDX_ALL);
292+ }
293+
294 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX,
295 MTK_WED_WPDMA_RX_D_RST_CRX_IDX |
296 MTK_WED_WPDMA_RX_D_RST_DRV_IDX);
297@@ -1457,15 +1618,36 @@ mtk_wed_rx_reset(struct mtk_wed_device *dev)
298 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0);
299 }
300
301+ if (dev->wlan.hw_rro) {
302+ /* Disable RRO MSDU Page Drv */
303+ wed_clr(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, MTK_WED_RRO_MSDU_PG_DRV_EN);
304+
305+ /* Disable RRO Data Drv */
306+ wed_clr(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_EN);
307+
308+ /* RRO MSDU Page Drv Reset */
309+ wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, MTK_WED_RRO_MSDU_PG_DRV_CLR);
310+ mtk_wed_poll_busy(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG,
311+ MTK_WED_RRO_MSDU_PG_DRV_CLR);
312+
313+ /* RRO Data Drv Reset */
314+ wed_w32(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_CLR);
315+ mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_D_CFG(2),
316+ MTK_WED_RRO_RX_D_DRV_CLR);
317+ }
318+
319 /* reset route qm */
320 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN);
321 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL,
322 MTK_WED_CTRL_RX_ROUTE_QM_BUSY);
323 if (ret) {
324 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM);
325+ } else if (mtk_wed_is_v3_or_greater(dev->hw)) {
326+ wed_set(dev, MTK_WED_RTQM_RST, BIT(0));
327+ wed_clr(dev, MTK_WED_RTQM_RST, BIT(0));
328+ mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM);
329 } else {
330- wed_set(dev, MTK_WED_RTQM_GLO_CFG,
331- MTK_WED_RTQM_Q_RST);
332+ wed_set(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST);
333 }
334
335 /* reset tx wdma */
336@@ -1473,8 +1655,12 @@ mtk_wed_rx_reset(struct mtk_wed_device *dev)
337
338 /* reset tx wdma drv */
339 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_TX_DRV_EN);
340- mtk_wed_poll_busy(dev, MTK_WED_CTRL,
341- MTK_WED_CTRL_WDMA_INT_AGENT_BUSY);
342+ if (mtk_wed_is_v3_or_greater(dev->hw))
343+ mtk_wed_poll_busy(dev, MTK_WED_WPDMA_STATUS,
344+ MTK_WED_WPDMA_STATUS_TX_DRV);
345+ else
346+ mtk_wed_poll_busy(dev, MTK_WED_CTRL,
347+ MTK_WED_CTRL_WDMA_INT_AGENT_BUSY);
348 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_TX_DRV);
349
350 /* reset wed rx dma */
351@@ -1495,6 +1681,14 @@ mtk_wed_rx_reset(struct mtk_wed_device *dev)
352 MTK_WED_CTRL_WED_RX_BM_BUSY);
353 mtk_wed_reset(dev, MTK_WED_RESET_RX_BM);
354
355+ if (dev->wlan.hw_rro) {
356+ wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_PG_BM_EN);
357+ mtk_wed_poll_busy(dev, MTK_WED_CTRL,
358+ MTK_WED_CTRL_WED_RX_PG_BM_BUSY);
359+ wed_set(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM);
360+ wed_clr(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM);
361+ }
362+
363 /* wo change to enable state */
364 val = WO_STATE_ENABLE;
365 ret = mtk_wed_mcu_send_msg(wo, MTK_WED_MODULE_ID_WO,
366@@ -1549,16 +1743,55 @@ mtk_wed_reset_dma(struct mtk_wed_device *dev)
367
368 /* 2. Reset WDMA Rx DMA/Driver_Engine */
369 busy = !!mtk_wdma_rx_reset(dev);
370+ if (mtk_wed_is_v3_or_greater(dev->hw)) {
371+ val = MTK_WED_WDMA_GLO_CFG_RX_DIS_FSM_AUTO_IDLE |
372+ wed_r32(dev, MTK_WED_WDMA_GLO_CFG);
373+ val &= ~MTK_WED_WDMA_GLO_CFG_RX_DRV_EN;
374+ wed_w32(dev, MTK_WED_WDMA_GLO_CFG, val);
375+ } else {
376+ wed_clr(dev, MTK_WED_WDMA_GLO_CFG,
377+ MTK_WED_WDMA_GLO_CFG_RX_DRV_EN);
378+ }
379
380- wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN);
381 if (!busy)
382 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_GLO_CFG,
383 MTK_WED_WDMA_GLO_CFG_RX_DRV_BUSY);
384+ if (!busy && mtk_wed_is_v3_or_greater(dev->hw))
385+ busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG,
386+ MTK_WED_WDMA_RX_PREF_BUSY);
387
388 if (busy) {
389 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_INT_AGENT);
390 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_RX_DRV);
391 } else {
392+ if (mtk_wed_is_v3_or_greater(dev->hw)) {
392+ /* 1.a. Disable Prefetch HW */
394+ wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG,
395+ MTK_WED_WDMA_RX_PREF_EN);
396+ mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG,
397+ MTK_WED_WDMA_RX_PREF_BUSY);
398+ wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG,
399+ MTK_WED_WDMA_RX_PREF_DDONE2_EN);
400+
401+ /* reset prefetch index */
402+ wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG,
403+ MTK_WED_WDMA_RX_PREF_RX0_SIDX_CLR |
404+ MTK_WED_WDMA_RX_PREF_RX1_SIDX_CLR);
405+
406+ wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG,
407+ MTK_WED_WDMA_RX_PREF_RX0_SIDX_CLR |
408+ MTK_WED_WDMA_RX_PREF_RX1_SIDX_CLR);
409+
410+ /* reset prefetch FIFO */
411+ wed_w32(dev, MTK_WED_WDMA_RX_PREF_FIFO_CFG,
412+ MTK_WED_WDMA_RX_PREF_FIFO_RX0_CLR |
413+ MTK_WED_WDMA_RX_PREF_FIFO_RX1_CLR);
414+ wed_w32(dev, MTK_WED_WDMA_RX_PREF_FIFO_CFG, 0);
415+
416+ /* 2. Reset DMA index */
417+ wed_w32(dev, MTK_WED_WDMA_RESET_IDX,
418+ MTK_WED_WDMA_RESET_IDX_RX_ALL);
419+ }
420 wed_w32(dev, MTK_WED_WDMA_RESET_IDX,
421 MTK_WED_WDMA_RESET_IDX_RX | MTK_WED_WDMA_RESET_IDX_DRV);
422 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, 0);
423@@ -1574,8 +1807,13 @@ mtk_wed_reset_dma(struct mtk_wed_device *dev)
424 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_FREE_AGENT_EN);
425
426 for (i = 0; i < 100; i++) {
427- val = wed_r32(dev, MTK_WED_TX_BM_INTF);
428- if (FIELD_GET(MTK_WED_TX_BM_INTF_TKFIFO_FDEP, val) == 0x40)
429+ if (mtk_wed_is_v1(dev->hw))
430+ val = FIELD_GET(MTK_WED_TX_BM_INTF_TKFIFO_FDEP,
431+ wed_r32(dev, MTK_WED_TX_BM_INTF));
432+ else
433+ val = FIELD_GET(MTK_WED_TX_TKID_INTF_TKFIFO_FDEP,
434+ wed_r32(dev, MTK_WED_TX_TKID_INTF));
435+ if (val == 0x40)
436 break;
437 }
438
439@@ -1599,6 +1837,8 @@ mtk_wed_reset_dma(struct mtk_wed_device *dev)
440 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT);
441 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_TX_DRV);
442 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_DRV);
443+ if (mtk_wed_is_v3_or_greater(dev->hw))
444+ wed_w32(dev, MTK_WED_RX1_CTRL2, 0);
445 } else {
446 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX,
447 MTK_WED_WPDMA_RESET_IDX_TX |
448@@ -1615,7 +1855,14 @@ mtk_wed_reset_dma(struct mtk_wed_device *dev)
449 wed_w32(dev, MTK_WED_RESET_IDX, 0);
450 }
451
452- mtk_wed_rx_reset(dev);
453+ if (mtk_wed_is_v3_or_greater(dev->hw)) {
454+ /* reset amsdu engine */
455+ wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN);
456+ mtk_wed_reset(dev, MTK_WED_RESET_TX_AMSDU);
457+ }
458+
459+ if (mtk_wed_get_rx_capa(dev))
460+ mtk_wed_rx_reset(dev);
461 }
462
463 static int
464@@ -1932,7 +2179,7 @@ mtk_wed_dma_enable(struct mtk_wed_device *dev)
465 }
466
467 static void
468-mtk_wed_start_hw_rro(struct mtk_wed_device *dev, u32 irq_mask)
469+mtk_wed_start_hw_rro(struct mtk_wed_device *dev, u32 irq_mask, bool reset)
470 {
471 int i;
472
473@@ -1942,6 +2189,12 @@ mtk_wed_start_hw_rro(struct mtk_wed_device *dev, u32 irq_mask)
474 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro)
475 return;
476
477+ if (reset) {
478+ wed_set(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG,
479+ MTK_WED_RRO_MSDU_PG_DRV_EN);
480+ return;
481+ }
482+
483 wed_set(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_MSDU_PG_DRV_CLR);
484 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG,
485 MTK_WED_RRO_MSDU_PG_DRV_CLR);
486diff --git a/drivers/net/ethernet/mediatek/mtk_wed_regs.h b/drivers/net/ethernet/mediatek/mtk_wed_regs.h
487index 0af264d..1ee0fe1 100644
488--- a/drivers/net/ethernet/mediatek/mtk_wed_regs.h
489+++ b/drivers/net/ethernet/mediatek/mtk_wed_regs.h
490@@ -36,6 +36,8 @@ struct mtk_wdma_desc {
491 #define MTK_WED_RESET 0x008
492 #define MTK_WED_RESET_TX_BM BIT(0)
493 #define MTK_WED_RESET_RX_BM BIT(1)
494+#define MTK_WED_RESET_RX_PG_BM BIT(2)
495+#define MTK_WED_RESET_RRO_RX_TO_PG BIT(3)
496 #define MTK_WED_RESET_TX_FREE_AGENT BIT(4)
497 #define MTK_WED_RESET_WPDMA_TX_DRV BIT(8)
498 #define MTK_WED_RESET_WPDMA_RX_DRV BIT(9)
499@@ -58,7 +60,7 @@ struct mtk_wdma_desc {
500 #define MTK_WED_CTRL_WDMA_INT_AGENT_BUSY BIT(3)
501 #define MTK_WED_CTRL_WED_RX_IND_CMD_EN BIT(5)
502 #define MTK_WED_CTRL_WED_RX_PG_BM_EN BIT(6)
503-#define MTK_WED_CTRL_WED_RX_PG_BM_BUSU BIT(7)
504+#define MTK_WED_CTRL_WED_RX_PG_BM_BUSY BIT(7)
505 #define MTK_WED_CTRL_WED_TX_BM_EN BIT(8)
506 #define MTK_WED_CTRL_WED_TX_BM_BUSY BIT(9)
507 #define MTK_WED_CTRL_WED_TX_FREE_AGENT_EN BIT(10)
508@@ -117,6 +119,10 @@ struct mtk_wdma_desc {
509 #define MTK_WED_STATUS 0x060
510 #define MTK_WED_STATUS_TX GENMASK(15, 8)
511
512+#define MTK_WED_WPDMA_STATUS 0x068
513+#define MTK_WED_WPDMA_STATUS_TX_DRV GENMASK(15, 8)
514+
515+
516 #define MTK_WED_TX_BM_CTRL 0x080
517 #define MTK_WED_TX_BM_CTRL_VLD_GRP_NUM GENMASK(6, 0)
518 #define MTK_WED_TX_BM_CTRL_RSV_GRP_NUM GENMASK(22, 16)
519@@ -154,6 +160,9 @@ struct mtk_wdma_desc {
520 #define MTK_WED_TX_TKID_CTRL_VLD_GRP_NUM_V3 GENMASK(7, 0)
521 #define MTK_WED_TX_TKID_CTRL_RSV_GRP_NUM_V3 GENMASK(23, 16)
522
523+#define MTK_WED_TX_TKID_INTF 0x0dc
524+#define MTK_WED_TX_TKID_INTF_TKFIFO_FDEP GENMASK(25, 16)
525+
526 #define MTK_WED_TX_TKID_DYN_THR 0x0e0
527 #define MTK_WED_TX_TKID_DYN_THR_LO GENMASK(6, 0)
528 #define MTK_WED_TX_TKID_DYN_THR_HI GENMASK(22, 16)
529@@ -205,6 +214,7 @@ struct mtk_wdma_desc {
530 #define MTK_WED_RING_RX_DATA(_n) (0x420 + (_n) * 0x10)
531
532 #define MTK_WED_SCR0 0x3c0
533+#define MTK_WED_RX1_CTRL2 0x418
534 #define MTK_WED_WPDMA_INT_TRIGGER 0x504
535 #define MTK_WED_WPDMA_INT_TRIGGER_RX_DONE BIT(1)
536 #define MTK_WED_WPDMA_INT_TRIGGER_TX_DONE GENMASK(5, 4)
537@@ -320,6 +330,7 @@ struct mtk_wdma_desc {
538
539 #define MTK_WED_WPDMA_RX_D_RST_IDX 0x760
540 #define MTK_WED_WPDMA_RX_D_RST_CRX_IDX GENMASK(17, 16)
541+#define MTK_WED_WPDMA_RX_D_RST_DRV_IDX_ALL BIT(20)
542 #define MTK_WED_WPDMA_RX_D_RST_DRV_IDX GENMASK(25, 24)
543
544 #define MTK_WED_WPDMA_RX_GLO_CFG 0x76c
545@@ -336,6 +347,7 @@ struct mtk_wdma_desc {
546
547 #define MTK_WED_WPDMA_RX_D_PREF_CFG 0x7b4
548 #define MTK_WED_WPDMA_RX_D_PREF_EN BIT(0)
549+#define MTK_WED_WPDMA_RX_D_PREF_BUSY BIT(1)
550 #define MTK_WED_WPDMA_RX_D_PREF_BURST_SIZE GENMASK(12, 8)
551 #define MTK_WED_WPDMA_RX_D_PREF_LOW_THRES GENMASK(21, 16)
552
553@@ -357,11 +369,13 @@ struct mtk_wdma_desc {
554
555 #define MTK_WED_WDMA_RX_PREF_CFG 0x950
556 #define MTK_WED_WDMA_RX_PREF_EN BIT(0)
557+#define MTK_WED_WDMA_RX_PREF_BUSY BIT(1)
558 #define MTK_WED_WDMA_RX_PREF_BURST_SIZE GENMASK(12, 8)
559 #define MTK_WED_WDMA_RX_PREF_LOW_THRES GENMASK(21, 16)
560 #define MTK_WED_WDMA_RX_PREF_RX0_SIDX_CLR BIT(24)
561 #define MTK_WED_WDMA_RX_PREF_RX1_SIDX_CLR BIT(25)
562 #define MTK_WED_WDMA_RX_PREF_DDONE2_EN BIT(26)
563+#define MTK_WED_WDMA_RX_PREF_DDONE2_BUSY BIT(27)
564
565 #define MTK_WED_WDMA_RX_PREF_FIFO_CFG 0x95C
566 #define MTK_WED_WDMA_RX_PREF_FIFO_RX0_CLR BIT(0)
567@@ -390,6 +404,7 @@ struct mtk_wdma_desc {
568
569 #define MTK_WED_WDMA_RESET_IDX 0xa08
570 #define MTK_WED_WDMA_RESET_IDX_RX GENMASK(17, 16)
571+#define MTK_WED_WDMA_RESET_IDX_RX_ALL BIT(20)
572 #define MTK_WED_WDMA_RESET_IDX_DRV GENMASK(25, 24)
573
574 #define MTK_WED_WDMA_INT_CLR 0xa24
575@@ -458,21 +473,66 @@ struct mtk_wdma_desc {
576 #define MTK_WDMA_INT_MASK_RX_DELAY BIT(30)
577 #define MTK_WDMA_INT_MASK_RX_COHERENT BIT(31)
578
579+#define MTK_WDMA_XDMA_TX_FIFO_CFG 0x238
580+#define MTK_WDMA_XDMA_TX_FIFO_CFG_TX_PAR_FIFO_CLEAR BIT(0)
581+#define MTK_WDMA_XDMA_TX_FIFO_CFG_TX_CMD_FIFO_CLEAR BIT(4)
582+#define MTK_WDMA_XDMA_TX_FIFO_CFG_TX_DMAD_FIFO_CLEAR BIT(8)
583+#define MTK_WDMA_XDMA_TX_FIFO_CFG_TX_ARR_FIFO_CLEAR BIT(12)
584+
585+#define MTK_WDMA_XDMA_RX_FIFO_CFG 0x23c
586+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_PAR_FIFO_CLEAR BIT(0)
587+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_CMD_FIFO_CLEAR BIT(4)
588+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_DMAD_FIFO_CLEAR BIT(8)
589+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_ARR_FIFO_CLEAR BIT(12)
590+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_LEN_FIFO_CLEAR BIT(15)
591+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_WID_FIFO_CLEAR BIT(18)
592+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_BID_FIFO_CLEAR BIT(21)
593+
594+
595+
596 #define MTK_WDMA_INT_GRP1 0x250
597 #define MTK_WDMA_INT_GRP2 0x254
598
599 #define MTK_WDMA_PREF_TX_CFG 0x2d0
600 #define MTK_WDMA_PREF_TX_CFG_PREF_EN BIT(0)
601+#define MTK_WDMA_PREF_TX_CFG_PREF_BUSY BIT(1)
602
603 #define MTK_WDMA_PREF_RX_CFG 0x2dc
604 #define MTK_WDMA_PREF_RX_CFG_PREF_EN BIT(0)
605+#define MTK_WDMA_PREF_RX_CFG_PREF_BUSY BIT(1)
606+
607+#define MTK_WDMA_PREF_RX_FIFO_CFG 0x2e0
608+#define MTK_WDMA_PREF_RX_FIFO_CFG_RING0_CLEAR BIT(0)
609+#define MTK_WDMA_PREF_RX_FIFO_CFG_RING1_CLEAR BIT(16)
610+
611+#define MTK_WDMA_PREF_TX_FIFO_CFG 0x2d4
612+#define MTK_WDMA_PREF_TX_FIFO_CFG_RING0_CLEAR BIT(0)
613+#define MTK_WDMA_PREF_TX_FIFO_CFG_RING1_CLEAR BIT(16)
614+
615+#define MTK_WDMA_PREF_SIDX_CFG 0x2e4
616+#define MTK_WDMA_PREF_SIDX_CFG_TX_RING_CLEAR GENMASK(3, 0)
617+#define MTK_WDMA_PREF_SIDX_CFG_RX_RING_CLEAR GENMASK(5, 4)
618
619 #define MTK_WDMA_WRBK_TX_CFG 0x300
620+#define MTK_WDMA_WRBK_TX_CFG_WRBK_BUSY BIT(0)
621 #define MTK_WDMA_WRBK_TX_CFG_WRBK_EN BIT(30)
622
623+#define MTK_WDMA_WRBK_TX_FIFO_CFG(_n) (0x304 + (_n) * 0x4)
624+#define MTK_WDMA_WRBK_TX_FIFO_CFG_RING_CLEAR BIT(0)
625+
626+
627 #define MTK_WDMA_WRBK_RX_CFG 0x344
628+#define MTK_WDMA_WRBK_RX_CFG_WRBK_BUSY BIT(0)
629 #define MTK_WDMA_WRBK_RX_CFG_WRBK_EN BIT(30)
630
631+#define MTK_WDMA_WRBK_RX_FIFO_CFG(_n) (0x348 + (_n) * 0x4)
632+#define MTK_WDMA_WRBK_RX_FIFO_CFG_RING_CLEAR BIT(0)
633+
634+
635+#define MTK_WDMA_WRBK_SIDX_CFG 0x388
636+#define MTK_WDMA_WRBK_SIDX_CFG_TX_RING_CLEAR GENMASK(3, 0)
637+#define MTK_WDMA_WRBK_SIDX_CFG_RX_RING_CLEAR GENMASK(5, 4)
638+
639 #define MTK_PCIE_MIRROR_MAP(n) ((n) ? 0x4 : 0x0)
640 #define MTK_PCIE_MIRROR_MAP_EN BIT(0)
641 #define MTK_PCIE_MIRROR_MAP_WED_ID BIT(1)
642@@ -486,6 +546,9 @@ struct mtk_wdma_desc {
643 #define MTK_WED_RTQM_Q_DBG_BYPASS BIT(5)
644 #define MTK_WED_RTQM_TXDMAD_FPORT GENMASK(23, 20)
645
646+#define MTK_WED_RTQM_RST 0xb04
647+
648+
649 #define MTK_WED_RTQM_IGRS0_I2HW_DMAD_CNT 0xb1c
650 #define MTK_WED_RTQM_IGRS0_I2H_DMAD_CNT(_n) (0xb20 + (_n) * 0x4)
651 #define MTK_WED_RTQM_IGRS0_I2HW_PKT_CNT 0xb28
652@@ -675,6 +738,9 @@ struct mtk_wdma_desc {
653 #define MTK_WED_WPDMA_INT_CTRL_RRO_PG2_CLR BIT(17)
654 #define MTK_WED_WPDMA_INT_CTRL_RRO_PG2_DONE_TRIG GENMASK(22, 18)
655
656+#define MTK_WED_RRO_RX_HW_STS 0xf00
657+#define MTK_WED_RX_IND_CMD_BUSY GENMASK(31, 0)
658+
659 #define MTK_WED_RX_IND_CMD_CNT0 0xf20
660 #define MTK_WED_RX_IND_CMD_DBG_CNT_EN BIT(31)
661
662diff --git a/include/linux/soc/mediatek/mtk_wed.h b/include/linux/soc/mediatek/mtk_wed.h
663index e81e41f..83a4b8b 100644
664--- a/include/linux/soc/mediatek/mtk_wed.h
665+++ b/include/linux/soc/mediatek/mtk_wed.h
666@@ -222,7 +222,7 @@ struct mtk_wed_ops {
667
668 u32 (*irq_get)(struct mtk_wed_device *dev, u32 mask);
669 void (*irq_set_mask)(struct mtk_wed_device *dev, u32 mask);
670- void (*start_hw_rro)(struct mtk_wed_device *dev, u32 irq_mask);
671+ void (*start_hw_rro)(struct mtk_wed_device *dev, u32 irq_mask, bool reset);
672 void (*rro_rx_ring_setup)(struct mtk_wed_device *dev, int ring,
673 void __iomem *regs);
674 void (*msdu_pg_rx_ring_setup)(struct mtk_wed_device *dev, int ring,
675@@ -302,8 +302,8 @@ mtk_wed_is_amsdu_supported(struct mtk_wed_device *dev)
676 #define mtk_wed_device_dma_reset(_dev) (_dev)->ops->reset_dma(_dev)
677 #define mtk_wed_device_setup_tc(_dev, _ndev, _type, _data) \
678 (_dev)->ops->setup_tc(_dev, _ndev, _type, _data)
679-#define mtk_wed_device_start_hw_rro(_dev, _mask) \
680- (_dev)->ops->start_hw_rro(_dev, _mask)
681+#define mtk_wed_device_start_hw_rro(_dev, _mask, _reset) \
682+ (_dev)->ops->start_hw_rro(_dev, _mask, _reset)
683 #define mtk_wed_device_rro_rx_ring_setup(_dev, _ring, _regs) \
684 (_dev)->ops->rro_rx_ring_setup(_dev, _ring, _regs)
685 #define mtk_wed_device_msdu_pg_rx_ring_setup(_dev, _ring, _regs) \
686@@ -329,7 +329,7 @@ static inline bool mtk_wed_device_active(struct mtk_wed_device *dev)
687 #define mtk_wed_device_stop(_dev) do {} while (0)
688 #define mtk_wed_device_dma_reset(_dev) do {} while (0)
689 #define mtk_wed_device_setup_tc(_dev, _ndev, _type, _data) do {} while (0)
690-#define mtk_wed_device_start_hw_rro(_dev, _mask) do {} while (0)
691+#define mtk_wed_device_start_hw_rro(_dev, _mask, _reset) do {} while (0)
692 #define mtk_wed_device_rro_rx_ring_setup(_dev, _ring, _regs) -ENODEV
693 #define mtk_wed_device_msdu_pg_rx_ring_setup(_dev, _ring, _regs) -ENODEV
694 #define mtk_wed_device_ind_rx_ring_setup(_dev, _regs) -ENODEV
695--
6962.18.0
697