From a77480d9f1e1f2de7d27365a9668daf98184b0e2 Mon Sep 17 00:00:00 2001
From: mtk27745 <rex.lu@mediatek.com>
Date: Mon, 18 Sep 2023 13:22:44 +0800
Subject: [PATCH 21/22] mtk wed add wed3 ser support

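Add SER (system error recovery) support for WED v3 hardware.

During WDMA tx/rx reset, disable the WDMA prefetch and writeback
engines, poll them until idle, and clear the prefetch, core (XDMA) and
writeback FIFOs as well as the prefetch/writeback ring states before
the legacy RESET_IDX handling. Extend mtk_wed_rx_reset() and
mtk_wed_reset_dma() with the v3-specific reset steps for the RX
IND_CMD and RRO rx-to-page engines, the RRO MSDU page/data drivers,
the route QM, the WPDMA/WDMA prefetch engines, TX PAO and the RX page
buffer manager, and free the RX page buffers after reset. Add a reset
flag to mtk_wed_start_hwrro() so the WLAN driver can re-enable the RRO
MSDU page driver when recovering from SER instead of re-running the
full start sequence. Also add the register definitions used by this
flow, rename the misspelled MTK_WED_CTRL_WED_RX_PG_BM_BUSU bit to
_BUSY and make MTK_WED_RESET_IDX_TX depend on NETSYS v2/v3.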
---
 drivers/net/ethernet/mediatek/mtk_wed.c      | 236 +++++++++++++++++--
 drivers/net/ethernet/mediatek/mtk_wed_regs.h |  73 +++++-
 include/linux/soc/mediatek/mtk_wed.h         |   6 +-
 3 files changed, 291 insertions(+), 24 deletions(-)

diff --git a/drivers/net/ethernet/mediatek/mtk_wed.c b/drivers/net/ethernet/mediatek/mtk_wed.c
index 9047cb0..0d101d5 100644
--- a/drivers/net/ethernet/mediatek/mtk_wed.c
+++ b/drivers/net/ethernet/mediatek/mtk_wed.c
@@ -99,11 +99,65 @@ mtk_wdma_rx_reset(struct mtk_wed_device *dev)
 	u32 status;
 	u32 mask = MTK_WDMA_GLO_CFG_RX_DMA_BUSY;
 	int busy, i;
+	u32 value;
 
 	wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_RX_DMA_EN);
 	busy = readx_poll_timeout(mtk_wdma_read_reset, dev, status,
-			       !(status & mask), 0, 10000);
+				  !(status & mask), 0, 10000);
 
+	if (dev->hw->version == 3) {
+		wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN);
+		wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN);
+		busy = read_poll_timeout(wdma_r32, status,
+			!(status & MTK_WDMA_PREF_TX_CFG_PREF_BUSY), 0, 10000,
+			false, dev, MTK_WDMA_PREF_TX_CFG);
+		busy = read_poll_timeout(wdma_r32, status,
+			!(status & MTK_WDMA_PREF_RX_CFG_PREF_BUSY), 0, 10000,
+			false, dev, MTK_WDMA_PREF_RX_CFG);
+
+		wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN);
+		wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN);
+		busy = read_poll_timeout(wdma_r32, status,
+			!(status & MTK_WDMA_WRBK_TX_CFG_WRBK_BUSY), 0, 10000,
+			false, dev, MTK_WDMA_WRBK_TX_CFG);
+		busy = read_poll_timeout(wdma_r32, status,
+			!(status & MTK_WDMA_WRBK_RX_CFG_WRBK_BUSY), 0, 10000,
+			false, dev, MTK_WDMA_WRBK_RX_CFG);
+
+		/* Prefetch FIFO */
+		wdma_w32(dev, MTK_WDMA_PREF_RX_FIFO_CFG,
+			 MTK_WDMA_PREF_RX_FIFO_CFG_RING0_CLEAR |
+			 MTK_WDMA_PREF_RX_FIFO_CFG_RING1_CLEAR);
+		wdma_clr(dev, MTK_WDMA_PREF_RX_FIFO_CFG,
+			 MTK_WDMA_PREF_RX_FIFO_CFG_RING0_CLEAR |
+			 MTK_WDMA_PREF_RX_FIFO_CFG_RING1_CLEAR);
+
+		/* Core FIFO */
+		value = (MTK_WDMA_XDMA_RX_FIFO_CFG_RX_PAR_FIFO_CLEAR |
+			 MTK_WDMA_XDMA_RX_FIFO_CFG_RX_CMD_FIFO_CLEAR |
+			 MTK_WDMA_XDMA_RX_FIFO_CFG_RX_DMAD_FIFO_CLEAR |
+			 MTK_WDMA_XDMA_RX_FIFO_CFG_RX_ARR_FIFO_CLEAR |
+			 MTK_WDMA_XDMA_RX_FIFO_CFG_RX_LEN_FIFO_CLEAR |
+			 MTK_WDMA_XDMA_RX_FIFO_CFG_RX_WID_FIFO_CLEAR |
+			 MTK_WDMA_XDMA_RX_FIFO_CFG_RX_BID_FIFO_CLEAR);
+
+		wdma_w32(dev, MTK_WDMA_XDMA_RX_FIFO_CFG, value);
+		wdma_clr(dev, MTK_WDMA_XDMA_RX_FIFO_CFG, value);
+
+		/* Writeback FIFO */
+		wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0), MTK_WDMA_WRBK_RX_FIFO_CFG_RING_CLEAR);
+		wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1), MTK_WDMA_WRBK_RX_FIFO_CFG_RING_CLEAR);
+
+		wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0), MTK_WDMA_WRBK_RX_FIFO_CFG_RING_CLEAR);
+		wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1), MTK_WDMA_WRBK_RX_FIFO_CFG_RING_CLEAR);
+
+		/* Prefetch ring status */
+		wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG, MTK_WDMA_PREF_SIDX_CFG_RX_RING_CLEAR);
+		wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG, MTK_WDMA_PREF_SIDX_CFG_RX_RING_CLEAR);
+		/* Writeback ring status */
+		wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG, MTK_WDMA_WRBK_SIDX_CFG_RX_RING_CLEAR);
+		wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG, MTK_WDMA_WRBK_SIDX_CFG_RX_RING_CLEAR);
+	}
 	wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_RX);
 	wdma_w32(dev, MTK_WDMA_RESET_IDX, 0);
 
83@@ -121,13 +175,62 @@ mtk_wdma_tx_reset(struct mtk_wed_device *dev)
84 {
85 u32 status;
86 u32 mask = MTK_WDMA_GLO_CFG_TX_DMA_BUSY;
87- int i;
88+ int busy, i;
89+ u32 value;
90
91 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN);
92 if (readx_poll_timeout(mtk_wdma_read_reset, dev, status,
93 !(status & mask), 0, 10000))
94 WARN_ON_ONCE(1);
95
96+ if (dev->hw->version == 3) {
97+ wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN);
98+ wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN);
99+ busy = read_poll_timeout(wdma_r32, status,
100+ !(status & MTK_WDMA_PREF_TX_CFG_PREF_BUSY), 0, 10000,
101+ false, dev, MTK_WDMA_PREF_TX_CFG);
102+ busy = read_poll_timeout(wdma_r32, status,
103+ !(status & MTK_WDMA_PREF_RX_CFG_PREF_BUSY), 0, 10000,
104+ false, dev, MTK_WDMA_PREF_RX_CFG);
105+
106+ wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN);
107+ wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN);
108+ busy = read_poll_timeout(wdma_r32, status,
109+ !(status & MTK_WDMA_WRBK_TX_CFG_WRBK_BUSY), 0, 10000,
110+ false, dev, MTK_WDMA_WRBK_TX_CFG);
111+ busy = read_poll_timeout(wdma_r32, status,
112+ !(status & MTK_WDMA_WRBK_RX_CFG_WRBK_BUSY), 0, 10000,
113+ false, dev, MTK_WDMA_WRBK_RX_CFG);
114+
115+ /* Prefetch FIFO */
116+ wdma_w32(dev, MTK_WDMA_PREF_TX_FIFO_CFG,
117+ MTK_WDMA_PREF_TX_FIFO_CFG_RING0_CLEAR |
118+ MTK_WDMA_PREF_TX_FIFO_CFG_RING1_CLEAR);
119+ wdma_clr(dev, MTK_WDMA_PREF_TX_FIFO_CFG,
120+ MTK_WDMA_PREF_TX_FIFO_CFG_RING0_CLEAR |
121+ MTK_WDMA_PREF_TX_FIFO_CFG_RING1_CLEAR);
122+ /* Core FIFO */
123+ value = (MTK_WDMA_XDMA_TX_FIFO_CFG_TX_PAR_FIFO_CLEAR |
124+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_CMD_FIFO_CLEAR |
125+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_DMAD_FIFO_CLEAR |
126+ MTK_WDMA_XDMA_TX_FIFO_CFG_TX_ARR_FIFO_CLEAR);
127+
128+ wdma_w32(dev, MTK_WDMA_XDMA_TX_FIFO_CFG, value);
129+ wdma_clr(dev, MTK_WDMA_XDMA_TX_FIFO_CFG, value);
130+ /* Writeback FIFO */
131+ wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0), MTK_WDMA_WRBK_TX_FIFO_CFG_RING_CLEAR);
132+ wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1), MTK_WDMA_WRBK_TX_FIFO_CFG_RING_CLEAR);
133+
134+ wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0), MTK_WDMA_WRBK_TX_FIFO_CFG_RING_CLEAR);
135+ wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1), MTK_WDMA_WRBK_TX_FIFO_CFG_RING_CLEAR);
136+
137+ /* Prefetch ring status */
138+ wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG, MTK_WDMA_PREF_SIDX_CFG_TX_RING_CLEAR);
139+ wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG, MTK_WDMA_PREF_SIDX_CFG_TX_RING_CLEAR);
140+ /* Writeback ring status */
141+ wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG, MTK_WDMA_WRBK_SIDX_CFG_TX_RING_CLEAR);
142+ wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG, MTK_WDMA_WRBK_SIDX_CFG_TX_RING_CLEAR);
143+ }
144 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_TX);
145 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0);
146 for (i = 0; i < ARRAY_SIZE(dev->tx_wdma); i++)
@@ -903,7 +1006,7 @@ mtk_wed_dma_enable(struct mtk_wed_device *dev)
 			MTK_WED_WPDMA_GLO_CFG_RX_DRV_UNS_VER_FORCE_4);
 
 	wdma_set(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN);
-	//wdma_w32(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN);
+	wdma_set(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN);
 	if (mtk_wed_get_rx_capa(dev)) {
 		wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_CFG,
 			MTK_WED_WPDMA_RX_D_PREF_EN |
@@ -1466,13 +1569,30 @@ mtk_wed_rx_reset(struct mtk_wed_device *dev)
 	mtk_wed_mcu_send_msg(wo, MODULE_ID_WO, MTK_WED_WO_CMD_CHANGE_STATE,
 			     &state, sizeof(state), true);
 
+	if (dev->wlan.hwrro) {
+		wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_IND_CMD_EN);
+		mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_HW_STS,
+				  MTK_WED_RX_IND_CMD_BUSY);
+		mtk_wed_reset(dev, MTK_WED_RESET_RRO_RX_TO_PG);
+	}
 	wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RX_DRV_EN);
 	busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_GLO_CFG,
 				 MTK_WED_WPDMA_RX_D_RX_DRV_BUSY);
+	if (dev->hw->version == 3)
+		busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG,
+					 MTK_WED_WPDMA_RX_D_PREF_BUSY);
 	if (busy) {
 		mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT);
 		mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_D_DRV);
 	} else {
+		if (dev->hw->version == 3) {
+			/*1.a. Disable Prefetch HW*/
+			wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, MTK_WED_WPDMA_RX_D_PREF_EN);
+			mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG,
+					  MTK_WED_WPDMA_RX_D_PREF_BUSY);
+			wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX,
+				MTK_WED_WPDMA_RX_D_RST_DRV_IDX_ALL);
+		}
 		wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX,
 			MTK_WED_WPDMA_RX_D_RST_CRX_IDX |
 			MTK_WED_WPDMA_RX_D_RST_DRV_IDX);
@@ -1500,6 +1620,24 @@ mtk_wed_rx_reset(struct mtk_wed_device *dev)
 		wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0);
 	}
 
+	if (dev->wlan.hwrro) {
+		/* Disable RRO MSDU Page Drv */
+		wed_clr(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, MTK_WED_RRO_MSDU_PG_DRV_EN);
+
+		/* Disable RRO Data Drv */
+		wed_clr(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_EN);
+
+		/* RRO MSDU Page Drv Reset */
+		wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, MTK_WED_RRO_MSDU_PG_DRV_CLR);
+		mtk_wed_poll_busy(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG,
+				  MTK_WED_RRO_MSDU_PG_DRV_CLR);
+
+		/* RRO Data Drv Reset */
+		wed_w32(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_CLR);
+		mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_D_CFG(2),
+				  MTK_WED_RRO_RX_D_DRV_CLR);
+	}
+
 	/* reset route qm */
 	wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN);
 	busy = mtk_wed_poll_busy(dev, MTK_WED_CTRL,
developer58aa0682023-09-18 14:02:26 +0800212@@ -1507,8 +1645,13 @@ mtk_wed_rx_reset(struct mtk_wed_device *dev)
developer23f9f0f2023-06-15 13:06:25 +0800213 if (busy) {
214 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM);
215 } else {
216- wed_set(dev, MTK_WED_RTQM_GLO_CFG,
217- MTK_WED_RTQM_Q_RST);
218+ if (dev->hw->version == 3) {
219+ wed_set(dev, MTK_WED_RTQM_RST, BIT(0));
220+ wed_clr(dev, MTK_WED_RTQM_RST, BIT(0));
221+ mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM);
222+ } else
223+ wed_set(dev, MTK_WED_RTQM_GLO_CFG,
224+ MTK_WED_RTQM_Q_RST);
225 }
226
227 /* reset tx wdma */
@@ -1516,8 +1659,13 @@ mtk_wed_rx_reset(struct mtk_wed_device *dev)
 
 	/* reset tx wdma drv */
 	wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_TX_DRV_EN);
-	mtk_wed_poll_busy(dev, MTK_WED_CTRL,
-			  MTK_WED_CTRL_WDMA_INT_AGENT_BUSY);
+	if (dev->hw->version == 3)
+		mtk_wed_poll_busy(dev, MTK_WED_WPDMA_STATUS,
+				  MTK_WED_WPDMA_STATUS_TX_DRV);
+	else
+		mtk_wed_poll_busy(dev, MTK_WED_CTRL,
+				  MTK_WED_CTRL_WDMA_INT_AGENT_BUSY);
+
 	mtk_wed_reset(dev, MTK_WED_RESET_WDMA_TX_DRV);
 
 	/* reset wed rx dma */
@@ -1535,9 +1683,17 @@ mtk_wed_rx_reset(struct mtk_wed_device *dev)
 	/* reset rx bm */
 	wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN);
 	mtk_wed_poll_busy(dev, MTK_WED_CTRL,
-			MTK_WED_CTRL_WED_RX_BM_BUSY);
+			  MTK_WED_CTRL_WED_RX_BM_BUSY);
 	mtk_wed_reset(dev, MTK_WED_RESET_RX_BM);
 
+	if (dev->wlan.hwrro) {
+		wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_PG_BM_EN);
+		mtk_wed_poll_busy(dev, MTK_WED_CTRL,
+				  MTK_WED_CTRL_WED_RX_PG_BM_BUSY);
+		wed_set(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM);
+		wed_clr(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM);
+	}
+
 	/* wo change to enable state */
 	state = WO_STATE_ENABLE;
 	mtk_wed_mcu_send_msg(wo, MODULE_ID_WO, MTK_WED_WO_CMD_CHANGE_STATE,
@@ -1554,6 +1710,9 @@ mtk_wed_rx_reset(struct mtk_wed_device *dev)
 	}
 
 	mtk_wed_free_rx_buffer(dev);
+
+	if (dev->wlan.hwrro)
+		mtk_wed_rx_page_free_buffer(dev);
 }
 

@@ -1587,18 +1746,40 @@ mtk_wed_reset_dma(struct mtk_wed_device *dev)
 
 	/* 2. Reset WDMA Rx DMA/Driver_Engine */
 	busy = !!mtk_wdma_rx_reset(dev);
+	if (dev->hw->version == 3) {
+		val = wed_r32(dev, MTK_WED_WDMA_GLO_CFG);
+		val |= MTK_WED_WDMA_GLO_CFG_RX_DIS_FSM_AUTO_IDLE;
+		val &= ~MTK_WED_WDMA_GLO_CFG_RX_DRV_EN;
+		wed_w32(dev, MTK_WED_WDMA_GLO_CFG, val);
+	} else
+		wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN);
 
-	wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN);
 	busy = !!(busy ||
 		  mtk_wed_poll_busy(dev, MTK_WED_WDMA_GLO_CFG,
-				MTK_WED_WDMA_GLO_CFG_RX_DRV_BUSY));
+				    MTK_WED_WDMA_GLO_CFG_RX_DRV_BUSY));
+	if (dev->hw->version == 3)
+		busy = !!(busy ||
+			  mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG,
+					    MTK_WED_WDMA_RX_PREF_BUSY));
 
 	if (busy) {
 		mtk_wed_reset(dev, MTK_WED_RESET_WDMA_INT_AGENT);
 		mtk_wed_reset(dev, MTK_WED_RESET_WDMA_RX_DRV);
 	} else {
+		if (dev->hw->version == 3) {
+			/*1.a. Disable Prefetch HW*/
+			wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, MTK_WED_WDMA_RX_PREF_EN);
+			mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG,
+					  MTK_WED_WDMA_RX_PREF_BUSY);
+			wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, MTK_WED_WDMA_RX_PREF_DDONE2_EN);
+
+			/*2. Reset dma index*/
+			wed_w32(dev, MTK_WED_WDMA_RESET_IDX,
+				MTK_WED_WDMA_RESET_IDX_RX_ALL);
+		}
 		wed_w32(dev, MTK_WED_WDMA_RESET_IDX,
-			MTK_WED_WDMA_RESET_IDX_RX | MTK_WED_WDMA_RESET_IDX_DRV);
+			MTK_WED_WDMA_RESET_IDX_RX |
+			MTK_WED_WDMA_RESET_IDX_DRV);
 		wed_w32(dev, MTK_WED_WDMA_RESET_IDX, 0);
 
 		wed_set(dev, MTK_WED_WDMA_GLO_CFG,
@@ -1613,9 +1794,15 @@ mtk_wed_reset_dma(struct mtk_wed_device *dev)
 		      MTK_WED_CTRL_WED_TX_FREE_AGENT_EN);
 
 	for (i = 0; i < 100; i++) {
-		val = wed_r32(dev, MTK_WED_TX_BM_INTF);
-		if (FIELD_GET(MTK_WED_TX_BM_INTF_TKFIFO_FDEP, val) == 0x40)
-			break;
+		if (dev->ver > MTK_WED_V1) {
+			val = wed_r32(dev, MTK_WED_TX_TKID_INTF);
+			if (FIELD_GET(MTK_WED_TX_TKID_INTF_TKFIFO_FDEP, val) == 0x40)
+				break;
+		} else {
+			val = wed_r32(dev, MTK_WED_TX_BM_INTF);
+			if (FIELD_GET(MTK_WED_TX_BM_INTF_TKFIFO_FDEP, val) == 0x40)
+				break;
+		}
 	}
 	mtk_wed_reset(dev, MTK_WED_RESET_TX_FREE_AGENT);
 
@@ -1624,18 +1811,20 @@ mtk_wed_reset_dma(struct mtk_wed_device *dev)
 
 	/* 4. Reset WED WPDMA Tx Driver Engine */
 	busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG,
-			MTK_WED_WPDMA_GLO_CFG_TX_DRV_BUSY);
+				 MTK_WED_WPDMA_GLO_CFG_TX_DRV_BUSY);
 	wed_clr(dev, MTK_WED_WPDMA_GLO_CFG,
 		MTK_WED_WPDMA_GLO_CFG_TX_DRV_EN |
 		MTK_WED_WPDMA_GLO_CFG_RX_DRV_EN);
 
 	busy = !!(busy ||
 		  mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG,
-				MTK_WED_WPDMA_GLO_CFG_RX_DRV_BUSY));
+				    MTK_WED_WPDMA_GLO_CFG_RX_DRV_BUSY));
 	if (busy) {
 		mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT);
 		mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_TX_DRV);
 		mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_DRV);
+		if (dev->hw->version == 3)
+			wed_w32(dev, MTK_WED_RX1_CTRL2, 0);
 	} else {
 		wed_w32(dev, MTK_WED_WPDMA_RESET_IDX,
 			MTK_WED_WPDMA_RESET_IDX_TX |
@@ -1648,7 +1837,13 @@ mtk_wed_reset_dma(struct mtk_wed_device *dev)
 		}
 	}
 
-	if (dev->ver > MTK_WED_V1) {
+	if (dev->hw->version == 3) {
+		/*reset wed pao*/
+		wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_PAO_EN);
+		mtk_wed_reset(dev, MTK_WED_RESET_TX_PAO);
+	}
+
+	if (mtk_wed_get_rx_capa(dev)) {
 		dev->init_done = false;
 		mtk_wed_rx_reset(dev);
 	}
@@ -1863,7 +2058,7 @@ mtk_wed_ppe_check(struct mtk_wed_device *dev, struct sk_buff *skb,
 }
 
 static void
-mtk_wed_start_hwrro(struct mtk_wed_device *dev, u32 irq_mask)
+mtk_wed_start_hwrro(struct mtk_wed_device *dev, u32 irq_mask, bool reset)
 {
 	int idx, ret;
 
@@ -1873,6 +2068,11 @@ mtk_wed_start_hwrro(struct mtk_wed_device *dev, u32 irq_mask)
 	if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hwrro)
 		return;
 
+	if (reset) {
+		wed_set(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, MTK_WED_RRO_MSDU_PG_DRV_EN);
+		return;
+	}
+
 	wed_set(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_MSDU_PG_DRV_CLR);
 	wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, MTK_WED_RRO_MSDU_PG_DRV_CLR);
 
diff --git a/drivers/net/ethernet/mediatek/mtk_wed_regs.h b/drivers/net/ethernet/mediatek/mtk_wed_regs.h
index 25be547..4379dc4 100644
--- a/drivers/net/ethernet/mediatek/mtk_wed_regs.h
+++ b/drivers/net/ethernet/mediatek/mtk_wed_regs.h
@@ -42,6 +42,8 @@ struct mtk_wdma_desc {
 #define MTK_WED_RESET					0x008
 #define MTK_WED_RESET_TX_BM				BIT(0)
 #define MTK_WED_RESET_RX_BM				BIT(1)
+#define MTK_WED_RESET_RX_PG_BM				BIT(2)
+#define MTK_WED_RESET_RRO_RX_TO_PG			BIT(3)
 #define MTK_WED_RESET_TX_FREE_AGENT			BIT(4)
 #define MTK_WED_RESET_WPDMA_TX_DRV			BIT(8)
 #define MTK_WED_RESET_WPDMA_RX_DRV			BIT(9)
@@ -64,7 +66,7 @@ struct mtk_wdma_desc {
 #define MTK_WED_CTRL_WDMA_INT_AGENT_BUSY		BIT(3)
 #define MTK_WED_CTRL_WED_RX_IND_CMD_EN			BIT(5)
 #define MTK_WED_CTRL_WED_RX_PG_BM_EN			BIT(6)
-#define MTK_WED_CTRL_WED_RX_PG_BM_BUSU			BIT(7)
+#define MTK_WED_CTRL_WED_RX_PG_BM_BUSY			BIT(7)
 #define MTK_WED_CTRL_WED_TX_BM_EN			BIT(8)
 #define MTK_WED_CTRL_WED_TX_BM_BUSY			BIT(9)
 #define MTK_WED_CTRL_WED_TX_FREE_AGENT_EN		BIT(10)
@@ -123,6 +125,10 @@ struct mtk_wdma_desc {
 #define MTK_WED_STATUS					0x060
 #define MTK_WED_STATUS_TX				GENMASK(15, 8)
 
+#define MTK_WED_WPDMA_STATUS				0x068
+#define MTK_WED_WPDMA_STATUS_TX_DRV			GENMASK(15, 8)
+
+
 #define MTK_WED_TX_BM_CTRL				0x080
 #define MTK_WED_TX_BM_CTRL_VLD_GRP_NUM			GENMASK(6, 0)
 #define MTK_WED_TX_BM_CTRL_RSV_GRP_NUM			GENMASK(22, 16)
@@ -167,6 +173,9 @@ struct mtk_wdma_desc {
 
 #define MTK_WED_TX_TKID_CTRL_PAUSE			BIT(28)
 
+#define MTK_WED_TX_TKID_INTF				0x0dc
+#define MTK_WED_TX_TKID_INTF_TKFIFO_FDEP		GENMASK(25, 16)
+
 #define MTK_WED_TX_TKID_DYN_THR				0x0e0
 #define MTK_WED_TX_TKID_DYN_THR_LO			GENMASK(6, 0)
 #define MTK_WED_TX_TKID_DYN_THR_HI			GENMASK(22, 16)
@@ -203,10 +212,11 @@ struct mtk_wdma_desc {
 #define MTK_WED_GLO_CFG_RX_2B_OFFSET			BIT(31)
 
 #define MTK_WED_RESET_IDX				0x20c
-#define MTK_WED_RESET_IDX_TX				GENMASK(3, 0)
-#if defined(CONFIG_MEDIATEK_NETSYS_V2)
+#if defined(CONFIG_MEDIATEK_NETSYS_V2) || defined(CONFIG_MEDIATEK_NETSYS_V3)
+#define MTK_WED_RESET_IDX_TX				GENMASK(1, 0)
 #define MTK_WED_RESET_IDX_RX				GENMASK(7, 6)
 #else
+#define MTK_WED_RESET_IDX_TX				GENMASK(3, 0)
 #define MTK_WED_RESET_IDX_RX				GENMASK(17, 16)
 #endif
 #define MTK_WED_RESET_WPDMA_IDX_RX			GENMASK(31, 30)
@@ -221,6 +231,7 @@ struct mtk_wdma_desc {
 #define MTK_WED_RING_RX_DATA(_n)			(0x420 + (_n) * 0x10)
 
 #define MTK_WED_SCR0					0x3c0
+#define MTK_WED_RX1_CTRL2				0x418
 #define MTK_WED_WPDMA_INT_TRIGGER			0x504
 #define MTK_WED_WPDMA_INT_TRIGGER_RX_DONE		BIT(1)
 #define MTK_WED_WPDMA_INT_TRIGGER_TX_DONE		GENMASK(5, 4)
@@ -336,6 +347,7 @@ struct mtk_wdma_desc {
 
 #define MTK_WED_WPDMA_RX_D_RST_IDX			0x760
 #define MTK_WED_WPDMA_RX_D_RST_CRX_IDX			GENMASK(17, 16)
+#define MTK_WED_WPDMA_RX_D_RST_DRV_IDX_ALL		BIT(20)
 #define MTK_WED_WPDMA_RX_D_RST_DRV_IDX			GENMASK(25, 24)
 
 #define MTK_WED_WPDMA_RX_GLO_CFG			0x76c
@@ -352,6 +364,7 @@ struct mtk_wdma_desc {
 
 #define MTK_WED_WPDMA_RX_D_PREF_CFG			0x7b4
 #define MTK_WED_WPDMA_RX_D_PREF_EN			BIT(0)
+#define MTK_WED_WPDMA_RX_D_PREF_BUSY			BIT(1)
 #define MTK_WED_WPDMA_RX_D_PREF_BURST_SIZE		GENMASK(12, 8)
 #define MTK_WED_WPDMA_RX_D_PREF_LOW_THRES		GENMASK(21, 16)
 
@@ -373,11 +386,13 @@ struct mtk_wdma_desc {
 
 #define MTK_WED_WDMA_RX_PREF_CFG			0x950
 #define MTK_WED_WDMA_RX_PREF_EN				BIT(0)
+#define MTK_WED_WDMA_RX_PREF_BUSY			BIT(1)
 #define MTK_WED_WDMA_RX_PREF_BURST_SIZE			GENMASK(12, 8)
 #define MTK_WED_WDMA_RX_PREF_LOW_THRES			GENMASK(21, 16)
 #define MTK_WED_WDMA_RX_PREF_RX0_SIDX_CLR		BIT(24)
 #define MTK_WED_WDMA_RX_PREF_RX1_SIDX_CLR		BIT(25)
 #define MTK_WED_WDMA_RX_PREF_DDONE2_EN			BIT(26)
+#define MTK_WED_WDMA_RX_PREF_DDONE2_BUSY		BIT(27)
 
 #define MTK_WED_WDMA_RX_PREF_FIFO_CFG			0x95C
 #define MTK_WED_WDMA_RX_PREF_FIFO_RX0_CLR		BIT(0)
@@ -406,6 +421,7 @@ struct mtk_wdma_desc {
 
 #define MTK_WED_WDMA_RESET_IDX				0xa08
 #define MTK_WED_WDMA_RESET_IDX_RX			GENMASK(17, 16)
+#define MTK_WED_WDMA_RESET_IDX_RX_ALL			BIT(20)
 #define MTK_WED_WDMA_RESET_IDX_DRV			GENMASK(25, 24)
 
 #define MTK_WED_WDMA_INT_CLR				0xa24
@@ -474,21 +490,66 @@ struct mtk_wdma_desc {
 #define MTK_WDMA_INT_MASK_RX_DELAY			BIT(30)
 #define MTK_WDMA_INT_MASK_RX_COHERENT			BIT(31)
 
+#define MTK_WDMA_XDMA_TX_FIFO_CFG			0x238
+#define MTK_WDMA_XDMA_TX_FIFO_CFG_TX_PAR_FIFO_CLEAR	BIT(0)
+#define MTK_WDMA_XDMA_TX_FIFO_CFG_TX_CMD_FIFO_CLEAR	BIT(4)
+#define MTK_WDMA_XDMA_TX_FIFO_CFG_TX_DMAD_FIFO_CLEAR	BIT(8)
+#define MTK_WDMA_XDMA_TX_FIFO_CFG_TX_ARR_FIFO_CLEAR	BIT(12)
+
+#define MTK_WDMA_XDMA_RX_FIFO_CFG			0x23c
+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_PAR_FIFO_CLEAR	BIT(0)
+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_CMD_FIFO_CLEAR	BIT(4)
+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_DMAD_FIFO_CLEAR	BIT(8)
+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_ARR_FIFO_CLEAR	BIT(12)
+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_LEN_FIFO_CLEAR	BIT(15)
+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_WID_FIFO_CLEAR	BIT(18)
+#define MTK_WDMA_XDMA_RX_FIFO_CFG_RX_BID_FIFO_CLEAR	BIT(21)
+
+
+
 #define MTK_WDMA_INT_GRP1				0x250
 #define MTK_WDMA_INT_GRP2				0x254
 
 #define MTK_WDMA_PREF_TX_CFG				0x2d0
 #define MTK_WDMA_PREF_TX_CFG_PREF_EN			BIT(0)
+#define MTK_WDMA_PREF_TX_CFG_PREF_BUSY			BIT(1)
 
 #define MTK_WDMA_PREF_RX_CFG				0x2dc
 #define MTK_WDMA_PREF_RX_CFG_PREF_EN			BIT(0)
+#define MTK_WDMA_PREF_RX_CFG_PREF_BUSY			BIT(1)
+
+#define MTK_WDMA_PREF_RX_FIFO_CFG			0x2e0
+#define MTK_WDMA_PREF_RX_FIFO_CFG_RING0_CLEAR		BIT(0)
+#define MTK_WDMA_PREF_RX_FIFO_CFG_RING1_CLEAR		BIT(16)
+
+#define MTK_WDMA_PREF_TX_FIFO_CFG			0x2d4
+#define MTK_WDMA_PREF_TX_FIFO_CFG_RING0_CLEAR		BIT(0)
+#define MTK_WDMA_PREF_TX_FIFO_CFG_RING1_CLEAR		BIT(16)
+
+#define MTK_WDMA_PREF_SIDX_CFG				0x2e4
+#define MTK_WDMA_PREF_SIDX_CFG_TX_RING_CLEAR		GENMASK(3, 0)
+#define MTK_WDMA_PREF_SIDX_CFG_RX_RING_CLEAR		GENMASK(5, 4)
 
 #define MTK_WDMA_WRBK_TX_CFG				0x300
+#define MTK_WDMA_WRBK_TX_CFG_WRBK_BUSY			BIT(0)
 #define MTK_WDMA_WRBK_TX_CFG_WRBK_EN			BIT(30)
 
+#define MTK_WDMA_WRBK_TX_FIFO_CFG(_n)			(0x304 + (_n) * 0x4)
+#define MTK_WDMA_WRBK_TX_FIFO_CFG_RING_CLEAR		BIT(0)
+
+
 #define MTK_WDMA_WRBK_RX_CFG				0x344
+#define MTK_WDMA_WRBK_RX_CFG_WRBK_BUSY			BIT(0)
 #define MTK_WDMA_WRBK_RX_CFG_WRBK_EN			BIT(30)
 
+#define MTK_WDMA_WRBK_RX_FIFO_CFG(_n)			(0x348 + (_n) * 0x4)
+#define MTK_WDMA_WRBK_RX_FIFO_CFG_RING_CLEAR		BIT(0)
+
+
+#define MTK_WDMA_WRBK_SIDX_CFG				0x388
+#define MTK_WDMA_WRBK_SIDX_CFG_TX_RING_CLEAR		GENMASK(3, 0)
+#define MTK_WDMA_WRBK_SIDX_CFG_RX_RING_CLEAR		GENMASK(5, 4)
+
 #define MTK_PCIE_MIRROR_MAP(n)				((n) ? 0x4 : 0x0)
 #define MTK_PCIE_MIRROR_MAP_EN				BIT(0)
 #define MTK_PCIE_MIRROR_MAP_WED_ID			BIT(1)
@@ -502,6 +563,9 @@ struct mtk_wdma_desc {
 #define MTK_WED_RTQM_Q_DBG_BYPASS			BIT(5)
 #define MTK_WED_RTQM_TXDMAD_FPORT			GENMASK(23, 20)
 
+#define MTK_WED_RTQM_RST				0xb04
+
+
 #define MTK_WED_RTQM_IGRS0_I2HW_DMAD_CNT		0xb1c
 #define MTK_WED_RTQM_IGRS0_I2H_DMAD_CNT(_n)		(0xb20 + (_n) * 0x4)
 #define MTK_WED_RTQM_IGRS0_I2HW_PKT_CNT			0xb28
@@ -691,6 +755,9 @@ struct mtk_wdma_desc {
 #define MTK_WED_WPDMA_INT_CTRL_RRO_PG2_CLR		BIT(17)
 #define MTK_WED_WPDMA_INT_CTRL_RRO_PG2_DONE_TRIG	GENMASK(22, 18)
 
+#define MTK_WED_RRO_RX_HW_STS				0xf00
+#define MTK_WED_RX_IND_CMD_BUSY				GENMASK(31, 0)
+
 #define MTK_WED_RX_IND_CMD_CNT0				0xf20
 #define MTK_WED_RX_IND_CMD_DBG_CNT_EN			BIT(31)
 
diff --git a/include/linux/soc/mediatek/mtk_wed.h b/include/linux/soc/mediatek/mtk_wed.h
index 92df4ba..1438692 100644
--- a/include/linux/soc/mediatek/mtk_wed.h
+++ b/include/linux/soc/mediatek/mtk_wed.h
@@ -240,7 +240,7 @@ struct mtk_wed_ops {
 	void (*irq_set_mask)(struct mtk_wed_device *dev, u32 mask);
 	void (*ppe_check)(struct mtk_wed_device *dev, struct sk_buff *skb,
 			  u32 reason, u32 hash);
-	void (*start_hwrro)(struct mtk_wed_device *dev, u32 irq_mask);
+	void (*start_hwrro)(struct mtk_wed_device *dev, u32 irq_mask, bool reset);
 };
 
 extern const struct mtk_wed_ops __rcu *mtk_soc_wed_ops;
@@ -317,8 +317,8 @@ mtk_wed_device_support_pao(struct mtk_wed_device *dev)
 	(_dev)->ops->reset_dma(_dev)
 #define mtk_wed_device_ppe_check(_dev, _skb, _reason, _hash) \
 	(_dev)->ops->ppe_check(_dev, _skb, _reason, _hash)
-#define mtk_wed_device_start_hwrro(_dev, _mask) \
-	(_dev)->ops->start_hwrro(_dev, _mask)
+#define mtk_wed_device_start_hwrro(_dev, _mask, _reset) \
+	(_dev)->ops->start_hwrro(_dev, _mask, _reset)
 
 #else
 static inline bool mtk_wed_device_active(struct mtk_wed_device *dev)
-- 
2.18.0
