// SPDX-License-Identifier: ISC
2/* Copyright (C) 2020 MediaTek Inc. */
3
4#include "bersa.h"
5#include "mac.h"
6#include "mcu.h"
7#include "testmode.h"
8
/* Bit positions for testmode parameters whose changes must be pushed to
 * the hardware/firmware (consumed by bersa_tm_update_params()).
 */
enum {
	TM_CHANGED_TXPOWER,
	TM_CHANGED_FREQ_OFFSET,

	/* must be last */
	NUM_TM_CHANGED
};

/* Maps each TM_CHANGED_* bit to its mt76 testmode netlink attribute. */
static const u8 tm_change_map[] = {
	[TM_CHANGED_TXPOWER] = MT76_TM_ATTR_TX_POWER,
	[TM_CHANGED_FREQ_OFFSET] = MT76_TM_ATTR_FREQ_OFFSET,
};
21
22struct reg_band {
23 u32 band[2];
24};
25
26#define REG_BAND(_list, _reg) \
27 { _list.band[0] = MT_##_reg(0); \
28 _list.band[1] = MT_##_reg(1); }
29#define REG_BAND_IDX(_list, _reg, _idx) \
30 { _list.band[0] = MT_##_reg(0, _idx); \
31 _list.band[1] = MT_##_reg(1, _idx); }
32
33#define TM_REG_MAX_ID 17
34static struct reg_band reg_backup_list[TM_REG_MAX_ID];
35
36
37static int
38bersa_tm_set_tx_power(struct bersa_phy *phy)
39{
40 struct bersa_dev *dev = phy->dev;
41 struct mt76_phy *mphy = phy->mt76;
42 struct cfg80211_chan_def *chandef = &mphy->chandef;
43 int freq = chandef->center_freq1;
44 int ret;
45 struct {
46 u8 format_id;
47 u8 band;
48 s8 tx_power;
49 u8 ant_idx; /* Only 0 is valid */
50 u8 center_chan;
51 u8 rsv[3];
52 } __packed req = {
53 .format_id = 0xf,
54 .band = phy->band_idx,
55 .center_chan = ieee80211_frequency_to_channel(freq),
56 };
57 u8 *tx_power = NULL;
58
59 if (phy->mt76->test.state != MT76_TM_STATE_OFF)
60 tx_power = phy->mt76->test.tx_power;
61
62 /* Tx power of the other antennas are the same as antenna 0 */
63 if (tx_power && tx_power[0])
64 req.tx_power = tx_power[0];
65
66 ret = mt76_mcu_send_msg(&dev->mt76,
67 MCU_EXT_CMD(TX_POWER_FEATURE_CTRL),
68 &req, sizeof(req), false);
69
70 return ret;
71}
72
73static int
74bersa_tm_set_freq_offset(struct bersa_phy *phy, bool en, u32 val)
75{
76 struct bersa_dev *dev = phy->dev;
77 struct bersa_tm_cmd req = {
78 .testmode_en = en,
79 .param_idx = MCU_ATE_SET_FREQ_OFFSET,
80 .param.freq.band = phy->band_idx,
81 .param.freq.freq_offset = cpu_to_le32(val),
82 };
83
84 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
85 sizeof(req), false);
86}
87
88static int
89bersa_tm_mode_ctrl(struct bersa_dev *dev, bool enable)
90{
91 struct {
92 u8 format_id;
93 bool enable;
94 u8 rsv[2];
95 } __packed req = {
96 .format_id = 0x6,
97 .enable = enable,
98 };
99
100 return mt76_mcu_send_msg(&dev->mt76,
101 MCU_EXT_CMD(TX_POWER_FEATURE_CTRL),
102 &req, sizeof(req), false);
103}
104
105static int
106bersa_tm_set_trx(struct bersa_phy *phy, int type, bool en)
107{
108 struct bersa_dev *dev = phy->dev;
109 struct bersa_tm_cmd req = {
110 .testmode_en = 1,
111 .param_idx = MCU_ATE_SET_TRX,
112 .param.trx.type = type,
113 .param.trx.enable = en,
114 .param.trx.band = phy->band_idx,
115 };
116
117 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
118 sizeof(req), false);
119}
120
121static int
122bersa_tm_clean_hwq(struct bersa_phy *phy, u8 wcid)
123{
124 struct bersa_dev *dev = phy->dev;
125 struct bersa_tm_cmd req = {
126 .testmode_en = 1,
127 .param_idx = MCU_ATE_CLEAN_TXQUEUE,
128 .param.clean.wcid = wcid,
129 .param.clean.band = phy->band_idx,
130 };
131
132 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
133 sizeof(req), false);
134}
135
136static int
137bersa_tm_set_slot_time(struct bersa_phy *phy, u8 slot_time, u8 sifs)
138{
139 struct bersa_dev *dev = phy->dev;
140 struct bersa_tm_cmd req = {
141 .testmode_en = !(phy->mt76->test.state == MT76_TM_STATE_OFF),
142 .param_idx = MCU_ATE_SET_SLOT_TIME,
143 .param.slot.slot_time = slot_time,
144 .param.slot.sifs = sifs,
145 .param.slot.rifs = 2,
146 .param.slot.eifs = cpu_to_le16(60),
147 .param.slot.band = phy->band_idx,
148 };
149
150 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
151 sizeof(req), false);
152}
153
154static int
155bersa_tm_set_tam_arb(struct bersa_phy *phy, bool enable, bool mu)
156{
157 struct bersa_dev *dev = phy->dev;
158 u32 op_mode;
159
160 if (!enable)
161 op_mode = TAM_ARB_OP_MODE_NORMAL;
162 else if (mu)
163 op_mode = TAM_ARB_OP_MODE_TEST;
164 else
165 op_mode = TAM_ARB_OP_MODE_FORCE_SU;
166
167 return bersa_mcu_set_muru_ctrl(dev, MURU_SET_ARB_OP_MODE, op_mode);
168}
169
/* Program EDCA/WMM parameters for one hardware queue.
 * NOTE(review): the actual MCU update is compiled out (#if 0), presumably
 * until bersa_mcu_update_edca() is wired up — as-is this is a stub that
 * always reports success.
 */
static int
bersa_tm_set_wmm_qid(struct bersa_dev *dev, u8 qid, u8 aifs, u8 cw_min,
		     u16 cw_max, u16 txop)
{
#if 0
	struct bersa_mcu_tx req = { .total = 1 };
	struct edca *e = &req.edca[0];

	e->queue = qid;
	e->set = WMM_PARAM_SET;

	e->aifs = aifs;
	e->cw_min = cw_min;
	e->cw_max = cpu_to_le16(cw_max);
	e->txop = cpu_to_le16(txop);

	return bersa_mcu_update_edca(dev, &req);
#endif
	return 0;
}
190
/* Translate a requested inter-packet gap (usec) into slot time, SIFS,
 * AIFSN and CW values and program them into the MAC.
 * @ipg: requested gap; 0, or a gap below the minimum achievable, leaves
 *       the default timing in place.
 * @mode: MT76_TM_TX_MODE_* of the TX rate (CCK has no signal extension).
 */
static int
bersa_tm_set_ipg_params(struct bersa_phy *phy, u32 ipg, u8 mode)
{
#define TM_DEFAULT_SIFS	10
#define TM_MAX_SIFS	127
#define TM_MAX_AIFSN	0xf
#define TM_MIN_AIFSN	0x1
#define BBP_PROC_TIME	1500
	struct bersa_dev *dev = phy->dev;
	/* non-CCK modes carry a 6 usec signal extension */
	u8 sig_ext = (mode == MT76_TM_TX_MODE_CCK) ? 0 : 6;
	u8 slot_time = 9, sifs = TM_DEFAULT_SIFS;
	u8 aifsn = TM_MIN_AIFSN;
	u32 i2t_time, tr2t_time, txv_time;
	u16 cw = 0;

	/* gaps below the minimum achievable fall back to the defaults */
	if (ipg < sig_ext + slot_time + sifs)
		ipg = 0;

	if (!ipg)
		goto done;

	ipg -= sig_ext;

	if (ipg <= (TM_MAX_SIFS + slot_time)) {
		/* short gap: representable with SIFS alone */
		sifs = ipg - slot_time;
	} else {
		/* long gap: spread across CW backoff, AIFSN and SIFS */
		u32 val = (ipg + slot_time) / slot_time;

		/* cw = floor(log2(val)), capped at 16 */
		while (val >>= 1)
			cw++;

		if (cw > 16)
			cw = 16;

		/* subtract the average backoff contribution */
		ipg -= ((1 << cw) - 1) * slot_time;

		aifsn = ipg / slot_time;
		if (aifsn > TM_MAX_AIFSN)
			aifsn = TM_MAX_AIFSN;

		ipg -= aifsn * slot_time;

		/* remainder (if above the default) goes into SIFS */
		if (ipg > TM_DEFAULT_SIFS) {
			if (ipg < TM_MAX_SIFS)
				sifs = ipg;
			else
				sifs = TM_MAX_SIFS;
		}
	}
done:
	/* TXV timeout is counted in 50 MHz ticks; convert to nsec-scale */
	txv_time = mt76_get_field(dev, MT_TMAC_ATCR(phy->band_idx),
				  MT_TMAC_ATCR_TXV_TOUT);
	txv_time *= 50; /* normal clock time */

	i2t_time = (slot_time * 1000 - txv_time - BBP_PROC_TIME) / 50;
	tr2t_time = (sifs * 1000 - txv_time - BBP_PROC_TIME) / 50;

	mt76_set(dev, MT_TMAC_TRCR0(phy->band_idx),
		 FIELD_PREP(MT_TMAC_TRCR0_TR2T_CHK, tr2t_time) |
		 FIELD_PREP(MT_TMAC_TRCR0_I2T_CHK, i2t_time));

	bersa_tm_set_slot_time(phy, slot_time, sifs);

	return bersa_tm_set_wmm_qid(dev,
				    mt76_connac_lmac_mapping(IEEE80211_AC_BE),
				    aifsn, cw, cw, 0);
}
258
259static int
260bersa_tm_set_tx_len(struct bersa_phy *phy, u32 tx_time)
261{
262 struct mt76_phy *mphy = phy->mt76;
263 struct mt76_testmode_data *td = &mphy->test;
264 struct ieee80211_supported_band *sband;
265 struct rate_info rate = {};
266 u16 flags = 0, tx_len;
267 u32 bitrate;
268 int ret;
269
270 if (!tx_time)
271 return 0;
272
273 rate.mcs = td->tx_rate_idx;
274 rate.nss = td->tx_rate_nss;
275
276 switch (td->tx_rate_mode) {
277 case MT76_TM_TX_MODE_CCK:
278 case MT76_TM_TX_MODE_OFDM:
279 if (mphy->chandef.chan->band == NL80211_BAND_5GHZ)
280 sband = &mphy->sband_5g.sband;
281 else
282 sband = &mphy->sband_2g.sband;
283
284 rate.legacy = sband->bitrates[rate.mcs].bitrate;
285 break;
286 case MT76_TM_TX_MODE_HT:
287 rate.mcs += rate.nss * 8;
288 flags |= RATE_INFO_FLAGS_MCS;
289
290 if (td->tx_rate_sgi)
291 flags |= RATE_INFO_FLAGS_SHORT_GI;
292 break;
293 case MT76_TM_TX_MODE_VHT:
294 flags |= RATE_INFO_FLAGS_VHT_MCS;
295
296 if (td->tx_rate_sgi)
297 flags |= RATE_INFO_FLAGS_SHORT_GI;
298 break;
299 case MT76_TM_TX_MODE_HE_SU:
300 case MT76_TM_TX_MODE_HE_EXT_SU:
301 case MT76_TM_TX_MODE_HE_TB:
302 case MT76_TM_TX_MODE_HE_MU:
303 rate.he_gi = td->tx_rate_sgi;
304 flags |= RATE_INFO_FLAGS_HE_MCS;
305 break;
306 default:
307 break;
308 }
309 rate.flags = flags;
310
311 switch (mphy->chandef.width) {
312 case NL80211_CHAN_WIDTH_160:
313 case NL80211_CHAN_WIDTH_80P80:
314 rate.bw = RATE_INFO_BW_160;
315 break;
316 case NL80211_CHAN_WIDTH_80:
317 rate.bw = RATE_INFO_BW_80;
318 break;
319 case NL80211_CHAN_WIDTH_40:
320 rate.bw = RATE_INFO_BW_40;
321 break;
322 default:
323 rate.bw = RATE_INFO_BW_20;
324 break;
325 }
326
327 bitrate = cfg80211_calculate_bitrate(&rate);
328 tx_len = bitrate * tx_time / 10 / 8;
329
330 ret = mt76_testmode_alloc_skb(phy->mt76, tx_len);
331 if (ret)
332 return ret;
333
334 return 0;
335}
336
337static void
338bersa_tm_reg_backup_restore(struct bersa_phy *phy)
339{
340 int n_regs = ARRAY_SIZE(reg_backup_list);
341 struct bersa_dev *dev = phy->dev;
342 u32 *b = phy->test.reg_backup;
343 int i;
344
345 REG_BAND_IDX(reg_backup_list[0], AGG_PCR0, 0);
346 REG_BAND_IDX(reg_backup_list[1], AGG_PCR0, 1);
347 REG_BAND_IDX(reg_backup_list[2], AGG_AWSCR0, 0);
348 REG_BAND_IDX(reg_backup_list[3], AGG_AWSCR0, 1);
349 REG_BAND_IDX(reg_backup_list[4], AGG_AWSCR0, 2);
350 REG_BAND_IDX(reg_backup_list[5], AGG_AWSCR0, 3);
351 REG_BAND(reg_backup_list[6], AGG_MRCR);
352 REG_BAND(reg_backup_list[7], TMAC_TFCR0);
353 REG_BAND(reg_backup_list[8], TMAC_TCR0);
354 REG_BAND(reg_backup_list[9], AGG_ATCR1);
355 REG_BAND(reg_backup_list[10], AGG_ATCR3);
356 REG_BAND(reg_backup_list[11], TMAC_TRCR0);
357 REG_BAND(reg_backup_list[12], TMAC_ICR0);
358 REG_BAND_IDX(reg_backup_list[13], ARB_DRNGR0, 0);
359 REG_BAND_IDX(reg_backup_list[14], ARB_DRNGR0, 1);
360 REG_BAND(reg_backup_list[15], WF_RFCR);
361 REG_BAND(reg_backup_list[16], WF_RFCR1);
362
363 if (phy->mt76->test.state == MT76_TM_STATE_OFF) {
364 for (i = 0; i < n_regs; i++)
365 mt76_wr(dev, reg_backup_list[i].band[phy->band_idx], b[i]);
366 return;
367 }
368
369 if (!b) {
370 b = devm_kzalloc(dev->mt76.dev, 4 * n_regs, GFP_KERNEL);
371 if (!b)
372 return;
373
374 phy->test.reg_backup = b;
375 for (i = 0; i < n_regs; i++)
376 b[i] = mt76_rr(dev, reg_backup_list[i].band[phy->band_idx]);
377 }
378
379 mt76_clear(dev, MT_AGG_PCR0(phy->band_idx, 0), MT_AGG_PCR0_MM_PROT |
380 MT_AGG_PCR0_GF_PROT | MT_AGG_PCR0_ERP_PROT |
381 MT_AGG_PCR0_VHT_PROT | MT_AGG_PCR0_BW20_PROT |
382 MT_AGG_PCR0_BW40_PROT | MT_AGG_PCR0_BW80_PROT);
383 mt76_set(dev, MT_AGG_PCR0(phy->band_idx, 0), MT_AGG_PCR0_PTA_WIN_DIS);
384
385 mt76_wr(dev, MT_AGG_PCR0(phy->band_idx, 1), MT_AGG_PCR1_RTS0_NUM_THRES |
386 MT_AGG_PCR1_RTS0_LEN_THRES);
387
388 mt76_clear(dev, MT_AGG_MRCR(phy->band_idx), MT_AGG_MRCR_BAR_CNT_LIMIT |
389 MT_AGG_MRCR_LAST_RTS_CTS_RN | MT_AGG_MRCR_RTS_FAIL_LIMIT |
390 MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT);
391
392 mt76_rmw(dev, MT_AGG_MRCR(phy->band_idx), MT_AGG_MRCR_RTS_FAIL_LIMIT |
393 MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT,
394 FIELD_PREP(MT_AGG_MRCR_RTS_FAIL_LIMIT, 1) |
395 FIELD_PREP(MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT, 1));
396
397 mt76_wr(dev, MT_TMAC_TFCR0(phy->band_idx), 0);
398 mt76_clear(dev, MT_TMAC_TCR0(phy->band_idx), MT_TMAC_TCR0_TBTT_STOP_CTRL);
399
400 /* config rx filter for testmode rx */
401 mt76_wr(dev, MT_WF_RFCR(phy->band_idx), 0xcf70a);
402 mt76_wr(dev, MT_WF_RFCR1(phy->band_idx), 0);
403}
404
/* Bring the PHY in or out of testmode operation: toggle the firmware
 * testmode state, back up/restore MAC registers, switch the normal TX/RX
 * path, and add/remove the monitor interface's BSS and STA entries.
 */
static void
bersa_tm_init(struct bersa_phy *phy, bool en)
{
	struct bersa_dev *dev = phy->dev;

	if (!test_bit(MT76_STATE_RUNNING, &phy->mt76->state))
		return;

	/* TODO: need check */
	/* bersa_mcu_set_sku_en(phy, !en); */

	bersa_tm_mode_ctrl(dev, en);
	bersa_tm_reg_backup_restore(phy);
	/* the normal TX/RX path runs only while testmode is off */
	bersa_tm_set_trx(phy, TM_MAC_TXRX, !en);

	bersa_mcu_add_bss_info(phy, phy->monitor_vif, en);
	bersa_mcu_add_sta(dev, phy->monitor_vif, NULL, en);

	/* leaving testmode: restore the default MU arbiter mode */
	if (!en)
		bersa_tm_set_tam_arb(phy, en, 0);
}
426
/* Re-run the channel switch and then reconfigure the RX path channel
 * info.  The mt76 mutex is dropped around bersa_set_channel() —
 * NOTE(review): presumably because it takes the mutex itself; confirm
 * against bersa_set_channel().
 */
static void
bersa_tm_update_channel(struct bersa_phy *phy)
{
	mutex_unlock(&phy->dev->mt76.mutex);
	bersa_set_channel(phy);
	mutex_lock(&phy->dev->mt76.mutex);

	bersa_mcu_set_chan_info(phy, UNI_CHANNEL_RX_PATH);
}
436
/* Configure the TX frame generator: pick the spatial extension index,
 * set the MU arbiter mode, derive frame length / inter-packet gap from
 * duty-cycle, tx-time and ipg, then enable the MAC TX path.
 */
static void
bersa_tm_set_tx_frames(struct bersa_phy *phy, bool en)
{
	/* antenna-mask -> spatial extension index lookup */
	static const u8 spe_idx_map[] = {0, 0, 1, 0, 3, 2, 4, 0,
					 9, 8, 6, 10, 16, 12, 18, 0};
	struct mt76_testmode_data *td = &phy->mt76->test;
	struct bersa_dev *dev = phy->dev;
	struct ieee80211_tx_info *info;
	u8 duty_cycle = td->tx_duty_cycle;
	u32 tx_time = td->tx_time;
	u32 ipg = td->tx_ipg;
	u8 phy_idx = bersa_get_phy_id(phy);
	u16 chainshift;

	/* stop RX capture and drop anything still queued for this wcid */
	bersa_tm_set_trx(phy, TM_MAC_RX_RXV, false);
	bersa_tm_clean_hwq(phy, dev->mt76.global_wcid.idx);

	if (en) {
		bersa_tm_update_channel(phy);

		if (td->tx_spe_idx) {
			/* user-supplied index wins */
			phy->test.spe_idx = td->tx_spe_idx;
		} else {
			/* shift the antenna mask down to this PHY's chains */
			u8 tx_ant = td->tx_antenna_mask;

			if (phy_idx == MT_EXT_PHY)
				chainshift = dev->chain_shift_ext;
			else if (phy_idx == MT_TRI_PHY)
				chainshift = dev->chain_shift_tri;
			else
				chainshift = 0;

			tx_ant >>= chainshift;
			phy->test.spe_idx = spe_idx_map[tx_ant];
		}
	}

	bersa_tm_set_tam_arb(phy, en,
			     td->tx_rate_mode == MT76_TM_TX_MODE_HE_MU);

	/* if all three params are set, duty_cycle will be ignored */
	if (duty_cycle && tx_time && !ipg) {
		ipg = tx_time * 100 / duty_cycle - tx_time;
	} else if (duty_cycle && !tx_time && ipg) {
		if (duty_cycle < 100)
			tx_time = duty_cycle * ipg / (100 - duty_cycle);
	}

	bersa_tm_set_ipg_params(phy, ipg, td->tx_rate_mode);
	bersa_tm_set_tx_len(phy, tx_time);

	/* cap queued frames so one timeout period never overfills the queue */
	if (ipg)
		td->tx_queued_limit = MT76_TM_TIMEOUT * 1000000 / ipg / 2;

	if (!en || !td->tx_skb)
		return;

	info = IEEE80211_SKB_CB(td->tx_skb);
	info->control.vif = phy->monitor_vif;

	bersa_tm_set_trx(phy, TM_MAC_TX, en);
}
499
500static void
501bersa_tm_set_rx_frames(struct bersa_phy *phy, bool en)
502{
503 bersa_tm_set_trx(phy, TM_MAC_RX_RXV, false);
504
505 if (en) {
506 struct bersa_dev *dev = phy->dev;
507
508 bersa_tm_update_channel(phy);
509
510 /* read-clear */
511 mt76_rr(dev, MT_MIB_SDR3(phy->band_idx));
512 bersa_tm_set_trx(phy, TM_MAC_RX_RXV, en);
513 }
514}
515
516static int
517bersa_tm_rf_switch_mode(struct bersa_dev *dev, u32 oper)
518{
519 struct bersa_tm_rf_test req = {
520 .op.op_mode = cpu_to_le32(oper),
521 };
522
523 return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
524 sizeof(req), true);
525}
526
/* Start or stop continuous TX via the firmware RF test interface,
 * encoding channel, bandwidth and rate into the request.
 * Returns -EINVAL for channel widths or rate modes the command cannot
 * express.
 */
static int
bersa_tm_set_tx_cont(struct bersa_phy *phy, bool en)
{
#define TX_CONT_START	0x05
#define TX_CONT_STOP	0x06
	struct bersa_dev *dev = phy->dev;
	struct cfg80211_chan_def *chandef = &phy->mt76->chandef;
	int freq1 = ieee80211_frequency_to_channel(chandef->center_freq1);
	struct mt76_testmode_data *td = &phy->mt76->test;
	u32 func_idx = en ? TX_CONT_START : TX_CONT_STOP;
	u8 rate_idx = td->tx_rate_idx, mode;
	u16 rateval;
	struct bersa_tm_rf_test req = {
		.action = 1,
		.icap_len = 120,
		.op.rf.func_idx = cpu_to_le32(func_idx),
	};
	struct tm_tx_cont *tx_cont = &req.op.rf.param.tx_cont;

	tx_cont->control_ch = chandef->chan->hw_value;
	tx_cont->center_ch = freq1;
	tx_cont->tx_ant = td->tx_antenna_mask;
	tx_cont->band = phy->band_idx;

	/* translate the cfg80211 channel width to the MCU encoding */
	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		tx_cont->bw = CMD_CBW_40MHZ;
		break;
	case NL80211_CHAN_WIDTH_80:
		tx_cont->bw = CMD_CBW_80MHZ;
		break;
	case NL80211_CHAN_WIDTH_80P80:
		tx_cont->bw = CMD_CBW_8080MHZ;
		break;
	case NL80211_CHAN_WIDTH_160:
		tx_cont->bw = CMD_CBW_160MHZ;
		break;
	case NL80211_CHAN_WIDTH_5:
		tx_cont->bw = CMD_CBW_5MHZ;
		break;
	case NL80211_CHAN_WIDTH_10:
		tx_cont->bw = CMD_CBW_10MHZ;
		break;
	case NL80211_CHAN_WIDTH_20:
		tx_cont->bw = CMD_CBW_20MHZ;
		break;
	case NL80211_CHAN_WIDTH_20_NOHT:
		tx_cont->bw = CMD_CBW_20MHZ;
		break;
	default:
		return -EINVAL;
	}

	/* stop request only needs the band; no rate encoding required */
	if (!en) {
		req.op.rf.param.func_data = cpu_to_le32(phy->band_idx);
		goto out;
	}

	if (td->tx_rate_mode <= MT76_TM_TX_MODE_OFDM) {
		struct ieee80211_supported_band *sband;
		u8 idx = rate_idx;

		if (chandef->chan->band == NL80211_BAND_5GHZ)
			sband = &phy->mt76->sband_5g.sband;
		else
			sband = &phy->mt76->sband_2g.sband;

		/* presumably the OFDM rates sit after the 4 CCK entries in
		 * the bitrate table — TODO confirm against sband layout
		 */
		if (td->tx_rate_mode == MT76_TM_TX_MODE_OFDM)
			idx += 4;
		rate_idx = sband->bitrates[idx].hw_value & 0xff;
	}

	/* map the testmode TX mode to the hardware PHY type */
	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		mode = MT_PHY_TYPE_CCK;
		break;
	case MT76_TM_TX_MODE_OFDM:
		mode = MT_PHY_TYPE_OFDM;
		break;
	case MT76_TM_TX_MODE_HT:
		mode = MT_PHY_TYPE_HT;
		break;
	case MT76_TM_TX_MODE_VHT:
		mode = MT_PHY_TYPE_VHT;
		break;
	case MT76_TM_TX_MODE_HE_SU:
		mode = MT_PHY_TYPE_HE_SU;
		break;
	case MT76_TM_TX_MODE_HE_EXT_SU:
		mode = MT_PHY_TYPE_HE_EXT_SU;
		break;
	case MT76_TM_TX_MODE_HE_TB:
		mode = MT_PHY_TYPE_HE_TB;
		break;
	case MT76_TM_TX_MODE_HE_MU:
		mode = MT_PHY_TYPE_HE_MU;
		break;
	default:
		return -EINVAL;
	}

	/* rateval: PHY type in the upper bits, rate index in the lower 6 */
	rateval = mode << 6 | rate_idx;
	tx_cont->rateval = cpu_to_le16(rateval);

out:
	if (!en) {
		int ret;

		/* send the stop request, then return RF to normal mode */
		ret = mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
					sizeof(req), true);
		if (ret)
			return ret;

		return bersa_tm_rf_switch_mode(dev, RF_OPER_NORMAL);
	}

	/* enter RF test mode and retune before starting continuous TX */
	bersa_tm_rf_switch_mode(dev, RF_OPER_RF_TEST);
	bersa_tm_update_channel(phy);

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
				 sizeof(req), true);
}
649
650static void
651bersa_tm_update_params(struct bersa_phy *phy, u32 changed)
652{
653 struct mt76_testmode_data *td = &phy->mt76->test;
654 bool en = phy->mt76->test.state != MT76_TM_STATE_OFF;
655
656 if (changed & BIT(TM_CHANGED_FREQ_OFFSET))
657 bersa_tm_set_freq_offset(phy, en, en ? td->freq_offset : 0);
658 if (changed & BIT(TM_CHANGED_TXPOWER))
659 bersa_tm_set_tx_power(phy);
660}
661
/* mt76 testmode .set_state hook: dispatch the state transition to the
 * matching TX/RX/CONT/init handler and, when crossing the OFF<->IDLE
 * boundary, replay every parameter the user has set so far.
 */
static int
bersa_tm_set_state(struct mt76_phy *mphy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &mphy->test;
	struct bersa_phy *phy = mphy->priv;
	enum mt76_testmode_state prev_state = td->state;

	mphy->test.state = state;

	/* a handler runs if either side of the transition is its state */
	if (prev_state == MT76_TM_STATE_TX_FRAMES ||
	    state == MT76_TM_STATE_TX_FRAMES)
		bersa_tm_set_tx_frames(phy, state == MT76_TM_STATE_TX_FRAMES);
	else if (prev_state == MT76_TM_STATE_RX_FRAMES ||
		 state == MT76_TM_STATE_RX_FRAMES)
		bersa_tm_set_rx_frames(phy, state == MT76_TM_STATE_RX_FRAMES);
	else if (prev_state == MT76_TM_STATE_TX_CONT ||
		 state == MT76_TM_STATE_TX_CONT)
		bersa_tm_set_tx_cont(phy, state == MT76_TM_STATE_TX_CONT);
	else if (prev_state == MT76_TM_STATE_OFF ||
		 state == MT76_TM_STATE_OFF)
		bersa_tm_init(phy, !(state == MT76_TM_STATE_OFF));

	if ((state == MT76_TM_STATE_IDLE &&
	     prev_state == MT76_TM_STATE_OFF) ||
	    (state == MT76_TM_STATE_OFF &&
	     prev_state == MT76_TM_STATE_IDLE)) {
		u32 changed = 0;
		int i;

		/* collect every attribute the user has set (param_set is a
		 * bitmap indexed by netlink attribute id)
		 */
		for (i = 0; i < ARRAY_SIZE(tm_change_map); i++) {
			u16 cur = tm_change_map[i];

			if (td->param_set[cur / 32] & BIT(cur % 32))
				changed |= BIT(i);
		}

		bersa_tm_update_params(phy, changed);
	}

	return 0;
}
703
704static int
705bersa_tm_set_params(struct mt76_phy *mphy, struct nlattr **tb,
706 enum mt76_testmode_state new_state)
707{
708 struct mt76_testmode_data *td = &mphy->test;
709 struct bersa_phy *phy = mphy->priv;
710 u32 changed = 0;
711 int i;
712
713 BUILD_BUG_ON(NUM_TM_CHANGED >= 32);
714
715 if (new_state == MT76_TM_STATE_OFF ||
716 td->state == MT76_TM_STATE_OFF)
717 return 0;
718
719 if (td->tx_antenna_mask & ~mphy->chainmask)
720 return -EINVAL;
721
722 for (i = 0; i < ARRAY_SIZE(tm_change_map); i++) {
723 if (tb[tm_change_map[i]])
724 changed |= BIT(i);
725 }
726
727 bersa_tm_update_params(phy, changed);
728
729 return 0;
730}
731
/* mt76 testmode .dump_stats hook: emit the last RX measurements (freq
 * offset, per-chain RCPI, in-band/wide-band RSSI, SNR) as nested netlink
 * attributes and fold the hardware FCS error counter into the RX stats.
 * NOTE(review): error paths return without nla_nest_cancel(); presumably
 * the caller discards the partially-filled skb on failure — confirm
 * against the mt76 testmode dump path.
 */
static int
bersa_tm_dump_stats(struct mt76_phy *mphy, struct sk_buff *msg)
{
	struct bersa_phy *phy = mphy->priv;
	struct bersa_dev *dev = phy->dev;
	enum mt76_rxq_id q;
	void *rx, *rssi;
	u16 fcs_err;
	int i;
	u32 cnt;

	rx = nla_nest_start(msg, MT76_TM_STATS_ATTR_LAST_RX);
	if (!rx)
		return -ENOMEM;

	if (nla_put_s32(msg, MT76_TM_RX_ATTR_FREQ_OFFSET, phy->test.last_freq_offset))
		return -ENOMEM;

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_RCPI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_rcpi); i++)
		if (nla_put_u8(msg, i, phy->test.last_rcpi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_IB_RSSI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_ib_rssi); i++)
		if (nla_put_s8(msg, i, phy->test.last_ib_rssi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_WB_RSSI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_wb_rssi); i++)
		if (nla_put_s8(msg, i, phy->test.last_wb_rssi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	if (nla_put_u8(msg, MT76_TM_RX_ATTR_SNR, phy->test.last_snr))
		return -ENOMEM;

	nla_nest_end(msg, rx);

	/* MIB_SDR3 read clears the hardware FCS error counter */
	cnt = mt76_rr(dev, MT_MIB_SDR3(phy->band_idx));
	fcs_err = FIELD_GET(MT_MIB_SDR3_FCS_ERR_MASK, cnt);

	q = phy->band_idx ? MT_RXQ_EXT : MT_RXQ_MAIN;
	mphy->test.rx_stats.packets[q] += fcs_err;
	mphy->test.rx_stats.fcs_error[q] += fcs_err;

	return 0;
}
794
/* mt76 testmode entry points for the bersa driver. */
const struct mt76_testmode_ops bersa_testmode_ops = {
	.set_state = bersa_tm_set_state,
	.set_params = bersa_tm_set_params,
	.dump_stats = bersa_tm_dump_stats,
};