// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 MediaTek Inc. */

#include "bersa.h"
#include "mac.h"
#include "mcu.h"
#include "testmode.h"

enum {
	TM_CHANGED_TXPOWER,
	TM_CHANGED_FREQ_OFFSET,

	/* must be last */
	NUM_TM_CHANGED
};

static const u8 tm_change_map[] = {
	[TM_CHANGED_TXPOWER] = MT76_TM_ATTR_TX_POWER,
	[TM_CHANGED_FREQ_OFFSET] = MT76_TM_ATTR_FREQ_OFFSET,
};

struct reg_band {
	u32 band[2];
};

#define REG_BAND(_list, _reg) \
	{ _list.band[0] = MT_##_reg(0); \
	  _list.band[1] = MT_##_reg(1); }
#define REG_BAND_IDX(_list, _reg, _idx) \
	{ _list.band[0] = MT_##_reg(0, _idx); \
	  _list.band[1] = MT_##_reg(1, _idx); }

#define TM_REG_MAX_ID	17
static struct reg_band reg_backup_list[TM_REG_MAX_ID];

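/*
 * Push the testmode TX power (if one is configured) to firmware via the
 * TX_POWER_FEATURE_CTRL command; only the antenna 0 value is sent and the
 * remaining antennas follow it.
 */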
static int
bersa_tm_set_tx_power(struct bersa_phy *phy)
{
	struct bersa_dev *dev = phy->dev;
	struct mt76_phy *mphy = phy->mt76;
	struct cfg80211_chan_def *chandef = &mphy->chandef;
	int freq = chandef->center_freq1;
	int ret;
	struct {
		u8 format_id;
		u8 band;
		s8 tx_power;
		u8 ant_idx;	/* only 0 is valid */
		u8 center_chan;
		u8 rsv[3];
	} __packed req = {
		.format_id = 0xf,
		.band = phy->band_idx,
		.center_chan = ieee80211_frequency_to_channel(freq),
	};
	u8 *tx_power = NULL;

	if (phy->mt76->test.state != MT76_TM_STATE_OFF)
		tx_power = phy->mt76->test.tx_power;

	/* TX power of the other antennas is the same as antenna 0 */
	if (tx_power && tx_power[0])
		req.tx_power = tx_power[0];

	ret = mt76_mcu_send_msg(&dev->mt76,
				MCU_EXT_CMD(TX_POWER_FEATURE_CTRL),
				&req, sizeof(req), false);

	return ret;
}

static int
bersa_tm_set_freq_offset(struct bersa_phy *phy, bool en, u32 val)
{
	struct bersa_dev *dev = phy->dev;
	struct bersa_tm_cmd req = {
		.testmode_en = en,
		.param_idx = MCU_ATE_SET_FREQ_OFFSET,
		.param.freq.band = phy->band_idx,
		.param.freq.freq_offset = cpu_to_le32(val),
	};

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
				 sizeof(req), false);
}

static int
bersa_tm_mode_ctrl(struct bersa_dev *dev, bool enable)
{
	struct {
		u8 format_id;
		bool enable;
		u8 rsv[2];
	} __packed req = {
		.format_id = 0x6,
		.enable = enable,
	};

	return mt76_mcu_send_msg(&dev->mt76,
				 MCU_EXT_CMD(TX_POWER_FEATURE_CTRL),
				 &req, sizeof(req), false);
}

static int
bersa_tm_set_trx(struct bersa_phy *phy, int type, bool en)
{
	struct bersa_dev *dev = phy->dev;
	struct bersa_tm_cmd req = {
		.testmode_en = 1,
		.param_idx = MCU_ATE_SET_TRX,
		.param.trx.type = type,
		.param.trx.enable = en,
		.param.trx.band = phy->band_idx,
	};

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
				 sizeof(req), false);
}

static int
bersa_tm_clean_hwq(struct bersa_phy *phy, u8 wcid)
{
	struct bersa_dev *dev = phy->dev;
	struct bersa_tm_cmd req = {
		.testmode_en = 1,
		.param_idx = MCU_ATE_CLEAN_TXQUEUE,
		.param.clean.wcid = wcid,
		.param.clean.band = phy->band_idx,
	};

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
				 sizeof(req), false);
}

static int
bersa_tm_set_slot_time(struct bersa_phy *phy, u8 slot_time, u8 sifs)
{
	struct bersa_dev *dev = phy->dev;
	struct bersa_tm_cmd req = {
		.testmode_en = phy->mt76->test.state != MT76_TM_STATE_OFF,
		.param_idx = MCU_ATE_SET_SLOT_TIME,
		.param.slot.slot_time = slot_time,
		.param.slot.sifs = sifs,
		.param.slot.rifs = 2,
		.param.slot.eifs = cpu_to_le16(60),
		.param.slot.band = phy->band_idx,
	};

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
				 sizeof(req), false);
}

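/*
 * Select the TX arbiter operating mode: normal operation when testmode is
 * off, the MU test mode for HE MU transmissions, forced single-user
 * otherwise.
 */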
static int
bersa_tm_set_tam_arb(struct bersa_phy *phy, bool enable, bool mu)
{
	struct bersa_dev *dev = phy->dev;
	u32 op_mode;

	if (!enable)
		op_mode = TAM_ARB_OP_MODE_NORMAL;
	else if (mu)
		op_mode = TAM_ARB_OP_MODE_TEST;
	else
		op_mode = TAM_ARB_OP_MODE_FORCE_SU;

	return bersa_mcu_set_muru_ctrl(dev, MURU_SET_ARB_OP_MODE, op_mode);
}

static int
bersa_tm_set_wmm_qid(struct bersa_dev *dev, u8 qid, u8 aifs, u8 cw_min,
		     u16 cw_max, u16 txop)
{
#if 0
	struct bersa_mcu_tx req = { .total = 1 };
	struct edca *e = &req.edca[0];

	e->queue = qid;
	e->set = WMM_PARAM_SET;

	e->aifs = aifs;
	e->cw_min = cw_min;
	e->cw_max = cpu_to_le16(cw_max);
	e->txop = cpu_to_le16(txop);

	return bersa_mcu_update_edca(dev, &req);
#endif
	return 0;
}

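/*
 * Translate the requested inter-packet gap (usec) into MAC timing: short
 * gaps are absorbed by SIFS alone, longer ones are split across a fixed
 * contention window, AIFSN and SIFS. The derived slot/SIFS values are also
 * programmed into the TRCR0 turnaround timers and the BE WMM queue.
 */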
static int
bersa_tm_set_ipg_params(struct bersa_phy *phy, u32 ipg, u8 mode)
{
#define TM_DEFAULT_SIFS	10
#define TM_MAX_SIFS	127
#define TM_MAX_AIFSN	0xf
#define TM_MIN_AIFSN	0x1
#define BBP_PROC_TIME	1500
	struct bersa_dev *dev = phy->dev;
	u8 sig_ext = (mode == MT76_TM_TX_MODE_CCK) ? 0 : 6;
	u8 slot_time = 9, sifs = TM_DEFAULT_SIFS;
	u8 aifsn = TM_MIN_AIFSN;
	u32 i2t_time, tr2t_time, txv_time;
	u16 cw = 0;

	if (ipg < sig_ext + slot_time + sifs)
		ipg = 0;

	if (!ipg)
		goto done;

	ipg -= sig_ext;

	if (ipg <= (TM_MAX_SIFS + slot_time)) {
		sifs = ipg - slot_time;
	} else {
		u32 val = (ipg + slot_time) / slot_time;

		while (val >>= 1)
			cw++;

		if (cw > 16)
			cw = 16;

		ipg -= ((1 << cw) - 1) * slot_time;

		aifsn = ipg / slot_time;
		if (aifsn > TM_MAX_AIFSN)
			aifsn = TM_MAX_AIFSN;

		ipg -= aifsn * slot_time;

		if (ipg > TM_DEFAULT_SIFS)
			sifs = min_t(u32, ipg, TM_MAX_SIFS);
	}
done:
	txv_time = mt76_get_field(dev, MT_TMAC_ATCR(phy->band_idx),
				  MT_TMAC_ATCR_TXV_TOUT);
	txv_time *= 50;	/* normal clock time */

	i2t_time = (slot_time * 1000 - txv_time - BBP_PROC_TIME) / 50;
	tr2t_time = (sifs * 1000 - txv_time - BBP_PROC_TIME) / 50;

	mt76_set(dev, MT_TMAC_TRCR0(phy->band_idx),
		 FIELD_PREP(MT_TMAC_TRCR0_TR2T_CHK, tr2t_time) |
		 FIELD_PREP(MT_TMAC_TRCR0_I2T_CHK, i2t_time));

	bersa_tm_set_slot_time(phy, slot_time, sifs);

	return bersa_tm_set_wmm_qid(dev,
				    mt76_connac_lmac_mapping(IEEE80211_AC_BE),
				    aifsn, cw, cw, 0);
}

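/*
 * Size the test frame from the requested TX time: build a rate_info for
 * the configured testmode rate, let cfg80211 compute the bitrate and
 * allocate a test skb long enough to occupy roughly tx_time usec on air.
 */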
static int
bersa_tm_set_tx_len(struct bersa_phy *phy, u32 tx_time)
{
	struct mt76_phy *mphy = phy->mt76;
	struct mt76_testmode_data *td = &mphy->test;
	struct ieee80211_supported_band *sband;
	struct rate_info rate = {};
	u16 flags = 0, tx_len;
	u32 bitrate;
	int ret;

	if (!tx_time)
		return 0;

	rate.mcs = td->tx_rate_idx;
	rate.nss = td->tx_rate_nss;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
	case MT76_TM_TX_MODE_OFDM:
		if (mphy->chandef.chan->band == NL80211_BAND_5GHZ)
			sband = &mphy->sband_5g.sband;
		else if (mphy->chandef.chan->band == NL80211_BAND_6GHZ)
			sband = &mphy->sband_6g.sband;
		else
			sband = &mphy->sband_2g.sband;

		rate.legacy = sband->bitrates[rate.mcs].bitrate;
		break;
	case MT76_TM_TX_MODE_HT:
		rate.mcs += rate.nss * 8;
		flags |= RATE_INFO_FLAGS_MCS;

		if (td->tx_rate_sgi)
			flags |= RATE_INFO_FLAGS_SHORT_GI;
		break;
	case MT76_TM_TX_MODE_VHT:
		flags |= RATE_INFO_FLAGS_VHT_MCS;

		if (td->tx_rate_sgi)
			flags |= RATE_INFO_FLAGS_SHORT_GI;
		break;
	case MT76_TM_TX_MODE_HE_SU:
	case MT76_TM_TX_MODE_HE_EXT_SU:
	case MT76_TM_TX_MODE_HE_TB:
	case MT76_TM_TX_MODE_HE_MU:
		rate.he_gi = td->tx_rate_sgi;
		flags |= RATE_INFO_FLAGS_HE_MCS;
		break;
	default:
		break;
	}
	rate.flags = flags;

	switch (mphy->chandef.width) {
	case NL80211_CHAN_WIDTH_160:
	case NL80211_CHAN_WIDTH_80P80:
		rate.bw = RATE_INFO_BW_160;
		break;
	case NL80211_CHAN_WIDTH_80:
		rate.bw = RATE_INFO_BW_80;
		break;
	case NL80211_CHAN_WIDTH_40:
		rate.bw = RATE_INFO_BW_40;
		break;
	default:
		rate.bw = RATE_INFO_BW_20;
		break;
	}

	bitrate = cfg80211_calculate_bitrate(&rate);
	tx_len = bitrate * tx_time / 10 / 8;

	ret = mt76_testmode_alloc_skb(phy->mt76, tx_len);
	if (ret)
		return ret;

	return 0;
}

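/*
 * Back up the per-band MAC registers that testmode modifies and restore
 * them when testmode is turned off. On entry this also disables frame
 * protection, tightens the RTS/BAR retry limits and programs a fixed RX
 * filter for testmode reception.
 */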
static void
bersa_tm_reg_backup_restore(struct bersa_phy *phy)
{
	int n_regs = ARRAY_SIZE(reg_backup_list);
	struct bersa_dev *dev = phy->dev;
	u32 *b = phy->test.reg_backup;
	int i;

	REG_BAND_IDX(reg_backup_list[0], AGG_PCR0, 0);
	REG_BAND_IDX(reg_backup_list[1], AGG_PCR0, 1);
	REG_BAND_IDX(reg_backup_list[2], AGG_AWSCR0, 0);
	REG_BAND_IDX(reg_backup_list[3], AGG_AWSCR0, 1);
	REG_BAND_IDX(reg_backup_list[4], AGG_AWSCR0, 2);
	REG_BAND_IDX(reg_backup_list[5], AGG_AWSCR0, 3);
	REG_BAND(reg_backup_list[6], AGG_MRCR);
	REG_BAND(reg_backup_list[7], TMAC_TFCR0);
	REG_BAND(reg_backup_list[8], TMAC_TCR0);
	REG_BAND(reg_backup_list[9], AGG_ATCR1);
	REG_BAND(reg_backup_list[10], AGG_ATCR3);
	REG_BAND(reg_backup_list[11], TMAC_TRCR0);
	REG_BAND(reg_backup_list[12], TMAC_ICR0);
	REG_BAND_IDX(reg_backup_list[13], ARB_DRNGR0, 0);
	REG_BAND_IDX(reg_backup_list[14], ARB_DRNGR0, 1);
	REG_BAND(reg_backup_list[15], WF_RFCR);
	REG_BAND(reg_backup_list[16], WF_RFCR1);

	if (phy->mt76->test.state == MT76_TM_STATE_OFF) {
		for (i = 0; i < n_regs; i++)
			mt76_wr(dev, reg_backup_list[i].band[phy->band_idx], b[i]);
		return;
	}

	if (!b) {
		b = devm_kzalloc(dev->mt76.dev, 4 * n_regs, GFP_KERNEL);
		if (!b)
			return;

		phy->test.reg_backup = b;
		for (i = 0; i < n_regs; i++)
			b[i] = mt76_rr(dev, reg_backup_list[i].band[phy->band_idx]);
	}

	mt76_clear(dev, MT_AGG_PCR0(phy->band_idx, 0), MT_AGG_PCR0_MM_PROT |
		   MT_AGG_PCR0_GF_PROT | MT_AGG_PCR0_ERP_PROT |
		   MT_AGG_PCR0_VHT_PROT | MT_AGG_PCR0_BW20_PROT |
		   MT_AGG_PCR0_BW40_PROT | MT_AGG_PCR0_BW80_PROT);
	mt76_set(dev, MT_AGG_PCR0(phy->band_idx, 0), MT_AGG_PCR0_PTA_WIN_DIS);

	mt76_wr(dev, MT_AGG_PCR0(phy->band_idx, 1), MT_AGG_PCR1_RTS0_NUM_THRES |
		MT_AGG_PCR1_RTS0_LEN_THRES);

	mt76_clear(dev, MT_AGG_MRCR(phy->band_idx), MT_AGG_MRCR_BAR_CNT_LIMIT |
		   MT_AGG_MRCR_LAST_RTS_CTS_RN | MT_AGG_MRCR_RTS_FAIL_LIMIT |
		   MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT);

	mt76_rmw(dev, MT_AGG_MRCR(phy->band_idx), MT_AGG_MRCR_RTS_FAIL_LIMIT |
		 MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT,
		 FIELD_PREP(MT_AGG_MRCR_RTS_FAIL_LIMIT, 1) |
		 FIELD_PREP(MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT, 1));

	mt76_wr(dev, MT_TMAC_TFCR0(phy->band_idx), 0);
	mt76_clear(dev, MT_TMAC_TCR0(phy->band_idx), MT_TMAC_TCR0_TBTT_STOP_CTRL);

	/* config rx filter for testmode rx */
	mt76_wr(dev, MT_WF_RFCR(phy->band_idx), 0xcf70a);
	mt76_wr(dev, MT_WF_RFCR1(phy->band_idx), 0);
}

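/*
 * Enter or leave testmode: toggle the firmware testmode switch, back up or
 * restore the MAC registers, stop normal TX/RX and add or remove the
 * monitor vif/sta that test frames are transmitted on.
 */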
static void
bersa_tm_init(struct bersa_phy *phy, bool en)
{
	struct bersa_dev *dev = phy->dev;

	if (!test_bit(MT76_STATE_RUNNING, &phy->mt76->state))
		return;

	/* TODO: needs checking */
	/* bersa_mcu_set_sku_en(phy, !en); */

	bersa_tm_mode_ctrl(dev, en);
	bersa_tm_reg_backup_restore(phy);
	bersa_tm_set_trx(phy, TM_MAC_TXRX, !en);

	bersa_mcu_add_bss_info(phy, phy->monitor_vif, en);
	bersa_mcu_add_sta(dev, phy->monitor_vif, NULL, en);

	if (!en)
		bersa_tm_set_tam_arb(phy, en, 0);
}

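/*
 * Re-program the current channel with the mt76 mutex temporarily dropped,
 * then refresh the firmware's RX-path channel information.
 */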
static void
bersa_tm_update_channel(struct bersa_phy *phy)
{
	mutex_unlock(&phy->dev->mt76.mutex);
	bersa_set_channel(phy);
	mutex_lock(&phy->dev->mt76.mutex);

	bersa_mcu_set_chan_info(phy, UNI_CHANNEL_RX_PATH);
}

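/*
 * Prepare TX frame testing: stop RXV capture, flush the hardware queue,
 * derive the spatial extension index from the TX antenna mask (unless one
 * was set explicitly), configure the TX arbiter, work out IPG/duty-cycle
 * timing and the frame length, then start transmission on the monitor vif.
 */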
static void
bersa_tm_set_tx_frames(struct bersa_phy *phy, bool en)
{
	static const u8 spe_idx_map[] = {0, 0, 1, 0, 3, 2, 4, 0,
					 9, 8, 6, 10, 16, 12, 18, 0};
	struct mt76_testmode_data *td = &phy->mt76->test;
	struct bersa_dev *dev = phy->dev;
	struct ieee80211_tx_info *info;
	u8 duty_cycle = td->tx_duty_cycle;
	u32 tx_time = td->tx_time;
	u32 ipg = td->tx_ipg;
	u8 phy_idx = bersa_get_phy_id(phy);
	u16 chainshift;

	bersa_tm_set_trx(phy, TM_MAC_RX_RXV, false);
	bersa_tm_clean_hwq(phy, dev->mt76.global_wcid.idx);

	if (en) {
		bersa_tm_update_channel(phy);

		if (td->tx_spe_idx) {
			phy->test.spe_idx = td->tx_spe_idx;
		} else {
			u8 tx_ant = td->tx_antenna_mask;

			if (phy_idx == MT_EXT_PHY)
				chainshift = dev->chain_shift_ext;
			else if (phy_idx == MT_TRI_PHY)
				chainshift = dev->chain_shift_tri;
			else
				chainshift = 0;

			tx_ant >>= chainshift;
			phy->test.spe_idx = spe_idx_map[tx_ant];
		}
	}

	bersa_tm_set_tam_arb(phy, en,
			     td->tx_rate_mode == MT76_TM_TX_MODE_HE_MU);

	/* if all three params are set, duty_cycle will be ignored */
	if (duty_cycle && tx_time && !ipg) {
		ipg = tx_time * 100 / duty_cycle - tx_time;
	} else if (duty_cycle && !tx_time && ipg) {
		if (duty_cycle < 100)
			tx_time = duty_cycle * ipg / (100 - duty_cycle);
	}

	bersa_tm_set_ipg_params(phy, ipg, td->tx_rate_mode);
	bersa_tm_set_tx_len(phy, tx_time);

	if (ipg)
		td->tx_queued_limit = MT76_TM_TIMEOUT * 1000000 / ipg / 2;

	if (!en || !td->tx_skb)
		return;

	info = IEEE80211_SKB_CB(td->tx_skb);
	info->control.vif = phy->monitor_vif;

	bersa_tm_set_trx(phy, TM_MAC_TX, en);
}

static void
bersa_tm_set_rx_frames(struct bersa_phy *phy, bool en)
{
	bersa_tm_set_trx(phy, TM_MAC_RX_RXV, false);

	if (en) {
		struct bersa_dev *dev = phy->dev;

		bersa_tm_update_channel(phy);

		/* read-clear */
		mt76_rr(dev, MT_MIB_SDR3(phy->band_idx));
		bersa_tm_set_trx(phy, TM_MAC_RX_RXV, en);
	}
}

static int
bersa_tm_rf_switch_mode(struct bersa_dev *dev, u32 oper)
{
	struct bersa_tm_rf_test req = {
		.op.op_mode = cpu_to_le32(oper),
	};

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
				 sizeof(req), true);
}

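/*
 * Start or stop continuous TX through the RF_TEST firmware command. The
 * request carries the current channel, bandwidth, antenna mask and a
 * rateval encoding the configured PHY mode and rate index; the radio is
 * switched to RF test operation while continuous TX runs and back to
 * normal operation when it stops.
 */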
static int
bersa_tm_set_tx_cont(struct bersa_phy *phy, bool en)
{
#define TX_CONT_START	0x05
#define TX_CONT_STOP	0x06
	struct bersa_dev *dev = phy->dev;
	struct cfg80211_chan_def *chandef = &phy->mt76->chandef;
	int freq1 = ieee80211_frequency_to_channel(chandef->center_freq1);
	struct mt76_testmode_data *td = &phy->mt76->test;
	u32 func_idx = en ? TX_CONT_START : TX_CONT_STOP;
	u8 rate_idx = td->tx_rate_idx, mode;
	u16 rateval;
	struct bersa_tm_rf_test req = {
		.action = 1,
		.icap_len = 120,
		.op.rf.func_idx = cpu_to_le32(func_idx),
	};
	struct tm_tx_cont *tx_cont = &req.op.rf.param.tx_cont;

	tx_cont->control_ch = chandef->chan->hw_value;
	tx_cont->center_ch = freq1;
	tx_cont->tx_ant = td->tx_antenna_mask;
	tx_cont->band = phy->band_idx;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		tx_cont->bw = CMD_CBW_40MHZ;
		break;
	case NL80211_CHAN_WIDTH_80:
		tx_cont->bw = CMD_CBW_80MHZ;
		break;
	case NL80211_CHAN_WIDTH_80P80:
		tx_cont->bw = CMD_CBW_8080MHZ;
		break;
	case NL80211_CHAN_WIDTH_160:
		tx_cont->bw = CMD_CBW_160MHZ;
		break;
	case NL80211_CHAN_WIDTH_5:
		tx_cont->bw = CMD_CBW_5MHZ;
		break;
	case NL80211_CHAN_WIDTH_10:
		tx_cont->bw = CMD_CBW_10MHZ;
		break;
	case NL80211_CHAN_WIDTH_20:
	case NL80211_CHAN_WIDTH_20_NOHT:
		tx_cont->bw = CMD_CBW_20MHZ;
		break;
	default:
		return -EINVAL;
	}

	if (!en) {
		req.op.rf.param.func_data = cpu_to_le32(phy->band_idx);
		goto out;
	}

	if (td->tx_rate_mode <= MT76_TM_TX_MODE_OFDM) {
		struct ieee80211_supported_band *sband;
		u8 idx = rate_idx;

		if (chandef->chan->band == NL80211_BAND_5GHZ)
			sband = &phy->mt76->sband_5g.sband;
		else if (chandef->chan->band == NL80211_BAND_6GHZ)
			sband = &phy->mt76->sband_6g.sband;
		else
			sband = &phy->mt76->sband_2g.sband;

		if (td->tx_rate_mode == MT76_TM_TX_MODE_OFDM)
			idx += 4;
		rate_idx = sband->bitrates[idx].hw_value & 0xff;
	}

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		mode = MT_PHY_TYPE_CCK;
		break;
	case MT76_TM_TX_MODE_OFDM:
		mode = MT_PHY_TYPE_OFDM;
		break;
	case MT76_TM_TX_MODE_HT:
		mode = MT_PHY_TYPE_HT;
		break;
	case MT76_TM_TX_MODE_VHT:
		mode = MT_PHY_TYPE_VHT;
		break;
	case MT76_TM_TX_MODE_HE_SU:
		mode = MT_PHY_TYPE_HE_SU;
		break;
	case MT76_TM_TX_MODE_HE_EXT_SU:
		mode = MT_PHY_TYPE_HE_EXT_SU;
		break;
	case MT76_TM_TX_MODE_HE_TB:
		mode = MT_PHY_TYPE_HE_TB;
		break;
	case MT76_TM_TX_MODE_HE_MU:
		mode = MT_PHY_TYPE_HE_MU;
		break;
	default:
		return -EINVAL;
	}

	rateval = mode << 6 | rate_idx;
	tx_cont->rateval = cpu_to_le16(rateval);

out:
	if (!en) {
		int ret;

		ret = mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
					sizeof(req), true);
		if (ret)
			return ret;

		return bersa_tm_rf_switch_mode(dev, RF_OPER_NORMAL);
	}

	bersa_tm_rf_switch_mode(dev, RF_OPER_RF_TEST);
	bersa_tm_update_channel(phy);

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
				 sizeof(req), true);
}

static void
bersa_tm_update_params(struct bersa_phy *phy, u32 changed)
{
	struct mt76_testmode_data *td = &phy->mt76->test;
	bool en = phy->mt76->test.state != MT76_TM_STATE_OFF;

	if (changed & BIT(TM_CHANGED_FREQ_OFFSET))
		bersa_tm_set_freq_offset(phy, en, en ? td->freq_offset : 0);
	if (changed & BIT(TM_CHANGED_TXPOWER))
		bersa_tm_set_tx_power(phy);
}

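/*
 * mt76 testmode state transition hook. Each state has its own setup and
 * teardown helper; transitions between OFF and IDLE additionally replay
 * the parameters recorded in the testmode param_set bitmap.
 */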
static int
bersa_tm_set_state(struct mt76_phy *mphy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &mphy->test;
	struct bersa_phy *phy = mphy->priv;
	enum mt76_testmode_state prev_state = td->state;

	mphy->test.state = state;

	if (prev_state == MT76_TM_STATE_TX_FRAMES ||
	    state == MT76_TM_STATE_TX_FRAMES)
		bersa_tm_set_tx_frames(phy, state == MT76_TM_STATE_TX_FRAMES);
	else if (prev_state == MT76_TM_STATE_RX_FRAMES ||
		 state == MT76_TM_STATE_RX_FRAMES)
		bersa_tm_set_rx_frames(phy, state == MT76_TM_STATE_RX_FRAMES);
	else if (prev_state == MT76_TM_STATE_TX_CONT ||
		 state == MT76_TM_STATE_TX_CONT)
		bersa_tm_set_tx_cont(phy, state == MT76_TM_STATE_TX_CONT);
	else if (prev_state == MT76_TM_STATE_OFF ||
		 state == MT76_TM_STATE_OFF)
		bersa_tm_init(phy, state != MT76_TM_STATE_OFF);

	if ((state == MT76_TM_STATE_IDLE &&
	     prev_state == MT76_TM_STATE_OFF) ||
	    (state == MT76_TM_STATE_OFF &&
	     prev_state == MT76_TM_STATE_IDLE)) {
		u32 changed = 0;
		int i;

		for (i = 0; i < ARRAY_SIZE(tm_change_map); i++) {
			u16 cur = tm_change_map[i];

			if (td->param_set[cur / 32] & BIT(cur % 32))
				changed |= BIT(i);
		}

		bersa_tm_update_params(phy, changed);
	}

	return 0;
}

static int
bersa_tm_set_params(struct mt76_phy *mphy, struct nlattr **tb,
		    enum mt76_testmode_state new_state)
{
	struct mt76_testmode_data *td = &mphy->test;
	struct bersa_phy *phy = mphy->priv;
	u32 changed = 0;
	int i;

	BUILD_BUG_ON(NUM_TM_CHANGED >= 32);

	if (new_state == MT76_TM_STATE_OFF ||
	    td->state == MT76_TM_STATE_OFF)
		return 0;

	if (td->tx_antenna_mask & ~mphy->chainmask)
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(tm_change_map); i++) {
		if (tb[tm_change_map[i]])
			changed |= BIT(i);
	}

	bersa_tm_update_params(phy, changed);

	return 0;
}

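/*
 * Fill the testmode dump with the last RX measurements (frequency offset,
 * per-antenna RCPI, in-band/wide-band RSSI, SNR) and fold the FCS error
 * count from MIB_SDR3 into the mt76 RX statistics.
 */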
static int
bersa_tm_dump_stats(struct mt76_phy *mphy, struct sk_buff *msg)
{
	struct bersa_phy *phy = mphy->priv;
	struct bersa_dev *dev = phy->dev;
	enum mt76_rxq_id q;
	void *rx, *rssi;
	u16 fcs_err;
	int i;
	u32 cnt;

	rx = nla_nest_start(msg, MT76_TM_STATS_ATTR_LAST_RX);
	if (!rx)
		return -ENOMEM;

	if (nla_put_s32(msg, MT76_TM_RX_ATTR_FREQ_OFFSET, phy->test.last_freq_offset))
		return -ENOMEM;

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_RCPI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_rcpi); i++)
		if (nla_put_u8(msg, i, phy->test.last_rcpi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_IB_RSSI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_ib_rssi); i++)
		if (nla_put_s8(msg, i, phy->test.last_ib_rssi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_WB_RSSI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_wb_rssi); i++)
		if (nla_put_s8(msg, i, phy->test.last_wb_rssi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	if (nla_put_u8(msg, MT76_TM_RX_ATTR_SNR, phy->test.last_snr))
		return -ENOMEM;

	nla_nest_end(msg, rx);

	cnt = mt76_rr(dev, MT_MIB_SDR3(phy->band_idx));
	fcs_err = FIELD_GET(MT_MIB_SDR3_FCS_ERR_MASK, cnt);

	q = phy->band_idx ? MT_RXQ_EXT : MT_RXQ_MAIN;
	mphy->test.rx_stats.packets[q] += fcs_err;
	mphy->test.rx_stats.fcs_error[q] += fcs_err;

	return 0;
}

const struct mt76_testmode_ops bersa_testmode_ops = {
	.set_state = bersa_tm_set_state,
	.set_params = bersa_tm_set_params,
	.dump_stats = bersa_tm_dump_stats,
};