developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 1 | // SPDX-License-Identifier: ISC |
| 2 | /* Copyright (C) 2020 MediaTek Inc. */ |
| 3 | |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 4 | #include "besra.h" |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 5 | #include "mac.h" |
| 6 | #include "mcu.h" |
| 7 | #include "testmode.h" |
| 8 | |
/* Indices into the "changed" bitmask used to track which testmode
 * parameters need to be (re)applied to the hardware/firmware.
 */
enum {
	TM_CHANGED_TXPOWER,
	TM_CHANGED_FREQ_OFFSET,

	/* must be last */
	NUM_TM_CHANGED
};

/* Maps each TM_CHANGED_* bit to its mt76 testmode netlink attribute. */
static const u8 tm_change_map[] = {
	[TM_CHANGED_TXPOWER] = MT76_TM_ATTR_TX_POWER,
	[TM_CHANGED_FREQ_OFFSET] = MT76_TM_ATTR_FREQ_OFFSET,
};

/* Per-band address of one register that is backed up around testmode.
 * NOTE(review): only two band slots exist, but besra_tm_set_tx_frames()
 * knows a third phy (MT_TRI_PHY) - confirm phy->band_idx < 2 wherever
 * reg_backup_list[i].band[phy->band_idx] is dereferenced.
 */
struct reg_band {
	u32 band[2];
};

/* Fill a reg_band entry with the band-0/band-1 addresses of MT_<_reg>. */
#define REG_BAND(_list, _reg) \
	{ _list.band[0] = MT_##_reg(0);	\
	  _list.band[1] = MT_##_reg(1); }
#define REG_BAND_IDX(_list, _reg, _idx) \
	{ _list.band[0] = MT_##_reg(0, _idx);	\
	  _list.band[1] = MT_##_reg(1, _idx); }

/* Number of registers saved/restored by besra_tm_reg_backup_restore(). */
#define TM_REG_MAX_ID	17
static struct reg_band reg_backup_list[TM_REG_MAX_ID];
| 35 | |
| 36 | |
| 37 | static int |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 38 | besra_tm_set_tx_power(struct besra_phy *phy) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 39 | { |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 40 | struct besra_dev *dev = phy->dev; |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 41 | struct mt76_phy *mphy = phy->mt76; |
| 42 | struct cfg80211_chan_def *chandef = &mphy->chandef; |
| 43 | int freq = chandef->center_freq1; |
| 44 | int ret; |
| 45 | struct { |
| 46 | u8 format_id; |
| 47 | u8 band; |
| 48 | s8 tx_power; |
| 49 | u8 ant_idx; /* Only 0 is valid */ |
| 50 | u8 center_chan; |
| 51 | u8 rsv[3]; |
| 52 | } __packed req = { |
| 53 | .format_id = 0xf, |
| 54 | .band = phy->band_idx, |
| 55 | .center_chan = ieee80211_frequency_to_channel(freq), |
| 56 | }; |
| 57 | u8 *tx_power = NULL; |
| 58 | |
| 59 | if (phy->mt76->test.state != MT76_TM_STATE_OFF) |
| 60 | tx_power = phy->mt76->test.tx_power; |
| 61 | |
| 62 | /* Tx power of the other antennas are the same as antenna 0 */ |
| 63 | if (tx_power && tx_power[0]) |
| 64 | req.tx_power = tx_power[0]; |
| 65 | |
| 66 | ret = mt76_mcu_send_msg(&dev->mt76, |
| 67 | MCU_EXT_CMD(TX_POWER_FEATURE_CTRL), |
| 68 | &req, sizeof(req), false); |
| 69 | |
| 70 | return ret; |
| 71 | } |
| 72 | |
| 73 | static int |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 74 | besra_tm_set_freq_offset(struct besra_phy *phy, bool en, u32 val) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 75 | { |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 76 | struct besra_dev *dev = phy->dev; |
| 77 | struct besra_tm_cmd req = { |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 78 | .testmode_en = en, |
| 79 | .param_idx = MCU_ATE_SET_FREQ_OFFSET, |
| 80 | .param.freq.band = phy->band_idx, |
| 81 | .param.freq.freq_offset = cpu_to_le32(val), |
| 82 | }; |
| 83 | |
| 84 | return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req, |
| 85 | sizeof(req), false); |
| 86 | } |
| 87 | |
| 88 | static int |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 89 | besra_tm_mode_ctrl(struct besra_dev *dev, bool enable) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 90 | { |
| 91 | struct { |
| 92 | u8 format_id; |
| 93 | bool enable; |
| 94 | u8 rsv[2]; |
| 95 | } __packed req = { |
| 96 | .format_id = 0x6, |
| 97 | .enable = enable, |
| 98 | }; |
| 99 | |
| 100 | return mt76_mcu_send_msg(&dev->mt76, |
| 101 | MCU_EXT_CMD(TX_POWER_FEATURE_CTRL), |
| 102 | &req, sizeof(req), false); |
| 103 | } |
| 104 | |
| 105 | static int |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 106 | besra_tm_set_trx(struct besra_phy *phy, int type, bool en) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 107 | { |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 108 | struct besra_dev *dev = phy->dev; |
| 109 | struct besra_tm_cmd req = { |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 110 | .testmode_en = 1, |
| 111 | .param_idx = MCU_ATE_SET_TRX, |
| 112 | .param.trx.type = type, |
| 113 | .param.trx.enable = en, |
| 114 | .param.trx.band = phy->band_idx, |
| 115 | }; |
| 116 | |
| 117 | return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req, |
| 118 | sizeof(req), false); |
| 119 | } |
| 120 | |
| 121 | static int |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 122 | besra_tm_clean_hwq(struct besra_phy *phy, u8 wcid) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 123 | { |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 124 | struct besra_dev *dev = phy->dev; |
| 125 | struct besra_tm_cmd req = { |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 126 | .testmode_en = 1, |
| 127 | .param_idx = MCU_ATE_CLEAN_TXQUEUE, |
| 128 | .param.clean.wcid = wcid, |
| 129 | .param.clean.band = phy->band_idx, |
| 130 | }; |
| 131 | |
| 132 | return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req, |
| 133 | sizeof(req), false); |
| 134 | } |
| 135 | |
| 136 | static int |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 137 | besra_tm_set_slot_time(struct besra_phy *phy, u8 slot_time, u8 sifs) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 138 | { |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 139 | struct besra_dev *dev = phy->dev; |
| 140 | struct besra_tm_cmd req = { |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 141 | .testmode_en = !(phy->mt76->test.state == MT76_TM_STATE_OFF), |
| 142 | .param_idx = MCU_ATE_SET_SLOT_TIME, |
| 143 | .param.slot.slot_time = slot_time, |
| 144 | .param.slot.sifs = sifs, |
| 145 | .param.slot.rifs = 2, |
| 146 | .param.slot.eifs = cpu_to_le16(60), |
| 147 | .param.slot.band = phy->band_idx, |
| 148 | }; |
| 149 | |
| 150 | return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req, |
| 151 | sizeof(req), false); |
| 152 | } |
| 153 | |
| 154 | static int |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 155 | besra_tm_set_tam_arb(struct besra_phy *phy, bool enable, bool mu) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 156 | { |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 157 | struct besra_dev *dev = phy->dev; |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 158 | u32 op_mode; |
| 159 | |
| 160 | if (!enable) |
| 161 | op_mode = TAM_ARB_OP_MODE_NORMAL; |
| 162 | else if (mu) |
| 163 | op_mode = TAM_ARB_OP_MODE_TEST; |
| 164 | else |
| 165 | op_mode = TAM_ARB_OP_MODE_FORCE_SU; |
| 166 | |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 167 | return besra_mcu_set_muru_ctrl(dev, MURU_SET_ARB_OP_MODE, op_mode); |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 168 | } |
| 169 | |
/* Program WMM/EDCA parameters (AIFS, CW min/max, TXOP) for one hardware
 * queue.  The firmware update path is currently disabled (#if 0) -
 * presumably pending besra_mcu_update_edca() support; confirm before
 * relying on the parameters taking effect.  Always returns 0.
 */
static int
besra_tm_set_wmm_qid(struct besra_dev *dev, u8 qid, u8 aifs, u8 cw_min,
		     u16 cw_max, u16 txop)
{
#if 0
	struct besra_mcu_tx req = { .total = 1 };
	struct edca *e = &req.edca[0];

	e->queue = qid;
	e->set = WMM_PARAM_SET;

	e->aifs = aifs;
	e->cw_min = cw_min;
	e->cw_max = cpu_to_le16(cw_max);
	e->txop = cpu_to_le16(txop);

	return besra_mcu_update_edca(dev, &req);
#endif
	return 0;
}
| 190 | |
/* Decompose the requested inter-packet gap (ipg, usec) into the MAC timing
 * knobs, i.e. ipg = sig_ext + ((1 << cw) - 1 + aifsn) * slot_time + sifs,
 * clamping each component to its hardware limit, then program slot/SIFS
 * timing and the BE queue EDCA parameters accordingly.
 */
static int
besra_tm_set_ipg_params(struct besra_phy *phy, u32 ipg, u8 mode)
{
#define TM_DEFAULT_SIFS	10
#define TM_MAX_SIFS	127
#define TM_MAX_AIFSN	0xf
#define TM_MIN_AIFSN	0x1
#define BBP_PROC_TIME	1500
	struct besra_dev *dev = phy->dev;
	/* OFDM-based modes carry a 6 usec signal extension, CCK none */
	u8 sig_ext = (mode == MT76_TM_TX_MODE_CCK) ? 0 : 6;
	u8 slot_time = 9, sifs = TM_DEFAULT_SIFS;
	u8 aifsn = TM_MIN_AIFSN;
	u32 i2t_time, tr2t_time, txv_time;
	u16 cw = 0;

	/* gaps below the minimum frame spacing fall back to defaults */
	if (ipg < sig_ext + slot_time + sifs)
		ipg = 0;

	if (!ipg)
		goto done;

	ipg -= sig_ext;

	if (ipg <= (TM_MAX_SIFS + slot_time)) {
		/* small gap: express the remainder entirely as SIFS */
		sifs = ipg - slot_time;
	} else {
		u32 val = (ipg + slot_time) / slot_time;

		/* cw = floor(log2(gap in slot units)), capped at 16 */
		while (val >>= 1)
			cw++;

		if (cw > 16)
			cw = 16;

		ipg -= ((1 << cw) - 1) * slot_time;

		/* spend the rest on AIFSN slots, then on SIFS */
		aifsn = ipg / slot_time;
		if (aifsn > TM_MAX_AIFSN)
			aifsn = TM_MAX_AIFSN;

		ipg -= aifsn * slot_time;

		if (ipg > TM_DEFAULT_SIFS)
			sifs = min_t(u32, ipg, TM_MAX_SIFS);
	}
done:
	txv_time = mt76_get_field(dev, MT_TMAC_ATCR(phy->band_idx),
				  MT_TMAC_ATCR_TXV_TOUT);
	txv_time *= 50;	/* normal clock time */

	/* idle-to-tx and tx-to-rx turnaround checks, in 50-unit ticks */
	i2t_time = (slot_time * 1000 - txv_time - BBP_PROC_TIME) / 50;
	tr2t_time = (sifs * 1000 - txv_time - BBP_PROC_TIME) / 50;

	mt76_set(dev, MT_TMAC_TRCR0(phy->band_idx),
		 FIELD_PREP(MT_TMAC_TRCR0_TR2T_CHK, tr2t_time) |
		 FIELD_PREP(MT_TMAC_TRCR0_I2T_CHK, i2t_time));

	besra_tm_set_slot_time(phy, slot_time, sifs);

	return besra_tm_set_wmm_qid(dev,
				    mt76_connac_lmac_mapping(IEEE80211_AC_BE),
				    aifsn, cw, cw, 0);
}
| 254 | |
/* Compute the frame length that gives the requested per-packet airtime
 * (tx_time, usec) at the currently configured testmode rate and channel
 * width, and allocate the tx template skb of that size.
 * Returns 0 on success, or immediately if tx_time is unset.
 */
static int
besra_tm_set_tx_len(struct besra_phy *phy, u32 tx_time)
{
	struct mt76_phy *mphy = phy->mt76;
	struct mt76_testmode_data *td = &mphy->test;
	struct ieee80211_supported_band *sband;
	struct rate_info rate = {};
	u16 flags = 0, tx_len;
	u32 bitrate;
	int ret;

	if (!tx_time)
		return 0;

	rate.mcs = td->tx_rate_idx;
	rate.nss = td->tx_rate_nss;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
	case MT76_TM_TX_MODE_OFDM:
		/* legacy rate: take the bitrate from the band's rate table */
		if (mphy->chandef.chan->band == NL80211_BAND_5GHZ)
			sband = &mphy->sband_5g.sband;
		else if (mphy->chandef.chan->band == NL80211_BAND_6GHZ)
			sband = &mphy->sband_6g.sband;
		else
			sband = &mphy->sband_2g.sband;

		rate.legacy = sband->bitrates[rate.mcs].bitrate;
		break;
	case MT76_TM_TX_MODE_HT:
		/* HT folds the stream count into the MCS index (8 per NSS) */
		rate.mcs += rate.nss * 8;
		flags |= RATE_INFO_FLAGS_MCS;

		if (td->tx_rate_sgi)
			flags |= RATE_INFO_FLAGS_SHORT_GI;
		break;
	case MT76_TM_TX_MODE_VHT:
		flags |= RATE_INFO_FLAGS_VHT_MCS;

		if (td->tx_rate_sgi)
			flags |= RATE_INFO_FLAGS_SHORT_GI;
		break;
	case MT76_TM_TX_MODE_HE_SU:
	case MT76_TM_TX_MODE_HE_EXT_SU:
	case MT76_TM_TX_MODE_HE_TB:
	case MT76_TM_TX_MODE_HE_MU:
		rate.he_gi = td->tx_rate_sgi;
		flags |= RATE_INFO_FLAGS_HE_MCS;
		break;
	default:
		break;
	}
	rate.flags = flags;

	switch (mphy->chandef.width) {
	case NL80211_CHAN_WIDTH_160:
	case NL80211_CHAN_WIDTH_80P80:
		rate.bw = RATE_INFO_BW_160;
		break;
	case NL80211_CHAN_WIDTH_80:
		rate.bw = RATE_INFO_BW_80;
		break;
	case NL80211_CHAN_WIDTH_40:
		rate.bw = RATE_INFO_BW_40;
		break;
	default:
		rate.bw = RATE_INFO_BW_20;
		break;
	}

	/* cfg80211 reports the bitrate in 100 kbit/s units:
	 * bytes = (bitrate * 100000 b/s) * (tx_time / 1e6 s) / 8
	 *       = bitrate * tx_time / 10 / 8
	 */
	bitrate = cfg80211_calculate_bitrate(&rate);
	tx_len = bitrate * tx_time / 10 / 8;

	ret = mt76_testmode_alloc_skb(phy->mt76, tx_len);
	if (ret)
		return ret;

	return 0;
}
| 334 | |
| 335 | static void |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 336 | besra_tm_reg_backup_restore(struct besra_phy *phy) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 337 | { |
| 338 | int n_regs = ARRAY_SIZE(reg_backup_list); |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 339 | struct besra_dev *dev = phy->dev; |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 340 | u32 *b = phy->test.reg_backup; |
| 341 | int i; |
| 342 | |
| 343 | REG_BAND_IDX(reg_backup_list[0], AGG_PCR0, 0); |
| 344 | REG_BAND_IDX(reg_backup_list[1], AGG_PCR0, 1); |
| 345 | REG_BAND_IDX(reg_backup_list[2], AGG_AWSCR0, 0); |
| 346 | REG_BAND_IDX(reg_backup_list[3], AGG_AWSCR0, 1); |
| 347 | REG_BAND_IDX(reg_backup_list[4], AGG_AWSCR0, 2); |
| 348 | REG_BAND_IDX(reg_backup_list[5], AGG_AWSCR0, 3); |
| 349 | REG_BAND(reg_backup_list[6], AGG_MRCR); |
| 350 | REG_BAND(reg_backup_list[7], TMAC_TFCR0); |
| 351 | REG_BAND(reg_backup_list[8], TMAC_TCR0); |
| 352 | REG_BAND(reg_backup_list[9], AGG_ATCR1); |
| 353 | REG_BAND(reg_backup_list[10], AGG_ATCR3); |
| 354 | REG_BAND(reg_backup_list[11], TMAC_TRCR0); |
| 355 | REG_BAND(reg_backup_list[12], TMAC_ICR0); |
| 356 | REG_BAND_IDX(reg_backup_list[13], ARB_DRNGR0, 0); |
| 357 | REG_BAND_IDX(reg_backup_list[14], ARB_DRNGR0, 1); |
| 358 | REG_BAND(reg_backup_list[15], WF_RFCR); |
| 359 | REG_BAND(reg_backup_list[16], WF_RFCR1); |
| 360 | |
| 361 | if (phy->mt76->test.state == MT76_TM_STATE_OFF) { |
| 362 | for (i = 0; i < n_regs; i++) |
| 363 | mt76_wr(dev, reg_backup_list[i].band[phy->band_idx], b[i]); |
| 364 | return; |
| 365 | } |
| 366 | |
| 367 | if (!b) { |
| 368 | b = devm_kzalloc(dev->mt76.dev, 4 * n_regs, GFP_KERNEL); |
| 369 | if (!b) |
| 370 | return; |
| 371 | |
| 372 | phy->test.reg_backup = b; |
| 373 | for (i = 0; i < n_regs; i++) |
| 374 | b[i] = mt76_rr(dev, reg_backup_list[i].band[phy->band_idx]); |
| 375 | } |
| 376 | |
| 377 | mt76_clear(dev, MT_AGG_PCR0(phy->band_idx, 0), MT_AGG_PCR0_MM_PROT | |
| 378 | MT_AGG_PCR0_GF_PROT | MT_AGG_PCR0_ERP_PROT | |
| 379 | MT_AGG_PCR0_VHT_PROT | MT_AGG_PCR0_BW20_PROT | |
| 380 | MT_AGG_PCR0_BW40_PROT | MT_AGG_PCR0_BW80_PROT); |
| 381 | mt76_set(dev, MT_AGG_PCR0(phy->band_idx, 0), MT_AGG_PCR0_PTA_WIN_DIS); |
| 382 | |
| 383 | mt76_wr(dev, MT_AGG_PCR0(phy->band_idx, 1), MT_AGG_PCR1_RTS0_NUM_THRES | |
| 384 | MT_AGG_PCR1_RTS0_LEN_THRES); |
| 385 | |
| 386 | mt76_clear(dev, MT_AGG_MRCR(phy->band_idx), MT_AGG_MRCR_BAR_CNT_LIMIT | |
| 387 | MT_AGG_MRCR_LAST_RTS_CTS_RN | MT_AGG_MRCR_RTS_FAIL_LIMIT | |
| 388 | MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT); |
| 389 | |
| 390 | mt76_rmw(dev, MT_AGG_MRCR(phy->band_idx), MT_AGG_MRCR_RTS_FAIL_LIMIT | |
| 391 | MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT, |
| 392 | FIELD_PREP(MT_AGG_MRCR_RTS_FAIL_LIMIT, 1) | |
| 393 | FIELD_PREP(MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT, 1)); |
| 394 | |
| 395 | mt76_wr(dev, MT_TMAC_TFCR0(phy->band_idx), 0); |
| 396 | mt76_clear(dev, MT_TMAC_TCR0(phy->band_idx), MT_TMAC_TCR0_TBTT_STOP_CTRL); |
| 397 | |
| 398 | /* config rx filter for testmode rx */ |
| 399 | mt76_wr(dev, MT_WF_RFCR(phy->band_idx), 0xcf70a); |
| 400 | mt76_wr(dev, MT_WF_RFCR1(phy->band_idx), 0); |
| 401 | } |
| 402 | |
/* Bring the phy into (en=true) or out of (en=false) testmode: toggle the
 * firmware testmode state, back up / restore the MAC registers, flip the
 * MAC tx/rx path, and add/remove the monitor vif's BSS and station
 * entries.  No-op unless the phy is running.
 */
static void
besra_tm_init(struct besra_phy *phy, bool en)
{
	struct besra_dev *dev = phy->dev;

	if (!test_bit(MT76_STATE_RUNNING, &phy->mt76->state))
		return;

	/* TODO: need check */
	/* besra_mcu_set_sku_en(phy, !en); */

	besra_tm_mode_ctrl(dev, en);
	besra_tm_reg_backup_restore(phy);
	/* normal MAC tx/rx is the inverse of testmode being enabled */
	besra_tm_set_trx(phy, TM_MAC_TXRX, !en);

	besra_mcu_add_bss_info(phy, phy->monitor_vif, en);
	besra_mcu_add_sta(dev, phy->monitor_vif, NULL, en);

	/* on disable, return the TAM arbiter to normal operation */
	if (!en)
		besra_tm_set_tam_arb(phy, en, 0);
}
| 424 | |
| 425 | static void |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 426 | besra_tm_update_channel(struct besra_phy *phy) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 427 | { |
| 428 | mutex_unlock(&phy->dev->mt76.mutex); |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 429 | besra_set_channel(phy); |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 430 | mutex_lock(&phy->dev->mt76.mutex); |
| 431 | |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 432 | besra_mcu_set_chan_info(phy, UNI_CHANNEL_RX_PATH); |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 433 | } |
| 434 | |
/* Start/stop testmode frame transmission: pick the spatial extension index
 * from the tx antenna mask, translate duty-cycle settings into ipg/tx_time,
 * program inter-packet gap and frame length, then enable the MAC tx path.
 */
static void
besra_tm_set_tx_frames(struct besra_phy *phy, bool en)
{
	/* antenna-mask -> spatial extension index lookup table */
	static const u8 spe_idx_map[] = {0, 0, 1, 0, 3, 2, 4, 0,
					 9, 8, 6, 10, 16, 12, 18, 0};
	struct mt76_testmode_data *td = &phy->mt76->test;
	struct besra_dev *dev = phy->dev;
	struct ieee80211_tx_info *info;
	u8 duty_cycle = td->tx_duty_cycle;
	u32 tx_time = td->tx_time;
	u32 ipg = td->tx_ipg;
	u8 phy_idx = besra_get_phy_id(phy);
	u16 chainshift;

	/* stop rx and flush pending tx before reconfiguring */
	besra_tm_set_trx(phy, TM_MAC_RX_RXV, false);
	besra_tm_clean_hwq(phy, dev->mt76.global_wcid.idx);

	if (en) {
		besra_tm_update_channel(phy);

		if (td->tx_spe_idx) {
			phy->test.spe_idx = td->tx_spe_idx;
		} else {
			u8 tx_ant = td->tx_antenna_mask;

			/* shift the chip-wide antenna mask down to this
			 * phy's first chain before the table lookup
			 */
			if (phy_idx == MT_EXT_PHY)
				chainshift = dev->chain_shift_ext;
			else if (phy_idx == MT_TRI_PHY)
				chainshift = dev->chain_shift_tri;
			else
				chainshift = 0;

			tx_ant >>= chainshift;
			phy->test.spe_idx = spe_idx_map[tx_ant];
		}
	}

	besra_tm_set_tam_arb(phy, en,
			     td->tx_rate_mode == MT76_TM_TX_MODE_HE_MU);

	/* if all three params are set, duty_cycle will be ignored */
	if (duty_cycle && tx_time && !ipg) {
		ipg = tx_time * 100 / duty_cycle - tx_time;
	} else if (duty_cycle && !tx_time && ipg) {
		if (duty_cycle < 100)
			tx_time = duty_cycle * ipg / (100 - duty_cycle);
	}

	besra_tm_set_ipg_params(phy, ipg, td->tx_rate_mode);
	besra_tm_set_tx_len(phy, tx_time);

	/* bound queued frames so at most half of MT76_TM_TIMEOUT is
	 * outstanding at the configured gap
	 */
	if (ipg)
		td->tx_queued_limit = MT76_TM_TIMEOUT * 1000000 / ipg / 2;

	if (!en || !td->tx_skb)
		return;

	info = IEEE80211_SKB_CB(td->tx_skb);
	info->control.vif = phy->monitor_vif;

	besra_tm_set_trx(phy, TM_MAC_TX, en);
}
| 497 | |
| 498 | static void |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 499 | besra_tm_set_rx_frames(struct besra_phy *phy, bool en) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 500 | { |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 501 | besra_tm_set_trx(phy, TM_MAC_RX_RXV, false); |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 502 | |
| 503 | if (en) { |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 504 | struct besra_dev *dev = phy->dev; |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 505 | |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 506 | besra_tm_update_channel(phy); |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 507 | |
| 508 | /* read-clear */ |
| 509 | mt76_rr(dev, MT_MIB_SDR3(phy->band_idx)); |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 510 | besra_tm_set_trx(phy, TM_MAC_RX_RXV, en); |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 511 | } |
| 512 | } |
| 513 | |
| 514 | static int |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 515 | besra_tm_rf_switch_mode(struct besra_dev *dev, u32 oper) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 516 | { |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 517 | struct besra_tm_rf_test req = { |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 518 | .op.op_mode = cpu_to_le32(oper), |
| 519 | }; |
| 520 | |
| 521 | return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req, |
| 522 | sizeof(req), true); |
| 523 | } |
| 524 | |
/* Start/stop continuous transmission through the firmware RF_TEST command,
 * encoding the current channel, bandwidth and configured testmode rate into
 * the tx_cont request.  Returns -EINVAL for channel widths or rate modes
 * the command cannot express.
 */
static int
besra_tm_set_tx_cont(struct besra_phy *phy, bool en)
{
#define TX_CONT_START	0x05
#define TX_CONT_STOP	0x06
	struct besra_dev *dev = phy->dev;
	struct cfg80211_chan_def *chandef = &phy->mt76->chandef;
	int freq1 = ieee80211_frequency_to_channel(chandef->center_freq1);
	struct mt76_testmode_data *td = &phy->mt76->test;
	u32 func_idx = en ? TX_CONT_START : TX_CONT_STOP;
	u8 rate_idx = td->tx_rate_idx, mode;
	u16 rateval;
	struct besra_tm_rf_test req = {
		.action = 1,
		.icap_len = 120,
		.op.rf.func_idx = cpu_to_le32(func_idx),
	};
	struct tm_tx_cont *tx_cont = &req.op.rf.param.tx_cont;

	tx_cont->control_ch = chandef->chan->hw_value;
	tx_cont->center_ch = freq1;
	tx_cont->tx_ant = td->tx_antenna_mask;
	tx_cont->band = phy->band_idx;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		tx_cont->bw = CMD_CBW_40MHZ;
		break;
	case NL80211_CHAN_WIDTH_80:
		tx_cont->bw = CMD_CBW_80MHZ;
		break;
	case NL80211_CHAN_WIDTH_80P80:
		tx_cont->bw = CMD_CBW_8080MHZ;
		break;
	case NL80211_CHAN_WIDTH_160:
		tx_cont->bw = CMD_CBW_160MHZ;
		break;
	case NL80211_CHAN_WIDTH_5:
		tx_cont->bw = CMD_CBW_5MHZ;
		break;
	case NL80211_CHAN_WIDTH_10:
		tx_cont->bw = CMD_CBW_10MHZ;
		break;
	case NL80211_CHAN_WIDTH_20:
		tx_cont->bw = CMD_CBW_20MHZ;
		break;
	/* NOTE(review): identical to WIDTH_20 above - could be merged
	 * into a shared fallthrough case.
	 */
	case NL80211_CHAN_WIDTH_20_NOHT:
		tx_cont->bw = CMD_CBW_20MHZ;
		break;
	default:
		return -EINVAL;
	}

	if (!en) {
		/* stop: the firmware only needs to know which band to halt */
		req.op.rf.param.func_data = cpu_to_le32(phy->band_idx);
		goto out;
	}

	if (td->tx_rate_mode <= MT76_TM_TX_MODE_OFDM) {
		/* map the legacy rate index to the hardware rate code */
		struct ieee80211_supported_band *sband;
		u8 idx = rate_idx;

		if (chandef->chan->band == NL80211_BAND_5GHZ)
			sband = &phy->mt76->sband_5g.sband;
		else if (chandef->chan->band == NL80211_BAND_6GHZ)
			sband = &phy->mt76->sband_6g.sband;
		else
			sband = &phy->mt76->sband_2g.sband;

		/* OFDM entries follow the CCK entries in the rate table */
		if (td->tx_rate_mode == MT76_TM_TX_MODE_OFDM)
			idx += 4;
		rate_idx = sband->bitrates[idx].hw_value & 0xff;
	}

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		mode = MT_PHY_TYPE_CCK;
		break;
	case MT76_TM_TX_MODE_OFDM:
		mode = MT_PHY_TYPE_OFDM;
		break;
	case MT76_TM_TX_MODE_HT:
		mode = MT_PHY_TYPE_HT;
		break;
	case MT76_TM_TX_MODE_VHT:
		mode = MT_PHY_TYPE_VHT;
		break;
	case MT76_TM_TX_MODE_HE_SU:
		mode = MT_PHY_TYPE_HE_SU;
		break;
	case MT76_TM_TX_MODE_HE_EXT_SU:
		mode = MT_PHY_TYPE_HE_EXT_SU;
		break;
	case MT76_TM_TX_MODE_HE_TB:
		mode = MT_PHY_TYPE_HE_TB;
		break;
	case MT76_TM_TX_MODE_HE_MU:
		mode = MT_PHY_TYPE_HE_MU;
		break;
	default:
		return -EINVAL;
	}

	/* rateval: phy mode in the upper bits, rate index in the low 6 */
	rateval = mode << 6 | rate_idx;
	tx_cont->rateval = cpu_to_le16(rateval);

out:
	if (!en) {
		int ret;

		ret = mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
					sizeof(req), true);
		if (ret)
			return ret;

		/* leave RF test mode once continuous tx has stopped */
		return besra_tm_rf_switch_mode(dev, RF_OPER_NORMAL);
	}

	besra_tm_rf_switch_mode(dev, RF_OPER_RF_TEST);
	besra_tm_update_channel(phy);

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
				 sizeof(req), true);
}
| 649 | |
| 650 | static void |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 651 | besra_tm_update_params(struct besra_phy *phy, u32 changed) |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 652 | { |
| 653 | struct mt76_testmode_data *td = &phy->mt76->test; |
| 654 | bool en = phy->mt76->test.state != MT76_TM_STATE_OFF; |
| 655 | |
| 656 | if (changed & BIT(TM_CHANGED_FREQ_OFFSET)) |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 657 | besra_tm_set_freq_offset(phy, en, en ? td->freq_offset : 0); |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 658 | if (changed & BIT(TM_CHANGED_TXPOWER)) |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 659 | besra_tm_set_tx_power(phy); |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 660 | } |
| 661 | |
/* mt76 testmode .set_state hook: transition between OFF/IDLE/TX_FRAMES/
 * RX_FRAMES/TX_CONT states, tearing down the previous mode and starting
 * the new one.  When crossing the OFF<->IDLE boundary, re-apply every
 * parameter the user configured so far.  Always returns 0.
 */
static int
besra_tm_set_state(struct mt76_phy *mphy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &mphy->test;
	struct besra_phy *phy = mphy->priv;
	enum mt76_testmode_state prev_state = td->state;

	/* the helpers below read the new state through phy->mt76->test */
	mphy->test.state = state;

	if (prev_state == MT76_TM_STATE_TX_FRAMES ||
	    state == MT76_TM_STATE_TX_FRAMES)
		besra_tm_set_tx_frames(phy, state == MT76_TM_STATE_TX_FRAMES);
	else if (prev_state == MT76_TM_STATE_RX_FRAMES ||
		 state == MT76_TM_STATE_RX_FRAMES)
		besra_tm_set_rx_frames(phy, state == MT76_TM_STATE_RX_FRAMES);
	else if (prev_state == MT76_TM_STATE_TX_CONT ||
		 state == MT76_TM_STATE_TX_CONT)
		besra_tm_set_tx_cont(phy, state == MT76_TM_STATE_TX_CONT);
	else if (prev_state == MT76_TM_STATE_OFF ||
		 state == MT76_TM_STATE_OFF)
		besra_tm_init(phy, !(state == MT76_TM_STATE_OFF));

	if ((state == MT76_TM_STATE_IDLE &&
	     prev_state == MT76_TM_STATE_OFF) ||
	    (state == MT76_TM_STATE_OFF &&
	     prev_state == MT76_TM_STATE_IDLE)) {
		u32 changed = 0;
		int i;

		/* collect every attribute previously set via netlink */
		for (i = 0; i < ARRAY_SIZE(tm_change_map); i++) {
			u16 cur = tm_change_map[i];

			if (td->param_set[cur / 32] & BIT(cur % 32))
				changed |= BIT(i);
		}

		besra_tm_update_params(phy, changed);
	}

	return 0;
}
| 703 | |
| 704 | static int |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 705 | besra_tm_set_params(struct mt76_phy *mphy, struct nlattr **tb, |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 706 | enum mt76_testmode_state new_state) |
| 707 | { |
| 708 | struct mt76_testmode_data *td = &mphy->test; |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 709 | struct besra_phy *phy = mphy->priv; |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 710 | u32 changed = 0; |
| 711 | int i; |
| 712 | |
| 713 | BUILD_BUG_ON(NUM_TM_CHANGED >= 32); |
| 714 | |
| 715 | if (new_state == MT76_TM_STATE_OFF || |
| 716 | td->state == MT76_TM_STATE_OFF) |
| 717 | return 0; |
| 718 | |
| 719 | if (td->tx_antenna_mask & ~mphy->chainmask) |
| 720 | return -EINVAL; |
| 721 | |
| 722 | for (i = 0; i < ARRAY_SIZE(tm_change_map); i++) { |
| 723 | if (tb[tm_change_map[i]]) |
| 724 | changed |= BIT(i); |
| 725 | } |
| 726 | |
developer | 7800b8d | 2022-06-23 22:15:56 +0800 | [diff] [blame] | 727 | besra_tm_update_params(phy, changed); |
developer | b11a539 | 2022-03-31 00:34:47 +0800 | [diff] [blame] | 728 | |
| 729 | return 0; |
| 730 | } |
| 731 | |
/* mt76 testmode .dump_stats hook: report the last rx measurements
 * (frequency offset, per-chain RCPI, in-band/wide-band RSSI, SNR) as
 * nested netlink attributes, and fold the MIB FCS error counter into the
 * per-queue rx statistics.  Returns -ENOMEM if the message runs out of
 * room.
 */
static int
besra_tm_dump_stats(struct mt76_phy *mphy, struct sk_buff *msg)
{
	struct besra_phy *phy = mphy->priv;
	struct besra_dev *dev = phy->dev;
	enum mt76_rxq_id q;
	void *rx, *rssi;
	u16 fcs_err;
	int i;
	u32 cnt;

	rx = nla_nest_start(msg, MT76_TM_STATS_ATTR_LAST_RX);
	if (!rx)
		return -ENOMEM;

	if (nla_put_s32(msg, MT76_TM_RX_ATTR_FREQ_OFFSET, phy->test.last_freq_offset))
		return -ENOMEM;

	/* one u8 attribute per rx chain */
	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_RCPI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_rcpi); i++)
		if (nla_put_u8(msg, i, phy->test.last_rcpi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_IB_RSSI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_ib_rssi); i++)
		if (nla_put_s8(msg, i, phy->test.last_ib_rssi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_WB_RSSI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_wb_rssi); i++)
		if (nla_put_s8(msg, i, phy->test.last_wb_rssi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	if (nla_put_u8(msg, MT76_TM_RX_ATTR_SNR, phy->test.last_snr))
		return -ENOMEM;

	nla_nest_end(msg, rx);

	/* MT_MIB_SDR3 is read-clear, so this reads the delta since the
	 * previous dump (see besra_tm_set_rx_frames)
	 */
	cnt = mt76_rr(dev, MT_MIB_SDR3(phy->band_idx));
	fcs_err = FIELD_GET(MT_MIB_SDR3_FCS_ERR_MASK, cnt);

	q = phy->band_idx ? MT_RXQ_EXT : MT_RXQ_MAIN;
	/* NOTE(review): fcs_err is added to the packet count as well as the
	 * error count - presumably because FCS-errored frames never reach
	 * the normal rx accounting; confirm against the mt76 core.
	 */
	mphy->test.rx_stats.packets[q] += fcs_err;
	mphy->test.rx_stats.fcs_error[q] += fcs_err;

	return 0;
}
| 794 | |
/* mt76 testmode hooks exported to the core for the besra driver. */
const struct mt76_testmode_ops besra_testmode_ops = {
	.set_state = besra_tm_set_state,
	.set_params = besra_tm_set_params,
	.dump_stats = besra_tm_dump_stats,
};