/* Copyright (C) 2021-2022 Mediatek Inc. */
#define _GNU_SOURCE

#include <unl.h>

#include "atenl.h"

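/* Convert a hardware RCPI value to RSSI: RCPI = 2 * RSSI + 220. */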
#define to_rssi(_rcpi)	((_rcpi - 220) / 2)

struct atenl_nl_priv {
	struct atenl *an;
	struct unl unl;
	struct nl_msg *msg;
	int attr;
	void *res;
};

struct atenl_nl_ops {
	int set;
	int dump;
	int (*ops)(struct atenl *an, struct atenl_data *data,
		   struct atenl_nl_priv *nl_priv);
};

static struct nla_policy testdata_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_MTD_PART] = { .type = NLA_STRING },
	[MT76_TM_ATTR_MTD_OFFSET] = { .type = NLA_U32 },
	[MT76_TM_ATTR_IS_MAIN_PHY] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
	[MT76_TM_ATTR_STATS] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_PRECAL] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_PRECAL_INFO] = { .type = NLA_NESTED },
};

static struct nla_policy stats_policy[NUM_MT76_TM_STATS_ATTRS] = {
	[MT76_TM_STATS_ATTR_TX_PENDING] = { .type = NLA_U32 },
	[MT76_TM_STATS_ATTR_TX_QUEUED] = { .type = NLA_U32 },
	[MT76_TM_STATS_ATTR_TX_DONE] = { .type = NLA_U32 },
	[MT76_TM_STATS_ATTR_RX_PACKETS] = { .type = NLA_U64 },
	[MT76_TM_STATS_ATTR_RX_FCS_ERROR] = { .type = NLA_U64 },
};

static struct nla_policy rx_policy[NUM_MT76_TM_RX_ATTRS] = {
	[MT76_TM_RX_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
	[MT76_TM_RX_ATTR_RCPI] = { .type = NLA_NESTED },
	[MT76_TM_RX_ATTR_IB_RSSI] = { .type = NLA_NESTED },
	[MT76_TM_RX_ATTR_WB_RSSI] = { .type = NLA_NESTED },
	[MT76_TM_RX_ATTR_SNR] = { .type = NLA_U8 },
};

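/*
 * HQA passes the HE guard interval as a single index; each group below maps
 * that index, per TX mode, to the (sgi, tx_ltf) pair expected by the mt76
 * testmode interface (see the lookup in atenl_nl_tx()).
 */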
struct he_sgi {
	enum mt76_testmode_tx_mode tx_mode;
	u8 sgi;
	u8 tx_ltf;
};

#define HE_SGI_GROUP(_tx_mode, _sgi, _tx_ltf)	\
	{ .tx_mode = MT76_TM_TX_MODE_##_tx_mode, .sgi = _sgi, .tx_ltf = _tx_ltf }
static const struct he_sgi he_sgi_groups[] = {
	HE_SGI_GROUP(HE_SU, 0, 0),
	HE_SGI_GROUP(HE_SU, 0, 1),
	HE_SGI_GROUP(HE_SU, 1, 1),
	HE_SGI_GROUP(HE_SU, 2, 2),
	HE_SGI_GROUP(HE_SU, 0, 2),
	HE_SGI_GROUP(HE_EXT_SU, 0, 0),
	HE_SGI_GROUP(HE_EXT_SU, 0, 1),
	HE_SGI_GROUP(HE_EXT_SU, 1, 1),
	HE_SGI_GROUP(HE_EXT_SU, 2, 2),
	HE_SGI_GROUP(HE_EXT_SU, 0, 2),
	HE_SGI_GROUP(HE_TB, 1, 0),
	HE_SGI_GROUP(HE_TB, 1, 1),
	HE_SGI_GROUP(HE_TB, 2, 2),
	HE_SGI_GROUP(HE_MU, 0, 2),
	HE_SGI_GROUP(HE_MU, 0, 1),
	HE_SGI_GROUP(HE_MU, 1, 1),
	HE_SGI_GROUP(HE_MU, 2, 2),
};
#undef HE_SGI_GROUP

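/* Map an HQA/ATE PHY type to the corresponding mt76 testmode TX mode. */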
static u8 phy_type_to_attr(u8 phy_type)
{
	static const u8 phy_type_to_attr[] = {
		[ATENL_PHY_TYPE_CCK] = MT76_TM_TX_MODE_CCK,
		[ATENL_PHY_TYPE_OFDM] = MT76_TM_TX_MODE_OFDM,
		[ATENL_PHY_TYPE_HT] = MT76_TM_TX_MODE_HT,
		[ATENL_PHY_TYPE_HT_GF] = MT76_TM_TX_MODE_HT,
		[ATENL_PHY_TYPE_VHT] = MT76_TM_TX_MODE_VHT,
		[ATENL_PHY_TYPE_HE_SU] = MT76_TM_TX_MODE_HE_SU,
		[ATENL_PHY_TYPE_HE_EXT_SU] = MT76_TM_TX_MODE_HE_EXT_SU,
		[ATENL_PHY_TYPE_HE_TB] = MT76_TM_TX_MODE_HE_TB,
		[ATENL_PHY_TYPE_HE_MU] = MT76_TM_TX_MODE_HE_MU,
	};

	if (phy_type >= ARRAY_SIZE(phy_type_to_attr))
		return 0;

	return phy_type_to_attr[phy_type];
}

static void
atenl_set_attr_state(struct atenl *an, struct nl_msg *msg,
		     u8 band, enum mt76_testmode_state state)
{
	if (get_band_val(an, band, cur_state) == state)
		return;

	nla_put_u8(msg, MT76_TM_ATTR_STATE, state);
	set_band_val(an, band, cur_state, state);
}

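/*
 * Program the TX antenna mask. On MT7915 the mask is shifted by two bits
 * per band; MT7916 and MT7986 take the mask unchanged.
 */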
static void
atenl_set_attr_antenna(struct atenl *an, struct nl_msg *msg, u8 tx_antenna)
{
	if (!tx_antenna)
		return;
	if (is_mt7915(an))
		nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA,
			   tx_antenna << (2 * an->cur_band));
	else if (is_mt7916(an) || is_mt7986(an))
		nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, tx_antenna);
}

static int
atenl_nl_set_attr(struct atenl *an, struct atenl_data *data,
		  struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	u32 val = ntohl(*(u32 *)hdr->data);
	int attr = nl_priv->attr;
	void *ptr, *a;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	switch (attr) {
	case MT76_TM_ATTR_TX_ANTENNA:
		atenl_set_attr_antenna(an, msg, val);
		break;
	case MT76_TM_ATTR_FREQ_OFFSET:
		nla_put_u32(msg, attr, val);
		break;
	case MT76_TM_ATTR_TX_POWER:
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			return -ENOMEM;
		nla_put_u8(msg, 0, val);
		nla_nest_end(msg, a);
		break;
	default:
		nla_put_u8(msg, attr, val);
		break;
	}

	nla_nest_end(msg, ptr);

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}

static int
atenl_nl_set_cfg(struct atenl *an, struct atenl_data *data,
		 struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	enum atenl_cmd cmd = data->cmd;
	u32 *v = (u32 *)hdr->data;
	u8 type = ntohl(v[0]);
	u8 enable = ntohl(v[1]);
	void *ptr, *cfg;

	if (cmd == HQA_CMD_SET_TSSI) {
		type = 0;
		enable = 1;
	}

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	cfg = nla_nest_start(msg, MT76_TM_ATTR_CFG);
	if (!cfg)
		return -ENOMEM;

	if (nla_put_u8(msg, 0, type) ||
	    nla_put_u8(msg, 1, enable))
		return -EINVAL;

	nla_nest_end(msg, cfg);

	nla_nest_end(msg, ptr);

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}

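/*
 * HQA SET_TX: cache the MPDU length (or TX time) from the payload and pass
 * the three 802.11 addresses, located 36 bytes into the data, to the driver;
 * addresses flagged by use_default_addr() are replaced with a default MAC.
 */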
static int
atenl_nl_set_tx(struct atenl *an, struct atenl_data *data,
		struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	u32 *v = (u32 *)hdr->data;
	u8 *addr1 = hdr->data + 36;
	u8 *addr2 = addr1 + ETH_ALEN;
	u8 *addr3 = addr2 + ETH_ALEN;
	u8 def_mac[ETH_ALEN] = {0x00, 0x11, 0x22, 0x33, 0x44, 0x55};
	void *ptr, *a;

	if (get_band_val(an, an->cur_band, use_tx_time))
		set_band_val(an, an->cur_band, tx_time, ntohl(v[7]));
	else
		set_band_val(an, an->cur_band, tx_mpdu_len, ntohl(v[7]));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	a = nla_nest_start(msg, MT76_TM_ATTR_MAC_ADDRS);
	if (!a)
		return -ENOMEM;

	nla_put(msg, 0, ETH_ALEN, use_default_addr(addr1) ? def_mac : addr1);
	nla_put(msg, 1, ETH_ALEN, use_default_addr(addr2) ? def_mac : addr2);
	nla_put(msg, 2, ETH_ALEN, use_default_addr(addr3) ? def_mac : addr3);

	nla_nest_end(msg, a);

	nla_nest_end(msg, ptr);

	*(u32 *)(hdr->data + 2) = data->ext_id;

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}

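/*
 * HQA START_TX / STOP_TX: program the TX parameters (count, rate, power,
 * antenna or spatial extension index, HE GI/LTF) and switch the testmode
 * state to TX_FRAMES, or back to IDLE for STOP_TX. Bit 31 of the antenna
 * field selects a spatial extension (SPE) index instead of an antenna mask.
 */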
static int
atenl_nl_tx(struct atenl *an, struct atenl_data *data, struct atenl_nl_priv *nl_priv)
{
#define USE_SPE_IDX	BIT(31)
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	u32 *v = (u32 *)hdr->data;
	u8 band = ntohl(v[2]);
	void *ptr;
	int ret;

	if (band >= MAX_BAND_NUM)
		return -EINVAL;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	if (data->ext_cmd == HQA_EXT_CMD_STOP_TX) {
		atenl_set_attr_state(an, msg, band, MT76_TM_STATE_IDLE);
	} else {
		u32 tx_count = ntohl(v[3]);
		u8 tx_rate_mode = phy_type_to_attr(ntohl(v[4]));
		u8 aid = ntohl(v[11]);
		u8 sgi = ntohl(v[13]);
		u32 tx_antenna = ntohl(v[14]);
		void *a;

		if (sgi > 5)
			return -EINVAL;

		if (!tx_count)
			tx_count = 10000000;

		nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, tx_count);
		nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, ntohl(v[12]));
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, tx_rate_mode);
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, ntohl(v[5]));
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, ntohl(v[7]));
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, ntohl(v[8]));
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, ntohl(v[15]));

		if (get_band_val(an, band, use_tx_time))
			nla_put_u32(msg, MT76_TM_ATTR_TX_TIME,
				    get_band_val(an, band, tx_time));
		else
			nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH,
				    get_band_val(an, band, tx_mpdu_len));

		/* chips after MT7915 need to use at least wcid = 1 for TX */
		if (!is_mt7915(an) && !aid)
			aid = 1;
		nla_put_u8(msg, MT76_TM_ATTR_AID, aid);

		if (tx_antenna & USE_SPE_IDX) {
			nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX,
				   tx_antenna & ~USE_SPE_IDX);
		} else {
			nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, 0);
			atenl_set_attr_antenna(an, msg, tx_antenna);
		}

		if (tx_rate_mode >= MT76_TM_TX_MODE_HE_SU) {
			u8 ofs = sgi;
			size_t i;

			for (i = 0; i < ARRAY_SIZE(he_sgi_groups); i++)
				if (he_sgi_groups[i].tx_mode == tx_rate_mode)
					break;

			if ((i + ofs) >= ARRAY_SIZE(he_sgi_groups))
				return -EINVAL;

			sgi = he_sgi_groups[i + ofs].sgi;
			nla_put_u8(msg, MT76_TM_ATTR_TX_LTF,
				   he_sgi_groups[i + ofs].tx_ltf);
		}
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, sgi);

		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			return -ENOMEM;
		nla_put_u8(msg, 0, ntohl(v[6]));
		nla_nest_end(msg, a);

		atenl_set_attr_state(an, msg, band, MT76_TM_STATE_TX_FRAMES);
	}

	nla_nest_end(msg, ptr);

	ret = unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
	if (ret)
		return ret;

	*(u32 *)(hdr->data + 2) = data->ext_id;

	return 0;
}

static int
atenl_nl_rx(struct atenl *an, struct atenl_data *data, struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct atenl_band *anb = &an->anb[an->cur_band];
	struct nl_msg *msg = nl_priv->msg;
	u32 *v = (u32 *)hdr->data;
	u8 band = ntohl(v[2]);
	void *ptr;

	if (band >= MAX_BAND_NUM)
		return -EINVAL;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	if (data->ext_cmd == HQA_EXT_CMD_STOP_RX) {
		atenl_set_attr_state(an, msg, band, MT76_TM_STATE_IDLE);
	} else {
		v = (u32 *)(hdr->data + 18);

		atenl_set_attr_antenna(an, msg, ntohl(v[0]));
		nla_put_u8(msg, MT76_TM_ATTR_AID, ntohl(v[1]));
		atenl_set_attr_state(an, msg, band, MT76_TM_STATE_RX_FRAMES);

		anb->reset_rx_cnt = false;

		/* clear history buffer */
		memset(&anb->rx_stat, 0, sizeof(anb->rx_stat));
	}

	nla_nest_end(msg, ptr);

	*(u32 *)(hdr->data + 2) = data->ext_id;

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}

static int
atenl_off_ch_scan(struct atenl *an, struct atenl_data *data,
		  struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	u32 *v = (u32 *)hdr->data;
	u8 ch = ntohl(v[2]);
	u8 bw = ntohl(v[4]);
	u8 tx_path = ntohl(v[5]);
	u8 status = ntohl(v[6]);
	void *ptr;

	if (!status)
		ch = 0;	/* stop */

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_CH, ch);
	nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_CENTER_CH,
		   atenl_get_center_channel(bw, CH_BAND_5GHZ, ch));
	nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_BW, bw);
	nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_PATH, tx_path);

	nla_nest_end(msg, ptr);

	*(u32 *)(hdr->data + 2) = data->ext_id;

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}

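/*
 * Dump callback: parse the nested testdata statistics and store the counter
 * selected by nl_priv->attr (currently only TX_DONE) into nl_priv->res.
 */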
static int atenl_nl_dump_cb(struct nl_msg *msg, void *arg)
{
	struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
	struct nlattr *tb1[NUM_MT76_TM_ATTRS];
	struct nlattr *tb2[NUM_MT76_TM_STATS_ATTRS];
	struct nlattr *nl_attr;
	int attr = nl_priv->attr;
	u64 *res = nl_priv->res;

	nl_attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
	if (!nl_attr) {
		atenl_err("Testdata attribute not found\n");
		return NL_SKIP;
	}

	nla_parse_nested(tb1, MT76_TM_ATTR_MAX, nl_attr, testdata_policy);
	nla_parse_nested(tb2, MT76_TM_STATS_ATTR_MAX,
			 tb1[MT76_TM_ATTR_STATS], stats_policy);

	if (attr == MT76_TM_STATS_ATTR_TX_DONE)
		*res = nla_get_u32(tb2[MT76_TM_STATS_ATTR_TX_DONE]);

	return NL_SKIP;
}

static int
atenl_nl_dump_attr(struct atenl *an, struct atenl_data *data,
		   struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	void *ptr;
	u64 res = 0;

	nl_priv->res = (void *)&res;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;
	nla_put_flag(msg, MT76_TM_ATTR_STATS);
	nla_nest_end(msg, ptr);

	unl_genl_request(&nl_priv->unl, msg, atenl_nl_dump_cb, (void *)nl_priv);

	if (nl_priv->attr == MT76_TM_STATS_ATTR_TX_DONE)
		*(u32 *)(hdr->data + 2 + 4 * an->cur_band) = htonl(res);

	return 0;
}

static int atenl_nl_continuous_tx(struct atenl *an,
				  struct atenl_data *data,
				  struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	u32 *v = (u32 *)hdr->data;
	u8 band = ntohl(v[0]);
	bool enable = ntohl(v[1]);
	void *ptr;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	if (band >= MAX_BAND_NUM)
		return -EINVAL;

	if (!enable) {
		int phy = get_band_val(an, band, phy_idx);
		char cmd[64];

		atenl_set_attr_state(an, msg, band, MT76_TM_STATE_IDLE);
		nla_nest_end(msg, ptr);
		unl_genl_request(&nl_priv->unl, msg, NULL, NULL);

		sprintf(cmd, "iw dev mon%d del", phy);
		system(cmd);
		sprintf(cmd, "iw phy phy%d interface add mon%d type monitor", phy, phy);
		system(cmd);
		sprintf(cmd, "ifconfig mon%d up", phy);
		system(cmd);

		return 0;
	}

	if (get_band_val(an, band, rf_mode) != ATENL_RF_MODE_TEST)
		return 0;

	nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, ntohl(v[2]));
	nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, phy_type_to_attr(ntohl(v[3])));
	nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, ntohl(v[6]));

	atenl_dbg("%s: enable = %d, ant=%u, tx_rate_mode=%u, rate_idx=%u\n",
		  __func__, enable, ntohl(v[2]), ntohl(v[3]), ntohl(v[6]));

	atenl_set_attr_state(an, msg, band, MT76_TM_STATE_TX_CONT);

	nla_nest_end(msg, ptr);

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}

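/*
 * Build the HQA GET_RX_INFO response: four typed blocks (band, per-path,
 * per-user, common) are laid out in the reply buffer and then filled with
 * the packet counters and the per-chain RCPI/RSSI values parsed from the
 * testmode stats reply. Counters are reported as the difference since the
 * last query.
 */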
static int atenl_nl_get_rx_info_cb(struct nl_msg *msg, void *arg)
{
	struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
	struct atenl *an = nl_priv->an;
	struct atenl_band *anb = &an->anb[an->cur_band];
	struct atenl_data *data = nl_priv->res;
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct atenl_rx_info_hdr *rx_hdr;
	struct atenl_rx_info_band *rx_band;
	struct atenl_rx_info_user *rx_user;
	struct atenl_rx_info_path *rx_path;
	struct atenl_rx_info_comm *rx_comm;
	struct nlattr *tb1[NUM_MT76_TM_ATTRS];
	struct nlattr *tb2[NUM_MT76_TM_STATS_ATTRS];
	struct nlattr *tb3[NUM_MT76_TM_RX_ATTRS];
	struct nlattr *nl_attr, *cur;
	struct atenl_rx_stat rx_cur, rx_diff = {};
	u32 rcpi[4] = {};
	u32 type_num = htonl(4);
	s32 ib_rssi[4] = {}, wb_rssi[4] = {};
	u8 path = an->anb[an->cur_band].chainmask;
	u8 path_num = __builtin_popcount(path);
	u8 *buf = hdr->data + 2;
	int i, rem;

	*(u32 *)buf = type_num;
	buf += sizeof(type_num);

#define RX_PUT_HDR(_hdr, _type, _val, _size) do {	\
		_hdr->type = htonl(_type);		\
		_hdr->val = htonl(_val);		\
		_hdr->len = htonl(_size);		\
		buf += sizeof(*_hdr);			\
	} while (0)

	rx_hdr = (struct atenl_rx_info_hdr *)buf;
	RX_PUT_HDR(rx_hdr, 0, BIT(an->cur_band), sizeof(*rx_band));
	rx_band = (struct atenl_rx_info_band *)buf;
	buf += sizeof(*rx_band);

	rx_hdr = (struct atenl_rx_info_hdr *)buf;
	RX_PUT_HDR(rx_hdr, 1, path, path_num * sizeof(*rx_path));
	rx_path = (struct atenl_rx_info_path *)buf;
	buf += path_num * sizeof(*rx_path);

	rx_hdr = (struct atenl_rx_info_hdr *)buf;
	RX_PUT_HDR(rx_hdr, 2, GENMASK(15, 0), 16 * sizeof(*rx_user));
	rx_user = (struct atenl_rx_info_user *)buf;
	buf += 16 * sizeof(*rx_user);

	rx_hdr = (struct atenl_rx_info_hdr *)buf;
	RX_PUT_HDR(rx_hdr, 3, BIT(0), sizeof(*rx_comm));
	rx_comm = (struct atenl_rx_info_comm *)buf;
	buf += sizeof(*rx_comm);

	hdr->len = htons(buf - hdr->data);

	nl_attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
	if (!nl_attr) {
		atenl_err("Testdata attribute not found\n");
		return NL_SKIP;
	}

	nla_parse_nested(tb1, MT76_TM_ATTR_MAX, nl_attr, testdata_policy);
	nla_parse_nested(tb2, MT76_TM_STATS_ATTR_MAX,
			 tb1[MT76_TM_ATTR_STATS], stats_policy);

	rx_cur.total = nla_get_u64(tb2[MT76_TM_STATS_ATTR_RX_PACKETS]);
	rx_cur.err_cnt = nla_get_u64(tb2[MT76_TM_STATS_ATTR_RX_FCS_ERROR]);
	rx_cur.len_mismatch = nla_get_u64(tb2[MT76_TM_STATS_ATTR_RX_LEN_MISMATCH]);
	rx_cur.ok_cnt = rx_cur.total - rx_cur.err_cnt - rx_cur.len_mismatch;

	if (!anb->reset_rx_cnt ||
	    get_band_val(an, an->cur_band, cur_state) == MT76_TM_STATE_RX_FRAMES) {
#define RX_COUNT_DIFF(_field)	\
		rx_diff._field = (rx_cur._field) - (anb->rx_stat._field);
		RX_COUNT_DIFF(total);
		RX_COUNT_DIFF(err_cnt);
		RX_COUNT_DIFF(len_mismatch);
		RX_COUNT_DIFF(ok_cnt);
#undef RX_COUNT_DIFF

		memcpy(&anb->rx_stat, &rx_cur, sizeof(anb->rx_stat));
	}

	rx_band->mac_rx_mdrdy_cnt = htonl((u32)rx_diff.total);
	rx_band->mac_rx_fcs_err_cnt = htonl((u32)rx_diff.err_cnt);
	rx_band->mac_rx_fcs_ok_cnt = htonl((u32)rx_diff.ok_cnt);
	rx_band->mac_rx_len_mismatch = htonl((u32)rx_diff.len_mismatch);
	rx_user->fcs_error_cnt = htonl((u32)rx_diff.err_cnt);

	nla_parse_nested(tb3, MT76_TM_RX_ATTR_MAX,
			 tb2[MT76_TM_STATS_ATTR_LAST_RX], rx_policy);

	rx_user->freq_offset = htonl(nla_get_u32(tb3[MT76_TM_RX_ATTR_FREQ_OFFSET]));
	rx_user->snr = htonl(nla_get_u8(tb3[MT76_TM_RX_ATTR_SNR]));

	i = 0;
	nla_for_each_nested(cur, tb3[MT76_TM_RX_ATTR_RCPI], rem) {
		if (nla_len(cur) != 1 || i >= 4)
			break;

		rcpi[i++] = nla_get_u8(cur);
	}

	i = 0;
	nla_for_each_nested(cur, tb3[MT76_TM_RX_ATTR_IB_RSSI], rem) {
		if (nla_len(cur) != 1 || i >= 4)
			break;

		ib_rssi[i++] = (s8)nla_get_u8(cur);
	}

	i = 0;
	nla_for_each_nested(cur, tb3[MT76_TM_RX_ATTR_WB_RSSI], rem) {
		if (nla_len(cur) != 1 || i >= 4)
			break;

		wb_rssi[i++] = (s8)nla_get_u8(cur);
	}

	for (i = 0; i < 4; i++) {
		struct atenl_rx_info_path *path = &rx_path[i];

		path->rcpi = htonl(rcpi[i]);
		path->rssi = htonl(to_rssi((u8)rcpi[i]));
		path->fagc_ib_rssi = htonl(ib_rssi[i]);
		path->fagc_wb_rssi = htonl(wb_rssi[i]);
	}

	return NL_SKIP;
}

static int atenl_nl_get_rx_info(struct atenl *an, struct atenl_data *data,
				struct atenl_nl_priv *nl_priv)
{
	struct nl_msg *msg = nl_priv->msg;
	void *ptr;

	nl_priv->an = an;
	nl_priv->res = (void *)data;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	nla_put_flag(msg, MT76_TM_ATTR_STATS);

	nla_nest_end(msg, ptr);

	return unl_genl_request(&nl_priv->unl, msg, atenl_nl_get_rx_info_cb,
				(void *)nl_priv);
}

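/*
 * HQA SET_RU: the payload carries up to eight RU segment descriptors
 * (11 u32 words each). Each segment is programmed through its own testmode
 * message; the first one also resets the state to IDLE. The RU allocation
 * value is printed as hex and re-parsed as base 2, i.e. each hex digit of
 * the incoming value is expected to be 0 or 1.
 */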
static int
atenl_nl_set_ru(struct atenl *an, struct atenl_data *data,
		struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg;
	u32 *v = (u32 *)(hdr->data + 4);
	u32 seg0_num = ntohl(v[0]);	/* v[1] seg1_num unused */
	void *ptr;
	int i, ret;

	if (seg0_num > 8)
		return -EINVAL;

	for (i = 0, v = &v[2]; i < seg0_num; i++, v += 11) {
		u32 ru_alloc = ntohl(v[1]);
		u32 aid = ntohl(v[2]);
		u32 ru_idx = ntohl(v[3]);
		u32 mcs = ntohl(v[4]);
		u32 ldpc = ntohl(v[5]);
		u32 nss = ntohl(v[6]);
		u32 tx_length = ntohl(v[8]);
		char buf[10];

		if (unl_genl_init(&nl_priv->unl, "nl80211") < 0) {
			atenl_err("Failed to connect to nl80211\n");
			return 2;
		}

		msg = unl_genl_msg(&nl_priv->unl, NL80211_CMD_TESTMODE, false);
		nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));

		ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
		if (!ptr)
			return -ENOMEM;

		if (i == 0)
			atenl_set_attr_state(an, msg, an->cur_band, MT76_TM_STATE_IDLE);

		nla_put_u8(msg, MT76_TM_ATTR_AID, aid);
		nla_put_u8(msg, MT76_TM_ATTR_RU_IDX, ru_idx);
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, mcs);
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, ldpc);
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, nss);
		nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, tx_length);

		ret = snprintf(buf, sizeof(buf), "%x", ru_alloc);
		if (snprintf_error(sizeof(buf), ret))
			return -EINVAL;

		nla_put_u8(msg, MT76_TM_ATTR_RU_ALLOC, strtol(buf, NULL, 2));

		nla_nest_end(msg, ptr);

		unl_genl_request(&nl_priv->unl, msg, NULL, NULL);

		unl_free(&nl_priv->unl);
	}

	return 0;
}

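/*
 * Prepare implicit beamforming (iBF) calibration: open a dedicated nl80211
 * connection for the given band, program HT mode with the stored MCS and
 * antenna mask, and issue the TXBF INIT action.
 */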
static int
atenl_nl_ibf_init(struct atenl *an, u8 band)
{
	struct atenl_nl_priv nl_priv = {};
	struct nl_msg *msg;
	void *ptr, *a;
	int ret;

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return 2;
	}

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, band, phy_idx));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr) {
		ret = -ENOMEM;
		goto out;
	}

	nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, MT76_TM_TX_MODE_HT);
	nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, an->ibf_mcs);
	nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, an->ibf_ant);
	nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_INIT);

	a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
	if (!a) {
		ret = -ENOMEM;
		goto out;
	}
	nla_put_u16(msg, 0, 1);
	nla_nest_end(msg, a);

	nla_nest_end(msg, ptr);

	ret = unl_genl_request(&nl_priv.unl, msg, NULL, NULL);

out:
	unl_free(&nl_priv.unl);
	return ret;
}

static int
atenl_nl_ibf_e2p_update(struct atenl *an)
{
	struct atenl_nl_priv nl_priv = {};
	struct nl_msg *msg;
	void *ptr, *a;
	int ret;

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return 2;
	}

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr) {
		ret = -ENOMEM;
		goto out;
	}

	nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_E2P_UPDATE);
	a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
	if (!a) {
		ret = -ENOMEM;
		goto out;
	}
	nla_put_u16(msg, 0, 0);
	nla_nest_end(msg, a);

	nla_nest_end(msg, ptr);

	ret = unl_genl_request(&nl_priv.unl, msg, NULL, NULL);

out:
	unl_free(&nl_priv.unl);
	return ret;
}

static void
atenl_get_ibf_cal_result(struct atenl *an)
{
	u16 offset;

	if (an->adie_id == 0x7975)
		offset = 0x651;
	else if (an->adie_id == 0x7976)
		offset = 0x60a;
	else
		return;

	/* per group size = 40, for group 0-8 */
	atenl_eeprom_read_from_driver(an, offset, 40 * 9);
}

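/*
 * HQA IBF_SET_VAL: dispatch a TXBF sub-action. The first u32 of the payload
 * selects the action; the following eight u32 words carry parameters that
 * are truncated to 16 bits and passed through to the driver.
 */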
static int
atenl_nl_ibf_set_val(struct atenl *an, struct atenl_data *data,
		     struct atenl_nl_priv *nl_priv)
{
#define MT_IBF(_act)	MT76_TM_TXBF_ACT_##_act
	static const u8 bf_act_map[] = {
		[TXBF_ACT_IBF_PHASE_COMP] = MT_IBF(PHASE_COMP),
		[TXBF_ACT_IBF_PROF_UPDATE] = MT_IBF(IBF_PROF_UPDATE),
		[TXBF_ACT_EBF_PROF_UPDATE] = MT_IBF(EBF_PROF_UPDATE),
		[TXBF_ACT_IBF_PHASE_CAL] = MT_IBF(PHASE_CAL),
	};
#undef MT_IBF
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	u32 *v = (u32 *)(hdr->data + 4);
	u32 action = ntohl(v[0]);
	u16 val[8];
	u8 tmp_ant;
	void *ptr, *a;
	char cmd[64];
	int i;

	for (i = 0; i < 8; i++)
		val[i] = ntohl(v[i + 1]);

	atenl_dbg("%s: action = %u, val = %u, %u, %u, %u, %u\n",
		  __func__, action, val[0], val[1], val[2], val[3], val[4]);

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	switch (action) {
	case TXBF_ACT_CHANNEL:
		an->cur_band = val[1];
		/* sanity check to prevent a wrong band index from the script */
		if (val[0] > 14)
			an->cur_band = 1;
		atenl_nl_ibf_init(an, an->cur_band);
		atenl_set_channel(an, 0, an->cur_band, val[0], 0, 0);

		nla_put_u8(msg, MT76_TM_ATTR_AID, 0);
		nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_UPDATE_CH);
		a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
		if (!a)
			return -ENOMEM;
		nla_put_u16(msg, 0, 0);
		nla_nest_end(msg, a);
		break;
	case TXBF_ACT_MCS:
		tmp_ant = (1 << DIV_ROUND_UP(val[0], 8)) - 1 ?: 1;
		/* sometimes the correct band idx will be set after this action,
		 * so maintain a temp variable to allow mcs update in another action.
		 */
		an->ibf_mcs = val[0];
		an->ibf_ant = tmp_ant;
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, an->ibf_mcs);
		nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, an->ibf_ant);
		break;
	case TXBF_ACT_TX_ANT:
		nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, val[0]);
		break;
	case TXBF_ACT_RX_START:
		atenl_set_attr_state(an, msg, an->cur_band, MT76_TM_STATE_RX_FRAMES);
		break;
	case TXBF_ACT_RX_ANT:
		nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, val[0]);
		break;
	case TXBF_ACT_TX_PKT:
		nla_put_u8(msg, MT76_TM_ATTR_AID, val[1]);
		nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_TX_PREP);
		nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, 10000000);
		nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, 1024);
		a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
		if (!a)
			return -ENOMEM;

		for (i = 0; i < 5; i++)
			nla_put_u16(msg, i, val[i]);
		nla_nest_end(msg, a);

		atenl_set_attr_state(an, msg, an->cur_band, MT76_TM_STATE_TX_FRAMES);
		break;
	case TXBF_ACT_IBF_PHASE_COMP:
		nla_put_u8(msg, MT76_TM_ATTR_AID, 1);
		/* fall through */
	case TXBF_ACT_IBF_PROF_UPDATE:
	case TXBF_ACT_EBF_PROF_UPDATE:
	case TXBF_ACT_IBF_PHASE_CAL:
		nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, bf_act_map[action]);
		a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
		if (!a)
			return -ENOMEM;

		for (i = 0; i < 5; i++)
			nla_put_u16(msg, i, val[i]);
		nla_nest_end(msg, a);
		break;
	case TXBF_ACT_IBF_PHASE_E2P_UPDATE:
		atenl_nl_ibf_e2p_update(an);
		atenl_get_ibf_cal_result(an);

		nla_put_u8(msg, MT76_TM_ATTR_AID, 0);
		nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_INIT);

		a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
		if (!a)
			return -ENOMEM;
		nla_put_u16(msg, 0, 0);
		nla_nest_end(msg, a);
		break;
	case TXBF_ACT_INIT:
	case TXBF_ACT_POWER:
	default:
		break;
	}

	nla_nest_end(msg, ptr);

	*(u32 *)(hdr->data + 2) = data->ext_id;

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}

static int
atenl_nl_ibf_get_status(struct atenl *an, struct atenl_data *data,
			struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	u32 status = htonl(1);

	*(u32 *)(hdr->data + 2) = data->ext_id;
	memcpy(hdr->data + 6, &status, 4);

	return 0;
}

static int
atenl_nl_ibf_profile_update_all(struct atenl *an, struct atenl_data *data,
				struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg;
	void *ptr, *a;
	u32 *v = (u32 *)(hdr->data + 4);
	u16 pfmu_idx = ntohl(v[0]);
	int i;

	for (i = 0, v = &v[5]; i < 64; i++, v += 5) {
		int j;

		if (unl_genl_init(&nl_priv->unl, "nl80211") < 0) {
			atenl_err("Failed to connect to nl80211\n");
			return 2;
		}

		msg = unl_genl_msg(&nl_priv->unl, NL80211_CMD_TESTMODE, false);
		nla_put_u32(msg, NL80211_ATTR_WIPHY,
			    get_band_val(an, an->cur_band, phy_idx));

		ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
		if (!ptr)
			return -ENOMEM;

		nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_PROF_UPDATE_ALL);
		a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
		if (!a)
			return -ENOMEM;
		nla_put_u16(msg, 0, pfmu_idx);

		for (j = 0; j < 5; j++)
			nla_put_u16(msg, j + 1, ntohl(v[j]));
		nla_nest_end(msg, a);

		nla_nest_end(msg, ptr);

		unl_genl_request(&nl_priv->unl, msg, NULL, NULL);

		unl_free(&nl_priv->unl);
	}

	*(u32 *)(hdr->data + 2) = data->ext_id;

	return 0;
}

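/*
 * Dispatch tables: each HQA command either sets a single testmode attribute
 * (.set), dumps a statistics attribute (.dump), or runs a dedicated handler
 * (.ops). Extended commands use the second table, indexed by ext_cmd.
 */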
#define NL_OPS_GROUP(cmd, ...)	[HQA_CMD_##cmd] = { __VA_ARGS__ }
static const struct atenl_nl_ops nl_ops[] = {
	NL_OPS_GROUP(SET_TX_PATH, .set=MT76_TM_ATTR_TX_ANTENNA),
	NL_OPS_GROUP(SET_TX_POWER, .set=MT76_TM_ATTR_TX_POWER),
	NL_OPS_GROUP(SET_RX_PATH, .set=MT76_TM_ATTR_TX_ANTENNA),
	NL_OPS_GROUP(SET_FREQ_OFFSET, .set=MT76_TM_ATTR_FREQ_OFFSET),
	NL_OPS_GROUP(SET_CFG, .ops=atenl_nl_set_cfg),
	NL_OPS_GROUP(SET_TSSI, .ops=atenl_nl_set_cfg),
	NL_OPS_GROUP(CONTINUOUS_TX, .ops=atenl_nl_continuous_tx),
	NL_OPS_GROUP(GET_TX_INFO, .dump=MT76_TM_STATS_ATTR_TX_DONE),
	NL_OPS_GROUP(GET_RX_INFO, .ops=atenl_nl_get_rx_info, .dump=true),
	NL_OPS_GROUP(SET_RU, .ops=atenl_nl_set_ru),
};
#undef NL_OPS_GROUP

#define NL_OPS_EXT(cmd, ...)	[HQA_EXT_CMD_##cmd] = { __VA_ARGS__ }
static const struct atenl_nl_ops nl_ops_ext[] = {
	NL_OPS_EXT(SET_TX, .ops=atenl_nl_set_tx),
	NL_OPS_EXT(START_TX, .ops=atenl_nl_tx),
	NL_OPS_EXT(STOP_TX, .ops=atenl_nl_tx),
	NL_OPS_EXT(START_RX, .ops=atenl_nl_rx),
	NL_OPS_EXT(STOP_RX, .ops=atenl_nl_rx),
	NL_OPS_EXT(OFF_CH_SCAN, .ops=atenl_off_ch_scan),
	NL_OPS_EXT(IBF_SET_VAL, .ops=atenl_nl_ibf_set_val),
	NL_OPS_EXT(IBF_GET_STATUS, .ops=atenl_nl_ibf_get_status),
	NL_OPS_EXT(IBF_PROF_UPDATE_ALL, .ops=atenl_nl_ibf_profile_update_all),
};
#undef NL_OPS_EXT

int atenl_nl_process(struct atenl *an, struct atenl_data *data)
{
	struct atenl_nl_priv nl_priv = {};
	const struct atenl_nl_ops *ops;
	struct nl_msg *msg;
	int ret = 0;

	if (data->ext_cmd != 0)
		ops = &nl_ops_ext[data->ext_cmd];
	else
		ops = &nl_ops[data->cmd];

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return -1;
	}

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, !!ops->dump);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));
	nl_priv.msg = msg;

	if (ops->ops) {
		ret = ops->ops(an, data, &nl_priv);
	} else if (ops->dump) {
		nl_priv.attr = ops->dump;
		ret = atenl_nl_dump_attr(an, data, &nl_priv);
	} else {
		nl_priv.attr = ops->set;
		ret = atenl_nl_set_attr(an, data, &nl_priv);
	}

	if (ret)
		atenl_err("command process error: 0x%x (0x%x)\n", data->cmd_id, data->ext_id);

	unl_free(&nl_priv.unl);

	return ret;
}

int atenl_nl_process_many(struct atenl *an, struct atenl_data *data)
{
	struct atenl_nl_priv nl_priv = {};
	const struct atenl_nl_ops *ops;
	int ret = 0;

	if (data->ext_cmd != 0)
		ops = &nl_ops_ext[data->ext_cmd];
	else
		ops = &nl_ops[data->cmd];

	if (ops->ops)
		ret = ops->ops(an, data, &nl_priv);

	return ret;
}

int atenl_nl_set_state(struct atenl *an, u8 band,
		       enum mt76_testmode_state state)
{
	struct atenl_nl_priv nl_priv = {};
	struct nl_msg *msg;
	void *ptr;

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return 2;
	}

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, band, phy_idx));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	atenl_set_attr_state(an, msg, band, state);

	nla_nest_end(msg, ptr);

	unl_genl_request(&nl_priv.unl, msg, NULL, NULL);

	unl_free(&nl_priv.unl);

	return 0;
}

int atenl_nl_set_aid(struct atenl *an, u8 band, u8 aid)
{
	struct atenl_nl_priv nl_priv = {};
	struct nl_msg *msg;
	void *ptr;

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return 2;
	}

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, band, phy_idx));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	nla_put_u8(msg, MT76_TM_ATTR_AID, aid);

	nla_nest_end(msg, ptr);

	unl_genl_request(&nl_priv.unl, msg, NULL, NULL);

	unl_free(&nl_priv.unl);

	return 0;
}

static int atenl_nl_check_mtd_cb(struct nl_msg *msg, void *arg)
{
	struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
	struct atenl *an = nl_priv->an;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	struct nlattr *attr;

	attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
	if (!attr)
		return NL_SKIP;

	nla_parse_nested(tb, MT76_TM_ATTR_MAX, attr, testdata_policy);
	if (!tb[MT76_TM_ATTR_MTD_PART] || !tb[MT76_TM_ATTR_MTD_OFFSET])
		return NL_SKIP;

	an->mtd_part = strdup(nla_get_string(tb[MT76_TM_ATTR_MTD_PART]));
	an->mtd_offset = nla_get_u32(tb[MT76_TM_ATTR_MTD_OFFSET]);
	if (tb[MT76_TM_ATTR_IS_MAIN_PHY])
		an->is_main_phy = nla_get_u8(tb[MT76_TM_ATTR_IS_MAIN_PHY]);

	return NL_SKIP;
}

int atenl_nl_check_mtd(struct atenl *an)
{
	struct atenl_nl_priv nl_priv = { .an = an };
	struct nl_msg *msg;

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return 2;
	}

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, true);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));
	unl_genl_request(&nl_priv.unl, msg, atenl_nl_check_mtd_cb, (void *)&nl_priv);

	unl_free(&nl_priv.unl);

	return 0;
}

int atenl_nl_write_eeprom(struct atenl *an, u32 offset, u8 *val, int len)
{
	struct atenl_nl_priv nl_priv = {};
	struct nl_msg *msg;
	void *ptr, *a;
	int i;

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return 2;
	}

	if (len > 16)
		return -EINVAL;

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	nla_put_u8(msg, MT76_TM_ATTR_EEPROM_ACTION,
		   MT76_TM_EEPROM_ACTION_UPDATE_DATA);
	nla_put_u32(msg, MT76_TM_ATTR_EEPROM_OFFSET, offset);

	a = nla_nest_start(msg, MT76_TM_ATTR_EEPROM_VAL);
	if (!a)
		return -ENOMEM;

	for (i = 0; i < len; i++)
		if (nla_put_u8(msg, i, val[i]))
			goto out;

	nla_nest_end(msg, a);

	nla_nest_end(msg, ptr);

	unl_genl_request(&nl_priv.unl, msg, NULL, NULL);

	unl_free(&nl_priv.unl);

out:
	return 0;
}

int atenl_nl_write_efuse_all(struct atenl *an)
{
	struct atenl_nl_priv nl_priv = {};
	struct nl_msg *msg;
	void *ptr;

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return 2;
	}

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	nla_put_u8(msg, MT76_TM_ATTR_EEPROM_ACTION,
		   MT76_TM_EEPROM_ACTION_WRITE_TO_EFUSE);

	nla_nest_end(msg, ptr);

	unl_genl_request(&nl_priv.unl, msg, NULL, NULL);

	unl_free(&nl_priv.unl);

	return 0;
}

int atenl_nl_update_buffer_mode(struct atenl *an)
{
	struct atenl_nl_priv nl_priv = {};
	struct nl_msg *msg;
	void *ptr;

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return 2;
	}

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	nla_put_u8(msg, MT76_TM_ATTR_EEPROM_ACTION,
		   MT76_TM_EEPROM_ACTION_UPDATE_BUFFER_MODE);

	nla_nest_end(msg, ptr);

	unl_genl_request(&nl_priv.unl, msg, NULL, NULL);

	unl_free(&nl_priv.unl);

	return 0;
}

static int atenl_nl_precal_sync_from_driver_cb(struct nl_msg *msg, void *arg)
{
	struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
	struct atenl *an = nl_priv->an;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	struct nlattr *attr, *cur;
	int i, rem, prek_offset = nl_priv->attr;

	attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
	if (!attr)
		return NL_SKIP;

	nla_parse_nested(tb, MT76_TM_ATTR_MAX, attr, testdata_policy);

	if (!tb[MT76_TM_ATTR_PRECAL_INFO] && !tb[MT76_TM_ATTR_PRECAL]) {
		atenl_info("No pre-cal data or info!\n");
		return NL_SKIP;
	}

	if (tb[MT76_TM_ATTR_PRECAL_INFO]) {
		i = 0;
		nla_for_each_nested(cur, tb[MT76_TM_ATTR_PRECAL_INFO], rem) {
			an->cal_info[i] = (u32) nla_get_u32(cur);
			i++;
		}
		return NL_SKIP;
	}

	if (tb[MT76_TM_ATTR_PRECAL] && an->cal) {
		i = prek_offset;
		nla_for_each_nested(cur, tb[MT76_TM_ATTR_PRECAL], rem) {
			an->cal[i] = (u8) nla_get_u8(cur);
			i++;
		}
		return NL_SKIP;
	}
	atenl_info("No data found for pre-cal!\n");

	return NL_SKIP;
}

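/*
 * Issue one TESTMODE dump that requests either the pre-cal info block
 * (MT76_TM_ATTR_PRECAL_INFO) or one chunk of pre-cal data starting at
 * prek_offset; the callback above stores the result in an->cal_info or
 * an->cal.
 */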
static int
atenl_nl_precal_sync_partition(struct atenl_nl_priv *nl_priv, enum mt76_testmode_attr attr,
			       int prek_type, int prek_offset)
{
	int ret;
	void *ptr;
	struct nl_msg *msg;
	struct atenl *an = nl_priv->an;

	msg = unl_genl_msg(&(nl_priv->unl), NL80211_CMD_TESTMODE, true);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));
	nl_priv->msg = msg;
	nl_priv->attr = prek_offset;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	nla_put_flag(msg, attr);
	if (attr == MT76_TM_ATTR_PRECAL)
		nla_put_u8(msg, MT76_TM_ATTR_PRECAL_INFO, prek_type);
	nla_nest_end(msg, ptr);

	ret = unl_genl_request(&(nl_priv->unl), msg, atenl_nl_precal_sync_from_driver_cb, (void *)nl_priv);

	if (ret) {
		atenl_err("command process error!\n");
		return ret;
	}

	return 0;
}

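/*
 * Synchronise pre-calibration (group cal and per-channel DPD) data between
 * the driver and the EEPROM partition. SYNC operations first fetch the
 * pre-cal info header to learn the group/DPD sizes and per-band channel
 * counts, then pull the requested region in transmit_size chunks before
 * writing it out with atenl_eeprom_update_precal(); CLEAN operations only
 * clear the matching indicator bits.
 */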
int atenl_nl_precal_sync_from_driver(struct atenl *an, enum prek_ops ops)
{
#define GROUP_IND_MASK BIT(0)
#define DPD_IND_MASK GENMASK(3, 1)
	int ret;
	u32 i, times, group_size, dpd_size, total_size, transmit_size, offs;
	u32 dpd_per_chan_size, dpd_chan_num[3], total_chan_num;
	u32 size, base, base_idx, *size_ptr;
	u8 cal_indicator, *precal_info;
	struct atenl_nl_priv nl_priv = { .an = an };

	offs = an->eeprom_prek_offs;
	cal_indicator = an->eeprom_data[offs];

	if (cal_indicator) {
		precal_info = an->eeprom_data + an->eeprom_size;
		memcpy(an->cal_info, precal_info, PRE_CAL_INFO);
		group_size = an->cal_info[0];
		dpd_size = an->cal_info[1];
		total_size = group_size + dpd_size;
		dpd_chan_num[0] = (an->cal_info[2] >> DPD_INFO_6G_SHIFT) & DPD_INFO_MASK;
		dpd_chan_num[1] = (an->cal_info[2] >> DPD_INFO_5G_SHIFT) & DPD_INFO_MASK;
		dpd_chan_num[2] = (an->cal_info[2] >> DPD_INFO_2G_SHIFT) & DPD_INFO_MASK;
		dpd_per_chan_size = (an->cal_info[2] >> DPD_INFO_CH_SHIFT) & DPD_INFO_MASK;
		total_chan_num = dpd_chan_num[0] + dpd_chan_num[1] + dpd_chan_num[2];
	}

	switch (ops) {
	case PREK_SYNC_ALL:
		size_ptr = &total_size;
		base_idx = 0;
		goto start;
	case PREK_SYNC_GROUP:
		size_ptr = &group_size;
		base_idx = 0;
		goto start;
	case PREK_SYNC_DPD_6G:
		size_ptr = &dpd_size;
		base_idx = 0;
		goto start;
	case PREK_SYNC_DPD_5G:
		size_ptr = &dpd_size;
		base_idx = 1;
		goto start;
	case PREK_SYNC_DPD_2G:
		size_ptr = &dpd_size;
		base_idx = 2;

start:
		if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
			atenl_err("Failed to connect to nl80211\n");
			return 2;
		}

		ret = atenl_nl_precal_sync_partition(&nl_priv, MT76_TM_ATTR_PRECAL_INFO, 0, 0);
		if (ret || !an->cal_info)
			goto out;

		group_size = an->cal_info[0];
		dpd_size = an->cal_info[1];
		total_size = group_size + dpd_size;
		dpd_chan_num[0] = (an->cal_info[2] >> DPD_INFO_6G_SHIFT) & DPD_INFO_MASK;
		dpd_chan_num[1] = (an->cal_info[2] >> DPD_INFO_5G_SHIFT) & DPD_INFO_MASK;
		dpd_chan_num[2] = (an->cal_info[2] >> DPD_INFO_2G_SHIFT) & DPD_INFO_MASK;
		dpd_per_chan_size = (an->cal_info[2] >> DPD_INFO_CH_SHIFT) & DPD_INFO_MASK;
		total_chan_num = dpd_chan_num[0] + dpd_chan_num[1] + dpd_chan_num[2];
		transmit_size = an->cal_info[3];

		size = *size_ptr;
		size = (size_ptr == &dpd_size) ? (size / total_chan_num * dpd_chan_num[base_idx]) :
						 size;
		base = 0;
		for (i = 0; i < base_idx; i++) {
			base += dpd_chan_num[i] * dpd_per_chan_size * MT_EE_CAL_UNIT;
		}
		base += (size_ptr == &dpd_size) ? group_size : 0;

		if (!an->cal)
			an->cal = (u8 *) calloc(size, sizeof(u8));
		times = size / transmit_size + 1;
		for (i = 0; i < times; i++) {
			ret = atenl_nl_precal_sync_partition(&nl_priv, MT76_TM_ATTR_PRECAL, ops,
							     i * transmit_size);
			if (ret)
				goto out;
		}

		ret = atenl_eeprom_update_precal(an, base, size);
		break;
	case PREK_CLEAN_GROUP:
		if (!(cal_indicator & GROUP_IND_MASK))
			return 0;
		an->cal_info[4] = cal_indicator & (u8) ~GROUP_IND_MASK;
		ret = atenl_eeprom_update_precal(an, 0, group_size);
		break;
	case PREK_CLEAN_DPD:
		if (!(cal_indicator & DPD_IND_MASK))
			return 0;
		an->cal_info[4] = cal_indicator & (u8) ~DPD_IND_MASK;
		ret = atenl_eeprom_update_precal(an, group_size, dpd_size);
		break;
	default:
		break;
	}

out:
	unl_free(&nl_priv.unl);
	return ret;
}