1/* Copyright (C) 2021-2022 Mediatek Inc. */
2#define _GNU_SOURCE
3
4#include <unl.h>
5
6#include "atenl.h"
7
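/* RCPI is encoded as 2 * (RSSI + 110), so (RCPI - 220) / 2 recovers the RSSI in dBm */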
8#define to_rssi(_rcpi) ((_rcpi - 220) / 2)
9
10struct atenl_nl_priv {
11 struct atenl *an;
12 struct unl unl;
13 struct nl_msg *msg;
14 int attr;
15 void *res;
16};
17
18struct atenl_nl_ops {
19 int set;
20 int dump;
21 int (*ops)(struct atenl *an, struct atenl_data *data,
22 struct atenl_nl_priv *nl_priv);
23};
24
25static struct nla_policy testdata_policy[NUM_MT76_TM_ATTRS] = {
26 [MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
27 [MT76_TM_ATTR_MTD_PART] = { .type = NLA_STRING },
28 [MT76_TM_ATTR_MTD_OFFSET] = { .type = NLA_U32 },
 29 [MT76_TM_ATTR_BAND_IDX] = { .type = NLA_U8 },
 30 [MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
31 [MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
32 [MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
33 [MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
34 [MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
35 [MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
36 [MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
37 [MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
38 [MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
39 [MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
40 [MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
41 [MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
42 [MT76_TM_ATTR_STATS] = { .type = NLA_NESTED },
43 [MT76_TM_ATTR_PRECAL] = { .type = NLA_NESTED },
44 [MT76_TM_ATTR_PRECAL_INFO] = { .type = NLA_NESTED },
45};
46
47static struct nla_policy stats_policy[NUM_MT76_TM_STATS_ATTRS] = {
48 [MT76_TM_STATS_ATTR_TX_PENDING] = { .type = NLA_U32 },
49 [MT76_TM_STATS_ATTR_TX_QUEUED] = { .type = NLA_U32 },
50 [MT76_TM_STATS_ATTR_TX_DONE] = { .type = NLA_U32 },
51 [MT76_TM_STATS_ATTR_RX_PACKETS] = { .type = NLA_U64 },
52 [MT76_TM_STATS_ATTR_RX_FCS_ERROR] = { .type = NLA_U64 },
53};
54
55static struct nla_policy rx_policy[NUM_MT76_TM_RX_ATTRS] = {
56 [MT76_TM_RX_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
57 [MT76_TM_RX_ATTR_RCPI] = { .type = NLA_NESTED },
58 [MT76_TM_RX_ATTR_IB_RSSI] = { .type = NLA_NESTED },
59 [MT76_TM_RX_ATTR_WB_RSSI] = { .type = NLA_NESTED },
60 [MT76_TM_RX_ATTR_SNR] = { .type = NLA_U8 },
61};
62
63struct he_sgi {
64 enum mt76_testmode_tx_mode tx_mode;
65 u8 sgi;
66 u8 tx_ltf;
67};
68
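/*
 * Each HE TX mode has its own block of (sgi, tx_ltf) combinations below;
 * atenl_nl_tx() uses the SGI value from the HQA command as an offset into
 * the block that matches the selected TX mode.
 */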
69#define HE_SGI_GROUP(_tx_mode, _sgi, _tx_ltf) \
70 { .tx_mode = MT76_TM_TX_MODE_##_tx_mode, .sgi = _sgi, .tx_ltf = _tx_ltf }
71static const struct he_sgi he_sgi_groups[] = {
72 HE_SGI_GROUP(HE_SU, 0, 0),
73 HE_SGI_GROUP(HE_SU, 0, 1),
74 HE_SGI_GROUP(HE_SU, 1, 1),
75 HE_SGI_GROUP(HE_SU, 2, 2),
76 HE_SGI_GROUP(HE_SU, 0, 2),
77 HE_SGI_GROUP(HE_EXT_SU, 0, 0),
78 HE_SGI_GROUP(HE_EXT_SU, 0, 1),
79 HE_SGI_GROUP(HE_EXT_SU, 1, 1),
80 HE_SGI_GROUP(HE_EXT_SU, 2, 2),
81 HE_SGI_GROUP(HE_EXT_SU, 0, 2),
82 HE_SGI_GROUP(HE_TB, 1, 0),
83 HE_SGI_GROUP(HE_TB, 1, 1),
84 HE_SGI_GROUP(HE_TB, 2, 2),
85 HE_SGI_GROUP(HE_MU, 0, 2),
86 HE_SGI_GROUP(HE_MU, 0, 1),
87 HE_SGI_GROUP(HE_MU, 1, 1),
88 HE_SGI_GROUP(HE_MU, 2, 2),
89};
 90#undef HE_SGI_GROUP
91
92static u8 phy_type_to_attr(u8 phy_type)
93{
94 static const u8 phy_type_to_attr[] = {
95 [ATENL_PHY_TYPE_CCK] = MT76_TM_TX_MODE_CCK,
96 [ATENL_PHY_TYPE_OFDM] = MT76_TM_TX_MODE_OFDM,
97 [ATENL_PHY_TYPE_HT] = MT76_TM_TX_MODE_HT,
98 [ATENL_PHY_TYPE_HT_GF] = MT76_TM_TX_MODE_HT,
99 [ATENL_PHY_TYPE_VHT] = MT76_TM_TX_MODE_VHT,
100 [ATENL_PHY_TYPE_HE_SU] = MT76_TM_TX_MODE_HE_SU,
101 [ATENL_PHY_TYPE_HE_EXT_SU] = MT76_TM_TX_MODE_HE_EXT_SU,
102 [ATENL_PHY_TYPE_HE_TB] = MT76_TM_TX_MODE_HE_TB,
103 [ATENL_PHY_TYPE_HE_MU] = MT76_TM_TX_MODE_HE_MU,
 104 [ATENL_PHY_TYPE_EHT_SU] = MT76_TM_TX_MODE_EHT_SU,
105 [ATENL_PHY_TYPE_EHT_TRIG] = MT76_TM_TX_MODE_EHT_TRIG,
106 [ATENL_PHY_TYPE_EHT_MU] = MT76_TM_TX_MODE_EHT_MU,
 107 };
108
109 if (phy_type >= ARRAY_SIZE(phy_type_to_attr))
110 return 0;
111
112 return phy_type_to_attr[phy_type];
113}
114
115static void
116atenl_set_attr_state(struct atenl *an, struct nl_msg *msg,
117 u8 band, enum mt76_testmode_state state)
118{
119 if (get_band_val(an, band, cur_state) == state)
120 return;
121
122 nla_put_u8(msg, MT76_TM_ATTR_STATE, state);
123 set_band_val(an, band, cur_state, state);
124}
125
126static void
127atenl_set_attr_antenna(struct atenl *an, struct nl_msg *msg, u8 tx_antenna)
128{
129 if (!tx_antenna)
130 return;
 131
132 nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, tx_antenna);
133}
134
135static int
136atenl_nl_set_attr(struct atenl *an, struct atenl_data *data,
137 struct atenl_nl_priv *nl_priv)
138{
139 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
140 struct nl_msg *msg = nl_priv->msg;
141 u32 val = ntohl(*(u32 *)hdr->data);
142 int attr = nl_priv->attr;
143 void *ptr, *a;
144
145 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
146 if (!ptr)
147 return -ENOMEM;
148
149 switch (attr) {
150 case MT76_TM_ATTR_TX_ANTENNA:
151 atenl_set_attr_antenna(an, msg, val);
152 break;
153 case MT76_TM_ATTR_FREQ_OFFSET:
154 nla_put_u32(msg, attr, val);
155 break;
156 case MT76_TM_ATTR_TX_POWER:
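		/* MT76_TM_ATTR_TX_POWER is a nested array of u8 power values; the HQA value goes into the first slot */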
157 a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
158 if (!a)
159 return -ENOMEM;
160 nla_put_u8(msg, 0, val);
161 nla_nest_end(msg, a);
162 break;
163 default:
164 nla_put_u8(msg, attr, val);
165 break;
166 }
167
168 nla_nest_end(msg, ptr);
169
170 return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
171}
172
173static int
174atenl_nl_set_cfg(struct atenl *an, struct atenl_data *data,
175 struct atenl_nl_priv *nl_priv)
176{
177 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
178 struct nl_msg *msg = nl_priv->msg;
179 enum atenl_cmd cmd = data->cmd;
180 u32 *v = (u32 *)hdr->data;
181 u8 type = ntohl(v[0]);
182 u8 enable = ntohl(v[1]);
183 void *ptr, *cfg;
184
185 if (cmd == HQA_CMD_SET_TSSI) {
186 type = 0;
187 enable = 1;
188 }
189
190 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
191 if (!ptr)
192 return -ENOMEM;
193
194 cfg = nla_nest_start(msg, MT76_TM_ATTR_CFG);
195 if (!cfg)
196 return -ENOMEM;
197
198 if (nla_put_u8(msg, 0, type) ||
199 nla_put_u8(msg, 1, enable))
200 return -EINVAL;
201
202 nla_nest_end(msg, cfg);
203
204 nla_nest_end(msg, ptr);
205
206 return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
207}
208
209static int
210atenl_nl_set_tx(struct atenl *an, struct atenl_data *data,
211 struct atenl_nl_priv *nl_priv)
212{
213 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
214 struct nl_msg *msg = nl_priv->msg;
215 u32 *v = (u32 *)hdr->data;
216 u8 *addr1 = hdr->data + 36;
217 u8 *addr2 = addr1 + ETH_ALEN;
218 u8 *addr3 = addr2 + ETH_ALEN;
219 u8 def_mac[ETH_ALEN] = {0x00, 0x11, 0x22, 0x33, 0x44, 0x55};
220 void *ptr, *a;
221
222 if (get_band_val(an, an->cur_band, use_tx_time))
223 set_band_val(an, an->cur_band, tx_time, ntohl(v[7]));
224 else
225 set_band_val(an, an->cur_band, tx_mpdu_len, ntohl(v[7]));
226
227 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
228 if (!ptr)
229 return -ENOMEM;
230
231 a = nla_nest_start(msg, MT76_TM_ATTR_MAC_ADDRS);
232 if (!a)
233 return -ENOMEM;
234
235 nla_put(msg, 0, ETH_ALEN, use_default_addr(addr1) ? def_mac : addr1);
236 nla_put(msg, 1, ETH_ALEN, use_default_addr(addr2) ? def_mac : addr2);
237 nla_put(msg, 2, ETH_ALEN, use_default_addr(addr3) ? def_mac : addr3);
238
239 nla_nest_end(msg, a);
240
241 nla_nest_end(msg, ptr);
242
243 *(u32 *)(hdr->data + 2) = data->ext_id;
244
245 return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
246}
247
248static int
249atenl_nl_tx(struct atenl *an, struct atenl_data *data, struct atenl_nl_priv *nl_priv)
250{
251#define USE_SPE_IDX BIT(31)
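	/* bit 31 set: the antenna word carries a spatial extension (SPE) index instead of an antenna mask */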
252 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
253 struct nl_msg *msg = nl_priv->msg;
254 u32 *v = (u32 *)hdr->data;
255 u8 band = ntohl(v[2]);
256 void *ptr;
257 int ret;
258
259 if (band >= MAX_BAND_NUM)
260 return -EINVAL;
261
262 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
263 if (!ptr)
264 return -ENOMEM;
265
266 if (data->ext_cmd == HQA_EXT_CMD_STOP_TX) {
267 atenl_set_attr_state(an, msg, band, MT76_TM_STATE_IDLE);
268 } else {
269 u32 tx_count = ntohl(v[3]);
270 u8 tx_rate_mode = phy_type_to_attr(ntohl(v[4]));
271 u8 aid = ntohl(v[11]);
272 u8 sgi = ntohl(v[13]);
273 u32 tx_antenna = ntohl(v[14]);
274 void *a;
275
276 if (sgi > 5)
277 return -EINVAL;
278
279 if (!tx_count)
280 tx_count = 10000000;
281
282 nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, tx_count);
283 nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, ntohl(v[12]));
284 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, tx_rate_mode);
285 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, ntohl(v[5]));
286 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, ntohl(v[7]));
287 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, ntohl(v[8]));
288 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, ntohl(v[15]));
289
290 if (get_band_val(an, band, use_tx_time))
291 nla_put_u32(msg, MT76_TM_ATTR_TX_TIME,
292 get_band_val(an, band, tx_time));
 293 else if (get_band_val(an, band, tx_mpdu_len))
 294 nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH,
295 get_band_val(an, band, tx_mpdu_len));
296
 297 /* for chips newer than mt7915, TX needs to use at least wcid = 1 */
298 if (!is_mt7915(an) && !aid)
299 aid = 1;
300 nla_put_u8(msg, MT76_TM_ATTR_AID, aid);
301
302 if (tx_antenna & USE_SPE_IDX) {
303 nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX,
304 tx_antenna & ~USE_SPE_IDX);
305 } else {
306 nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, 0);
307 atenl_set_attr_antenna(an, msg, tx_antenna);
308 }
309
 310 if (!is_mt7996(an) && tx_rate_mode >= MT76_TM_TX_MODE_HE_SU) {
 311 u8 ofs = sgi;
312 size_t i;
313
314 for (i = 0; i < ARRAY_SIZE(he_sgi_groups); i++)
315 if (he_sgi_groups[i].tx_mode == tx_rate_mode)
316 break;
317
318 if ((i + ofs) >= ARRAY_SIZE(he_sgi_groups))
319 return -EINVAL;
320
321 sgi = he_sgi_groups[i + ofs].sgi;
322 nla_put_u8(msg, MT76_TM_ATTR_TX_LTF,
323 he_sgi_groups[i + ofs].tx_ltf);
324 }
325 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, sgi);
326
327 a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
328 if (!a)
329 return -ENOMEM;
330 nla_put_u8(msg, 0, ntohl(v[6]));
331 nla_nest_end(msg, a);
332
333 atenl_set_attr_state(an, msg, band, MT76_TM_STATE_TX_FRAMES);
334 }
335
336 nla_nest_end(msg, ptr);
337
338 ret = unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
339 if (ret)
340 return ret;
341
342 *(u32 *)(hdr->data + 2) = data->ext_id;
343
344 return 0;
345}
346
347static int
348atenl_nl_rx(struct atenl *an, struct atenl_data *data, struct atenl_nl_priv *nl_priv)
349{
350 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
351 struct atenl_band *anb = &an->anb[an->cur_band];
352 struct nl_msg *msg = nl_priv->msg;
353 u32 *v = (u32 *)hdr->data;
354 u8 band = ntohl(v[2]);
355 void *ptr;
356
357 if (band >= MAX_BAND_NUM)
358 return -EINVAL;
359
360 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
361 if (!ptr)
362 return -ENOMEM;
363
364 if (data->ext_cmd == HQA_EXT_CMD_STOP_RX) {
365 atenl_set_attr_state(an, msg, band, MT76_TM_STATE_IDLE);
366 } else {
367 v = (u32 *)(hdr->data + 18);
368
369 atenl_set_attr_antenna(an, msg, ntohl(v[0]));
 370 if (is_mt7996(an)) {
371 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE,
372 phy_type_to_attr(ntohl(v[2])));
373 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, ntohl(v[3]));
374 nla_put_u8(msg, MT76_TM_ATTR_AID, ntohl(v[4]));
375 } else {
376 nla_put_u8(msg, MT76_TM_ATTR_AID, ntohl(v[1]));
377 }
 378 atenl_set_attr_state(an, msg, band, MT76_TM_STATE_RX_FRAMES);
379
380 anb->reset_rx_cnt = false;
381
382 /* clear history buffer */
383 memset(&anb->rx_stat, 0, sizeof(anb->rx_stat));
384 }
385
386 nla_nest_end(msg, ptr);
387
388 *(u32 *)(hdr->data + 2) = data->ext_id;
389
390 return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
391}
392
393static int
394atenl_off_ch_scan(struct atenl *an, struct atenl_data *data,
395 struct atenl_nl_priv *nl_priv)
396{
397 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
398 struct nl_msg *msg = nl_priv->msg;
399 u32 *v = (u32 *)hdr->data;
400 u8 ch = ntohl(v[2]);
401 u8 bw = ntohl(v[4]);
402 u8 tx_path = ntohl(v[5]);
403 u8 status = ntohl(v[6]);
404 void *ptr;
405
406 if (!status)
407 ch = 0; /* stop */
408
409 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
410 if (!ptr)
411 return -ENOMEM;
412
413 nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_CH, ch);
414 nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_CENTER_CH,
415 atenl_get_center_channel(bw, CH_BAND_5GHZ, ch));
416 nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_BW, bw);
417 nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_PATH, tx_path);
418
419 nla_nest_end(msg, ptr);
420
421 *(u32 *)(hdr->data + 2) = data->ext_id;
422
 423 return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
424}
425
426static int atenl_nl_dump_cb(struct nl_msg *msg, void *arg)
427{
428 struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
429 struct nlattr *tb1[NUM_MT76_TM_ATTRS];
430 struct nlattr *tb2[NUM_MT76_TM_STATS_ATTRS];
431 struct nlattr *nl_attr;
432 int attr = nl_priv->attr;
433 u64 *res = nl_priv->res;
434
435 nl_attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
436 if (!nl_attr) {
437 atenl_err("Testdata attribute not found\n");
438 return NL_SKIP;
439 }
440
441 nla_parse_nested(tb1, MT76_TM_ATTR_MAX, nl_attr, testdata_policy);
442 nla_parse_nested(tb2, MT76_TM_STATS_ATTR_MAX,
443 tb1[MT76_TM_ATTR_STATS], stats_policy);
444
445 if (attr == MT76_TM_STATS_ATTR_TX_DONE)
446 *res = nla_get_u32(tb2[MT76_TM_STATS_ATTR_TX_DONE]);
447
448 return NL_SKIP;
449}
450
451static int
452atenl_nl_dump_attr(struct atenl *an, struct atenl_data *data,
453 struct atenl_nl_priv *nl_priv)
454{
455 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
456 struct nl_msg *msg = nl_priv->msg;
457 void *ptr;
458 u64 res = 0;
459
460 nl_priv->res = (void *)&res;
461
462 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
463 if (!ptr)
464 return -ENOMEM;
465 nla_put_flag(msg, MT76_TM_ATTR_STATS);
466 nla_nest_end(msg, ptr);
467
468 unl_genl_request(&nl_priv->unl, msg, atenl_nl_dump_cb, (void *)nl_priv);
469
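	/* GET_TX_INFO reply: one u32 tx-done counter per band, band N stored at offset 2 + 4 * N */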
470 if (nl_priv->attr == MT76_TM_STATS_ATTR_TX_DONE)
471 *(u32 *)(hdr->data + 2 + 4 * an->cur_band) = htonl(res);
472
473 return 0;
474}
475
476static int atenl_nl_continuous_tx(struct atenl *an,
477 struct atenl_data *data,
478 struct atenl_nl_priv *nl_priv)
479{
480 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
481 struct nl_msg *msg = nl_priv->msg;
482 u32 *v = (u32 *)hdr->data;
483 u8 band = ntohl(v[0]);
484 bool enable = ntohl(v[1]);
485 void *ptr;
486
487 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
488 if (!ptr)
489 return -ENOMEM;
490
491 if (band >= MAX_BAND_NUM)
492 return -EINVAL;
493
494 if (!enable) {
495 int phy = get_band_val(an, band, phy_idx);
496 char cmd[64];
497
498 atenl_set_attr_state(an, msg, band, MT76_TM_STATE_IDLE);
499 nla_nest_end(msg, ptr);
500 unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
501
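		/* leaving continuous TX: re-create and bring up mon%d so the phy returns to its normal monitor state */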
502 sprintf(cmd, "iw dev mon%d del", phy);
503 system(cmd);
504 sprintf(cmd, "iw phy phy%d interface add mon%d type monitor", phy, phy);
505 system(cmd);
506 sprintf(cmd, "ifconfig mon%d up", phy);
507 system(cmd);
508
509 return 0;
510 }
511
512 if (get_band_val(an, band, rf_mode) != ATENL_RF_MODE_TEST)
513 return 0;
514
515 nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, ntohl(v[2]));
516 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, phy_type_to_attr(ntohl(v[3])));
517 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, ntohl(v[6]));
518
519 atenl_dbg("%s: enable = %d, ant=%u, tx_rate_mode=%u, rate_idx=%u\n",
520 __func__, enable, ntohl(v[2]), ntohl(v[3]), ntohl(v[6]));
521
522 atenl_set_attr_state(an, msg, band, MT76_TM_STATE_TX_CONT);
523
524 nla_nest_end(msg, ptr);
525
526 return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
527}
528
529static int atenl_nl_get_rx_info_cb(struct nl_msg *msg, void *arg)
530{
531 struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
532 struct atenl *an = nl_priv->an;
533 struct atenl_band *anb = &an->anb[an->cur_band];
534 struct atenl_data *data = nl_priv->res;
535 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
536 struct atenl_rx_info_hdr *rx_hdr;
537 struct atenl_rx_info_band *rx_band;
538 struct atenl_rx_info_user *rx_user;
539 struct atenl_rx_info_path *rx_path;
540 struct atenl_rx_info_comm *rx_comm;
541 struct nlattr *tb1[NUM_MT76_TM_ATTRS];
542 struct nlattr *tb2[NUM_MT76_TM_STATS_ATTRS];
543 struct nlattr *tb3[NUM_MT76_TM_RX_ATTRS];
544 struct nlattr *nl_attr, *cur;
545 struct atenl_rx_stat rx_cur, rx_diff = {};
546 u32 rcpi[4] = {};
547 u32 type_num = htonl(4);
548 s32 ib_rssi[4] = {}, wb_rssi[4] = {};
549 u8 path = an->anb[an->cur_band].chainmask;
550 u8 path_num = __builtin_popcount(path);
551 u8 *buf = hdr->data + 2;
552 int i, rem;
553
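	/*
	 * HQA reply layout: a count of 4 record types followed by
	 * (type, val, len) headers and payloads for band, per-path,
	 * per-user and common RX info, all in network byte order.
	 */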
554 *(u32 *)buf = type_num;
555 buf += sizeof(type_num);
556
557#define RX_PUT_HDR(_hdr, _type, _val, _size) do { \
558 _hdr->type = htonl(_type); \
559 _hdr->val = htonl(_val); \
560 _hdr->len = htonl(_size); \
561 buf += sizeof(*_hdr); \
562 } while (0)
563
564 rx_hdr = (struct atenl_rx_info_hdr *)buf;
565 RX_PUT_HDR(rx_hdr, 0, BIT(an->cur_band), sizeof(*rx_band));
566 rx_band = (struct atenl_rx_info_band *)buf;
567 buf += sizeof(*rx_band);
568
569 rx_hdr = (struct atenl_rx_info_hdr *)buf;
570 RX_PUT_HDR(rx_hdr, 1, path, path_num * sizeof(*rx_path));
571 rx_path = (struct atenl_rx_info_path *)buf;
572 buf += path_num * sizeof(*rx_path);
573
574 rx_hdr = (struct atenl_rx_info_hdr *)buf;
575 RX_PUT_HDR(rx_hdr, 2, GENMASK(15, 0), 16 * sizeof(*rx_user));
576 rx_user = (struct atenl_rx_info_user *)buf;
577 buf += 16 * sizeof(*rx_user);
578
579 rx_hdr = (struct atenl_rx_info_hdr *)buf;
580 RX_PUT_HDR(rx_hdr, 3, BIT(0), sizeof(*rx_comm));
581 rx_comm = (struct atenl_rx_info_comm *)buf;
582 buf += sizeof(*rx_comm);
583
584 hdr->len = htons(buf - hdr->data);
585
586 nl_attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
587 if (!nl_attr) {
588 atenl_err("Testdata attribute not found\n");
589 return NL_SKIP;
590 }
591
592 nla_parse_nested(tb1, MT76_TM_ATTR_MAX, nl_attr, testdata_policy);
593 nla_parse_nested(tb2, MT76_TM_STATS_ATTR_MAX,
594 tb1[MT76_TM_ATTR_STATS], stats_policy);
595
596 rx_cur.total = nla_get_u64(tb2[MT76_TM_STATS_ATTR_RX_PACKETS]);
597 rx_cur.err_cnt = nla_get_u64(tb2[MT76_TM_STATS_ATTR_RX_FCS_ERROR]);
598 rx_cur.len_mismatch = nla_get_u64(tb2[MT76_TM_STATS_ATTR_RX_LEN_MISMATCH]);
599 rx_cur.ok_cnt = rx_cur.total - rx_cur.err_cnt - rx_cur.len_mismatch;
600
601 if (!anb->reset_rx_cnt ||
602 get_band_val(an, an->cur_band, cur_state) == MT76_TM_STATE_RX_FRAMES) {
603#define RX_COUNT_DIFF(_field) \
604 rx_diff._field = (rx_cur._field) - (anb->rx_stat._field);
605 RX_COUNT_DIFF(total);
606 RX_COUNT_DIFF(err_cnt);
607 RX_COUNT_DIFF(len_mismatch);
608 RX_COUNT_DIFF(ok_cnt);
609#undef RX_COUNT_DIFF
610
611 memcpy(&anb->rx_stat, &rx_cur, sizeof(anb->rx_stat));
612 }
613
614 rx_band->mac_rx_mdrdy_cnt = htonl((u32)rx_diff.total);
615 rx_band->mac_rx_fcs_err_cnt = htonl((u32)rx_diff.err_cnt);
616 rx_band->mac_rx_fcs_ok_cnt = htonl((u32)rx_diff.ok_cnt);
617 rx_band->mac_rx_len_mismatch = htonl((u32)rx_diff.len_mismatch);
618 rx_user->fcs_error_cnt = htonl((u32)rx_diff.err_cnt);
619
620 nla_parse_nested(tb3, MT76_TM_RX_ATTR_MAX,
621 tb2[MT76_TM_STATS_ATTR_LAST_RX], rx_policy);
622
623 rx_user->freq_offset = htonl(nla_get_u32(tb3[MT76_TM_RX_ATTR_FREQ_OFFSET]));
624 rx_user->snr = htonl(nla_get_u8(tb3[MT76_TM_RX_ATTR_SNR]));
625
626 i = 0;
627 nla_for_each_nested(cur, tb3[MT76_TM_RX_ATTR_RCPI], rem) {
628 if (nla_len(cur) != 1 || i >= 4)
629 break;
630
631 rcpi[i++] = nla_get_u8(cur);
632 }
633
634 i = 0;
635 nla_for_each_nested(cur, tb3[MT76_TM_RX_ATTR_IB_RSSI], rem) {
636 if (nla_len(cur) != 1 || i >= 4)
637 break;
638
639 ib_rssi[i++] = (s8)nla_get_u8(cur);
640 }
641
642 i = 0;
643 nla_for_each_nested(cur, tb3[MT76_TM_RX_ATTR_WB_RSSI], rem) {
644 if (nla_len(cur) != 1 || i >= 4)
645 break;
646
647 wb_rssi[i++] = (s8)nla_get_u8(cur);
648 }
649
650 for (i = 0; i < 4; i++) {
651 struct atenl_rx_info_path *path = &rx_path[i];
652
653 path->rcpi = htonl(rcpi[i]);
654 path->rssi = htonl(to_rssi((u8)rcpi[i]));
655 path->fagc_ib_rssi = htonl(ib_rssi[i]);
656 path->fagc_wb_rssi = htonl(wb_rssi[i]);
657 }
658
659 return NL_SKIP;
660}
661
662static int atenl_nl_get_rx_info(struct atenl *an, struct atenl_data *data,
663 struct atenl_nl_priv *nl_priv)
664{
665 struct nl_msg *msg = nl_priv->msg;
666 void *ptr;
667
668 nl_priv->an = an;
669 nl_priv->res = (void *)data;
670
671 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
672 if (!ptr)
673 return -ENOMEM;
674
675 nla_put_flag(msg, MT76_TM_ATTR_STATS);
676
677 nla_nest_end(msg, ptr);
678
679 return unl_genl_request(&nl_priv->unl, msg, atenl_nl_get_rx_info_cb,
680 (void *)nl_priv);
681}
682
683static int
684atenl_nl_set_ru(struct atenl *an, struct atenl_data *data,
685 struct atenl_nl_priv *nl_priv)
686{
687 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
688 struct nl_msg *msg;
689 u32 *v = (u32 *)(hdr->data + 4);
690 u32 seg0_num = ntohl(v[0]); /* v[1] seg1_num unused */
691 void *ptr;
692 int i, ret;
693
694 if (seg0_num > 8)
695 return -EINVAL;
696
697 for (i = 0, v = &v[2]; i < seg0_num; i++, v += 11) {
698 u32 ru_alloc = ntohl(v[1]);
699 u32 aid = ntohl(v[2]);
700 u32 ru_idx = ntohl(v[3]);
701 u32 mcs = ntohl(v[4]);
702 u32 ldpc = ntohl(v[5]);
703 u32 nss = ntohl(v[6]);
704 u32 tx_length = ntohl(v[8]);
705 char buf[10];
706
707 if (unl_genl_init(&nl_priv->unl, "nl80211") < 0) {
708 atenl_err("Failed to connect to nl80211\n");
709 return 2;
710 }
711
712 msg = unl_genl_msg(&nl_priv->unl, NL80211_CMD_TESTMODE, false);
713 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));
714
715 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
716 if (!ptr)
717 return -ENOMEM;
718
719 if (i == 0)
720 atenl_set_attr_state(an, msg, an->cur_band, MT76_TM_STATE_IDLE);
721
722 nla_put_u8(msg, MT76_TM_ATTR_AID, aid);
723 nla_put_u8(msg, MT76_TM_ATTR_RU_IDX, ru_idx);
724 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, mcs);
725 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, ldpc);
726 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, nss);
727 nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, tx_length);
728
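		/*
		 * ru_alloc arrives with one binary digit per nibble; print it
		 * as hex and re-parse the string as base 2 to pack it into the
		 * bitmap expected by MT76_TM_ATTR_RU_ALLOC.
		 */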
729 ret = snprintf(buf, sizeof(buf), "%x", ru_alloc);
730 if (snprintf_error(sizeof(buf), ret))
731 return -EINVAL;
732
733 nla_put_u8(msg, MT76_TM_ATTR_RU_ALLOC, strtol(buf, NULL, 2));
734
735 nla_nest_end(msg, ptr);
736
737 unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
738
739 unl_free(&nl_priv->unl);
740 }
741
742 return 0;
743}
744
745static int
746atenl_nl_ibf_init(struct atenl *an, u8 band)
747{
748 struct atenl_nl_priv nl_priv = {};
749 struct nl_msg *msg;
750 void *ptr, *a;
751 int ret;
752
753 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
754 atenl_err("Failed to connect to nl80211\n");
755 return 2;
756 }
757
758 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
759 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, band, phy_idx));
760
761 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
762 if (!ptr) {
763 ret = -ENOMEM;
764 goto out;
765 }
766
767 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, MT76_TM_TX_MODE_HT);
768 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, an->ibf_mcs);
769 nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, an->ibf_ant);
770 nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_INIT);
771
772 a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
773 if (!a) {
774 ret = -ENOMEM;
775 goto out;
776 }
777 nla_put_u16(msg, 0, 1);
778 nla_nest_end(msg, a);
779
780 nla_nest_end(msg, ptr);
781
782 ret = unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
783
784out:
785 unl_free(&nl_priv.unl);
786 return ret;
787}
788
789static int
790atenl_nl_ibf_e2p_update(struct atenl *an)
791{
792 struct atenl_nl_priv nl_priv = {};
793 struct nl_msg *msg;
794 void *ptr, *a;
795 int ret;
796
797 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
798 atenl_err("Failed to connect to nl80211\n");
799 return 2;
800 }
801
802 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
803 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));
804
805 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
806 if (!ptr) {
807 ret = -ENOMEM;
808 goto out;
809 }
810
811 nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_E2P_UPDATE);
812 a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
813 if (!a) {
814 ret = -ENOMEM;
815 goto out;
816 }
817 nla_put_u16(msg, 0, 0);
818 nla_nest_end(msg, a);
819
820 nla_nest_end(msg, ptr);
821
822 ret = unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
823
824out:
825 unl_free(&nl_priv.unl);
826 return ret;
827}
828
 829void
 830atenl_get_ibf_cal_result(struct atenl *an)
831{
832 u16 offset;
833
 834 if (an->adie_id == 0x7975)
 835 offset = 0x651;
 836 else if (an->adie_id == 0x7976)
 837 offset = 0x60a;
 else /* unknown a-die: no known cal offset, avoid reading from an uninitialized offset */
 return;
838
 839 /* each calibration group is 40 bytes; read groups 0-8 back from the driver */
840 atenl_eeprom_read_from_driver(an, offset, 40 * 9);
841}
842
843static int
844atenl_nl_ibf_set_val(struct atenl *an, struct atenl_data *data,
845 struct atenl_nl_priv *nl_priv)
846{
847#define MT_IBF(_act) MT76_TM_TXBF_ACT_##_act
848 static const u8 bf_act_map[] = {
849 [TXBF_ACT_IBF_PHASE_COMP] = MT_IBF(PHASE_COMP),
850 [TXBF_ACT_IBF_PROF_UPDATE] = MT_IBF(IBF_PROF_UPDATE),
851 [TXBF_ACT_EBF_PROF_UPDATE] = MT_IBF(EBF_PROF_UPDATE),
852 [TXBF_ACT_IBF_PHASE_CAL] = MT_IBF(PHASE_CAL),
853 };
854#undef MT_IBF
855 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
856 struct nl_msg *msg = nl_priv->msg;
857 u32 *v = (u32 *)(hdr->data + 4);
858 u32 action = ntohl(v[0]);
859 u16 val[8], is_atenl = 1;
860 u8 tmp_ant;
861 void *ptr, *a;
862 char cmd[64];
863 int i;
864
865 for (i = 0; i < 8; i++)
866 val[i] = ntohl(v[i + 1]);
867
868 atenl_dbg("%s: action = %u, val = %u, %u, %u, %u, %u\n",
869 __func__, action, val[0], val[1], val[2], val[3], val[4]);
870
871 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
872 if (!ptr)
873 return -ENOMEM;
874
875 switch (action) {
876 case TXBF_ACT_CHANNEL:
877 an->cur_band = val[1];
 878 /* sanity check to prevent a wrong band index from the script */
879 if (val[0] > 14)
880 an->cur_band = 1;
881 atenl_nl_ibf_init(an, an->cur_band);
882 atenl_set_channel(an, 0, an->cur_band, val[0], 0, 0);
883
884 nla_put_u8(msg, MT76_TM_ATTR_AID, 0);
885 nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_UPDATE_CH);
886 a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
887 if (!a)
888 return -ENOMEM;
889 nla_put_u16(msg, 0, 0);
890 nla_nest_end(msg, a);
891 break;
892 case TXBF_ACT_MCS:
893 tmp_ant = (1 << DIV_ROUND_UP(val[0], 8)) - 1 ?: 1;
 894 /* the correct band idx is sometimes only set after this action,
 895 * so keep a temporary copy to allow the mcs update in another action.
 896 */
897 an->ibf_mcs = val[0];
898 an->ibf_ant = tmp_ant;
899 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, an->ibf_mcs);
900 nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, an->ibf_ant);
901 break;
902 case TXBF_ACT_TX_ANT:
903 nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, val[0]);
904 break;
905 case TXBF_ACT_RX_START:
906 atenl_set_attr_state(an, msg, an->cur_band, MT76_TM_STATE_RX_FRAMES);
907 break;
908 case TXBF_ACT_RX_ANT:
909 nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, val[0]);
910 break;
911 case TXBF_ACT_TX_PKT:
912 nla_put_u8(msg, MT76_TM_ATTR_AID, val[1]);
913 nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_TX_PREP);
 914 if (!val[2])
915 nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, 0xFFFFFFFF);
916 else
917 nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, val[2]);
 918 nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, 1024);
919 a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
920 if (!a)
921 return -ENOMEM;
922
923 for (i = 0; i < 5; i++)
924 nla_put_u16(msg, i, val[i]);
925 nla_nest_end(msg, a);
926
927 atenl_set_attr_state(an, msg, an->cur_band, MT76_TM_STATE_TX_FRAMES);
928 break;
929 case TXBF_ACT_IBF_PHASE_COMP:
930 nla_put_u8(msg, MT76_TM_ATTR_AID, 1);
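		/* fall through to the common beamforming action handling below */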
931 case TXBF_ACT_IBF_PROF_UPDATE:
932 case TXBF_ACT_EBF_PROF_UPDATE:
933 case TXBF_ACT_IBF_PHASE_CAL:
934 nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, bf_act_map[action]);
935 a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
936 if (!a)
937 return -ENOMEM;
 938 /* Note: litepoint may send a random number for lna_gain_level; reset it to 0 */
939 if (action == TXBF_ACT_IBF_PHASE_CAL)
940 val[4] = 0;
941 for (i = 0; i < 5; i++)
942 nla_put_u16(msg, i, val[i]);
943 /* Used to distinguish between command mode and HQADLL mode */
944 nla_put_u16(msg, 5, is_atenl);
945 nla_nest_end(msg, a);
946 break;
947 case TXBF_ACT_IBF_PHASE_E2P_UPDATE:
948 atenl_nl_ibf_e2p_update(an);
949 atenl_get_ibf_cal_result(an);
950
951 nla_put_u8(msg, MT76_TM_ATTR_AID, 0);
952 nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_INIT);
953
954 a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
955 if (!a)
956 return -ENOMEM;
957 nla_put_u16(msg, 0, 0);
958 nla_nest_end(msg, a);
959 break;
960 case TXBF_ACT_INIT:
961 case TXBF_ACT_POWER:
962 default:
963 break;
964 }
965
966 nla_nest_end(msg, ptr);
967
968 *(u32 *)(hdr->data + 2) = data->ext_id;
969
970 return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
971}
972
973static int
974atenl_nl_ibf_get_status(struct atenl *an, struct atenl_data *data,
975 struct atenl_nl_priv *nl_priv)
976{
977 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
978 u32 status = htonl(1);
979
980 *(u32 *)(hdr->data + 2) = data->ext_id;
981 memcpy(hdr->data + 6, &status, 4);
982
983 return 0;
984}
985
986static int
987atenl_nl_ibf_profile_update_all(struct atenl *an, struct atenl_data *data,
988 struct atenl_nl_priv *nl_priv)
989{
990 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
991 struct nl_msg *msg;
992 void *ptr, *a;
993 u32 *v = (u32 *)(hdr->data + 4);
994 u16 pfmu_idx = ntohl(v[0]);
995 int i;
996
997 for (i = 0, v = &v[5]; i < 64; i++, v += 5) {
998 int j;
999
1000 if (unl_genl_init(&nl_priv->unl, "nl80211") < 0) {
1001 atenl_err("Failed to connect to nl80211\n");
1002 return 2;
1003 }
1004
1005 msg = unl_genl_msg(&nl_priv->unl, NL80211_CMD_TESTMODE, false);
1006 nla_put_u32(msg, NL80211_ATTR_WIPHY,
1007 get_band_val(an, an->cur_band, phy_idx));
1008
1009 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1010 if (!ptr)
1011 return -ENOMEM;
1012
1013 nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_PROF_UPDATE_ALL);
1014 a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
1015 if (!a)
1016 return -ENOMEM;
1017 nla_put_u16(msg, 0, pfmu_idx);
1018
1019 for (j = 0; j < 5; j++)
1020 nla_put_u16(msg, j + 1, ntohl(v[j]));
1021 nla_nest_end(msg, a);
1022
1023 nla_nest_end(msg, ptr);
1024
1025 unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
1026
1027 unl_free(&nl_priv->unl);
1028 }
1029
1030 *(u32 *)(hdr->data + 2) = data->ext_id;
1031
1032 return 0;
1033}
1034
1035#define NL_OPS_GROUP(cmd, ...) [HQA_CMD_##cmd] = { __VA_ARGS__ }
1036static const struct atenl_nl_ops nl_ops[] = {
1037 NL_OPS_GROUP(SET_TX_PATH, .set=MT76_TM_ATTR_TX_ANTENNA),
1038 NL_OPS_GROUP(SET_TX_POWER, .set=MT76_TM_ATTR_TX_POWER),
1039 NL_OPS_GROUP(SET_RX_PATH, .set=MT76_TM_ATTR_TX_ANTENNA),
1040 NL_OPS_GROUP(SET_FREQ_OFFSET, .set=MT76_TM_ATTR_FREQ_OFFSET),
1041 NL_OPS_GROUP(SET_CFG, .ops=atenl_nl_set_cfg),
1042 NL_OPS_GROUP(SET_TSSI, .ops=atenl_nl_set_cfg),
1043 NL_OPS_GROUP(CONTINUOUS_TX, .ops=atenl_nl_continuous_tx),
1044 NL_OPS_GROUP(GET_TX_INFO, .dump=MT76_TM_STATS_ATTR_TX_DONE),
1045 NL_OPS_GROUP(GET_RX_INFO, .ops=atenl_nl_get_rx_info, .dump=true),
1046 NL_OPS_GROUP(SET_RU, .ops=atenl_nl_set_ru),
1047};
1048#undef NL_OPS_GROUP
1049
1050#define NL_OPS_EXT(cmd, ...) [HQA_EXT_CMD_##cmd] = { __VA_ARGS__ }
1051static const struct atenl_nl_ops nl_ops_ext[] = {
1052 NL_OPS_EXT(SET_TX, .ops=atenl_nl_set_tx),
1053 NL_OPS_EXT(START_TX, .ops=atenl_nl_tx),
1054 NL_OPS_EXT(STOP_TX, .ops=atenl_nl_tx),
1055 NL_OPS_EXT(START_RX, .ops=atenl_nl_rx),
1056 NL_OPS_EXT(STOP_RX, .ops=atenl_nl_rx),
1057 NL_OPS_EXT(OFF_CH_SCAN, .ops=atenl_off_ch_scan),
1058 NL_OPS_EXT(IBF_SET_VAL, .ops=atenl_nl_ibf_set_val),
1059 NL_OPS_EXT(IBF_GET_STATUS, .ops=atenl_nl_ibf_get_status),
1060 NL_OPS_EXT(IBF_PROF_UPDATE_ALL, .ops=atenl_nl_ibf_profile_update_all),
1061};
1062#undef NL_OPS_EXT
1063
1064int atenl_nl_process(struct atenl *an, struct atenl_data *data)
1065{
1066 struct atenl_nl_priv nl_priv = {};
1067 const struct atenl_nl_ops *ops;
1068 struct nl_msg *msg;
1069 int ret = 0;
1070
1071 if (data->ext_cmd != 0)
1072 ops = &nl_ops_ext[data->ext_cmd];
1073 else
1074 ops = &nl_ops[data->cmd];
1075
1076 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
1077 atenl_err("Failed to connect to nl80211\n");
1078 return -1;
1079 }
1080
1081 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, !!ops->dump);
1082 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));
1083 nl_priv.msg = msg;
1084
1085 if (ops->ops) {
1086 ret = ops->ops(an, data, &nl_priv);
1087 } else if (ops->dump) {
1088 nl_priv.attr = ops->dump;
1089 ret = atenl_nl_dump_attr(an, data, &nl_priv);
1090 } else {
1091 nl_priv.attr = ops->set;
1092 ret = atenl_nl_set_attr(an, data, &nl_priv);
1093 }
1094
1095 if (ret)
1096 atenl_err("command process error: 0x%x (0x%x)\n", data->cmd_id, data->ext_id);
1097
1098 unl_free(&nl_priv.unl);
1099
1100 return ret;
1101}
1102
1103int atenl_nl_process_many(struct atenl *an, struct atenl_data *data)
1104{
1105 struct atenl_nl_priv nl_priv = {};
1106 const struct atenl_nl_ops *ops;
1107 int ret = 0;
1108
1109 if (data->ext_cmd != 0)
1110 ops = &nl_ops_ext[data->ext_cmd];
1111 else
1112 ops = &nl_ops[data->cmd];
1113
1114 if (ops->ops)
1115 ret = ops->ops(an, data, &nl_priv);
1116
1117 return ret;
1118}
1119
1120int atenl_nl_set_state(struct atenl *an, u8 band,
1121 enum mt76_testmode_state state)
1122{
1123 struct atenl_nl_priv nl_priv = {};
1124 struct nl_msg *msg;
1125 void *ptr;
1126
1127 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
1128 atenl_err("Failed to connect to nl80211\n");
1129 return 2;
1130 }
1131
1132 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
1133 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, band, phy_idx));
1134
1135 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1136 if (!ptr)
1137 return -ENOMEM;
1138
1139 atenl_set_attr_state(an, msg, band, state);
1140
1141 nla_nest_end(msg, ptr);
1142
1143 unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
1144
1145 unl_free(&nl_priv.unl);
1146
1147 return 0;
1148}
1149
1150int atenl_nl_set_aid(struct atenl *an, u8 band, u8 aid)
1151{
1152 struct atenl_nl_priv nl_priv = {};
1153 struct nl_msg *msg;
1154 void *ptr;
1155
1156 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
1157 atenl_err("Failed to connect to nl80211\n");
1158 return 2;
1159 }
1160
1161 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
1162 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, band, phy_idx));
1163
1164 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1165 if (!ptr)
1166 return -ENOMEM;
1167
1168 nla_put_u8(msg, MT76_TM_ATTR_AID, aid);
1169
1170 nla_nest_end(msg, ptr);
1171
1172 unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
1173
1174 unl_free(&nl_priv.unl);
1175
1176 return 0;
1177}
1178
1179static int atenl_nl_check_mtd_cb(struct nl_msg *msg, void *arg)
1180{
1181 struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
1182 struct atenl *an = nl_priv->an;
1183 struct nlattr *tb[NUM_MT76_TM_ATTRS];
1184 struct nlattr *attr;
1185
1186 attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
1187 if (!attr)
1188 return NL_SKIP;
1189
1190 nla_parse_nested(tb, MT76_TM_ATTR_MAX, attr, testdata_policy);
1191 if (!tb[MT76_TM_ATTR_MTD_PART] || !tb[MT76_TM_ATTR_MTD_OFFSET])
1192 return NL_SKIP;
1193
1194 an->mtd_part = strdup(nla_get_string(tb[MT76_TM_ATTR_MTD_PART]));
1195 an->mtd_offset = nla_get_u32(tb[MT76_TM_ATTR_MTD_OFFSET]);
 1196 an->band_idx = nla_get_u32(tb[MT76_TM_ATTR_BAND_IDX]);
 1197
1198 return NL_SKIP;
1199}
1200
1201int atenl_nl_check_mtd(struct atenl *an)
1202{
1203 struct atenl_nl_priv nl_priv = { .an = an };
1204 struct nl_msg *msg;
1205
1206 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
1207 atenl_err("Failed to connect to nl80211\n");
1208 return 2;
1209 }
1210
1211 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, true);
1212 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));
1213 unl_genl_request(&nl_priv.unl, msg, atenl_nl_check_mtd_cb, (void *)&nl_priv);
1214
1215 unl_free(&nl_priv.unl);
1216
1217 return 0;
1218}
1219
1220int atenl_nl_write_eeprom(struct atenl *an, u32 offset, u8 *val, int len)
1221{
1222 struct atenl_nl_priv nl_priv = {};
1223 struct nl_msg *msg;
1224 void *ptr, *a;
1225 int i;
1226
1227 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
1228 atenl_err("Failed to connect to nl80211\n");
1229 return 2;
1230 }
1231
1232 if (len > 16)
1233 return -EINVAL;
1234
1235 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
1236 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));
1237
1238 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1239 if (!ptr)
1240 return -ENOMEM;
1241
1242 nla_put_u8(msg, MT76_TM_ATTR_EEPROM_ACTION,
1243 MT76_TM_EEPROM_ACTION_UPDATE_DATA);
1244 nla_put_u32(msg, MT76_TM_ATTR_EEPROM_OFFSET, offset);
1245
1246 a = nla_nest_start(msg, MT76_TM_ATTR_EEPROM_VAL);
1247 if (!a)
1248 return -ENOMEM;
1249
1250 for (i = 0; i < len; i++)
1251 if (nla_put_u8(msg, i, val[i]))
1252 goto out;
1253
1254 nla_nest_end(msg, a);
1255
1256 nla_nest_end(msg, ptr);
1257
1258 unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
1259
1260 unl_free(&nl_priv.unl);
1261
1262out:
1263 return 0;
1264}
1265
1266int atenl_nl_write_efuse_all(struct atenl *an)
1267{
1268 struct atenl_nl_priv nl_priv = {};
1269 struct nl_msg *msg;
1270 void *ptr;
1271
1272 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
1273 atenl_err("Failed to connect to nl80211\n");
1274 return 2;
1275 }
1276
1277 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
1278 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));
1279
1280 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1281 if (!ptr)
1282 return -ENOMEM;
1283
1284 nla_put_u8(msg, MT76_TM_ATTR_EEPROM_ACTION,
1285 MT76_TM_EEPROM_ACTION_WRITE_TO_EFUSE);
1286
1287 nla_nest_end(msg, ptr);
1288
1289 unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
1290
1291 unl_free(&nl_priv.unl);
1292
1293 return 0;
1294}
1295
1296int atenl_nl_update_buffer_mode(struct atenl *an)
1297{
1298 struct atenl_nl_priv nl_priv = {};
1299 struct nl_msg *msg;
1300 void *ptr;
1301
1302 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
1303 atenl_err("Failed to connect to nl80211\n");
1304 return 2;
1305 }
1306
1307 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
1308 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));
1309
1310 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1311 if (!ptr)
1312 return -ENOMEM;
1313
1314 nla_put_u8(msg, MT76_TM_ATTR_EEPROM_ACTION,
1315 MT76_TM_EEPROM_ACTION_UPDATE_BUFFER_MODE);
1316
1317 nla_nest_end(msg, ptr);
1318
1319 unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
1320
1321 unl_free(&nl_priv.unl);
1322
1323 return 0;
1324}
1325
1326static int atenl_nl_precal_sync_from_driver_cb(struct nl_msg *msg, void *arg)
1327{
1328 struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
1329 struct atenl *an = nl_priv->an;
1330 struct nlattr *tb[NUM_MT76_TM_ATTRS];
1331 struct nlattr *attr, *cur;
1332 int i, rem, prek_offset = nl_priv->attr;
1333
1334
1335 attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
1336 if (!attr)
1337 return NL_SKIP;
1338
1339 nla_parse_nested(tb, MT76_TM_ATTR_MAX, attr, testdata_policy);
1340
1341 if (!tb[MT76_TM_ATTR_PRECAL_INFO] && !tb[MT76_TM_ATTR_PRECAL]) {
1342 atenl_info("No Pre cal data or info!\n");
1343 return NL_SKIP;
1344 }
1345
1346 if (tb[MT76_TM_ATTR_PRECAL_INFO]) {
1347 i = 0;
1348 nla_for_each_nested(cur, tb[MT76_TM_ATTR_PRECAL_INFO], rem) {
1349 an->cal_info[i] = (u32) nla_get_u32(cur);
1350 i++;
1351 }
1352 return NL_SKIP;
1353 }
1354
1355 if (tb[MT76_TM_ATTR_PRECAL] && an->cal) {
1356 i = prek_offset;
1357 nla_for_each_nested(cur, tb[MT76_TM_ATTR_PRECAL], rem) {
1358 an->cal[i] = (u8) nla_get_u8(cur);
1359 i++;
1360 }
1361 return NL_SKIP;
1362 }
1363 atenl_info("No data found for pre-cal!\n");
1364
1365 return NL_SKIP;
1366}
1367
1368static int
1369atenl_nl_precal_sync_partition(struct atenl_nl_priv *nl_priv, enum mt76_testmode_attr attr,
1370 int prek_type, int prek_offset)
1371{
1372 int ret;
1373 void *ptr;
1374 struct nl_msg *msg;
1375 struct atenl *an = nl_priv->an;
1376
1377 msg = unl_genl_msg(&(nl_priv->unl), NL80211_CMD_TESTMODE, true);
1378 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));
1379 nl_priv->msg = msg;
1380 nl_priv->attr = prek_offset;
1381
1382 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1383 if (!ptr)
1384 return -ENOMEM;
1385
1386 nla_put_flag(msg, attr);
1387 if (attr == MT76_TM_ATTR_PRECAL)
1388 nla_put_u8(msg, MT76_TM_ATTR_PRECAL_INFO, prek_type);
1389 nla_nest_end(msg, ptr);
1390
1391 ret = unl_genl_request(&(nl_priv->unl), msg, atenl_nl_precal_sync_from_driver_cb, (void *)nl_priv);
1392
1393 if (ret) {
1394 atenl_err("command process error!\n");
1395 return ret;
1396 }
1397
1398 return 0;
1399}
1400
1401int atenl_nl_precal_sync_from_driver(struct atenl *an, enum prek_ops ops)
1402{
 1403#define GROUP_IND_MASK BIT(0)
1404#define GROUP_IND_MASK_7996 GENMASK(2, 0)
1405#define DPD_IND_MASK GENMASK(3, 1)
1406#define DPD_IND_MASK_7996 GENMASK(5, 3)
 1407 int ret;
1408 u32 i, times, group_size, dpd_size, total_size, transmit_size, offs;
 1409 u32 dpd_per_chan_size, dpd_chan_ratio[3], total_ratio;
1410 u32 size, base, base_idx, dpd_base_map, *size_ptr;
1411 u8 cal_indicator, group_ind_mask, dpd_ind_mask, *precal_info;
 1412 struct atenl_nl_priv nl_priv = { .an = an };
1413
1414 offs = an->eeprom_prek_offs;
1415 cal_indicator = an->eeprom_data[offs];
 1416 group_ind_mask = is_mt7996(an) ? GROUP_IND_MASK_7996 : GROUP_IND_MASK;
1417 dpd_ind_mask = is_mt7996(an) ? DPD_IND_MASK_7996 : DPD_IND_MASK;
 1418
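	/*
	 * cal_info layout as used below: [0] group cal size, [1] DPD size,
	 * [2] packed per-band DPD chunk counts plus per-channel chunk size,
	 * [3] per-message transmit size, [4] indicator bits of blocks to clean.
	 */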
1419 if (cal_indicator) {
1420 precal_info = an->eeprom_data + an->eeprom_size;
1421 memcpy(an->cal_info, precal_info, PRE_CAL_INFO);
1422 group_size = an->cal_info[0];
1423 dpd_size = an->cal_info[1];
1424 total_size = group_size + dpd_size;
 1425 dpd_chan_ratio[0] = (an->cal_info[2] >> DPD_INFO_6G_SHIFT) &
1426 DPD_INFO_MASK;
1427 dpd_chan_ratio[1] = (an->cal_info[2] >> DPD_INFO_5G_SHIFT) &
1428 DPD_INFO_MASK;
1429 dpd_chan_ratio[2] = (an->cal_info[2] >> DPD_INFO_2G_SHIFT) &
1430 DPD_INFO_MASK;
1431 dpd_per_chan_size = (an->cal_info[2] >> DPD_INFO_CH_SHIFT) &
1432 DPD_INFO_MASK;
1433 total_ratio = dpd_chan_ratio[0] + dpd_chan_ratio[1] +
1434 dpd_chan_ratio[2];
 1435 }
1436
1437 switch (ops){
1438 case PREK_SYNC_ALL:
1439 size_ptr = &total_size;
1440 base_idx = 0;
 1441 dpd_base_map = 0;
 1442 goto start;
1443 case PREK_SYNC_GROUP:
1444 size_ptr = &group_size;
1445 base_idx = 0;
 1446 dpd_base_map = 0;
 1447 goto start;
1448 case PREK_SYNC_DPD_6G:
1449 size_ptr = &dpd_size;
1450 base_idx = 0;
 1451 dpd_base_map = is_mt7996(an) ? GENMASK(2, 1) : 0;
 1452 goto start;
1453 case PREK_SYNC_DPD_5G:
1454 size_ptr = &dpd_size;
1455 base_idx = 1;
 1456 dpd_base_map = is_mt7996(an) ? BIT(2) : BIT(0);
 1457 goto start;
1458 case PREK_SYNC_DPD_2G:
1459 size_ptr = &dpd_size;
1460 base_idx = 2;
 1461 dpd_base_map = is_mt7996(an) ? 0 : GENMASK(1, 0);
 1462
1463start:
1464 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
1465 atenl_err("Failed to connect to nl80211\n");
1466 return 2;
1467 }
1468
1469 ret = atenl_nl_precal_sync_partition(&nl_priv, MT76_TM_ATTR_PRECAL_INFO, 0, 0);
1470 if (ret || !an->cal_info)
1471 goto out;
1472
1473 group_size = an->cal_info[0];
1474 dpd_size = an->cal_info[1];
1475 total_size = group_size + dpd_size;
 1476 dpd_chan_ratio[0] = (an->cal_info[2] >> DPD_INFO_6G_SHIFT) &
1477 DPD_INFO_MASK;
1478 dpd_chan_ratio[1] = (an->cal_info[2] >> DPD_INFO_5G_SHIFT) &
1479 DPD_INFO_MASK;
1480 dpd_chan_ratio[2] = (an->cal_info[2] >> DPD_INFO_2G_SHIFT) &
1481 DPD_INFO_MASK;
1482 dpd_per_chan_size = (an->cal_info[2] >> DPD_INFO_CH_SHIFT) &
1483 DPD_INFO_MASK;
1484 total_ratio = dpd_chan_ratio[0] + dpd_chan_ratio[1] +
1485 dpd_chan_ratio[2];
 1486 transmit_size = an->cal_info[3];
1487
1488 size = *size_ptr;
 1489 if (size_ptr == &dpd_size)
1490 size = size / total_ratio * dpd_chan_ratio[base_idx];
1491
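		/* offset of the requested block: group cal first, then the DPD chunks of the bands flagged in dpd_base_map */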
 1492 base = 0;
 1493 for (i = 0; i < 3; i++) {
1494 if (dpd_base_map & BIT(i))
1495 base += dpd_chan_ratio[i] * dpd_per_chan_size *
1496 MT_EE_CAL_UNIT;
 1497 }
1498 base += (size_ptr == &dpd_size) ? group_size : 0;
1499
1500 if (!an->cal)
1501 an->cal = (u8 *) calloc(size, sizeof(u8));
1502 times = size / transmit_size + 1;
1503 for (i = 0; i < times; i++) {
1504 ret = atenl_nl_precal_sync_partition(&nl_priv, MT76_TM_ATTR_PRECAL, ops,
1505 i * transmit_size);
1506 if (ret)
1507 goto out;
1508 }
1509
1510 ret = atenl_eeprom_update_precal(an, base, size);
1511 break;
1512 case PREK_CLEAN_GROUP:
 1513 if (!(cal_indicator & group_ind_mask))
 1514 return 0;
 1515 an->cal_info[4] = cal_indicator & group_ind_mask;
 1516 ret = atenl_eeprom_update_precal(an, 0, group_size);
1517 break;
1518 case PREK_CLEAN_DPD:
 1519 if (!(cal_indicator & dpd_ind_mask))
 1520 return 0;
 1521 an->cal_info[4] = cal_indicator & dpd_ind_mask;
 1522 ret = atenl_eeprom_update_precal(an, group_size, dpd_size);
1523 break;
1524 default:
1525 break;
1526 }
1527
1528out:
1529 unl_free(&nl_priv.unl);
1530 return ret;
1531}