blob: efd8e68ce47191202d6159989773c375bdf13589 [file] [log] [blame]
developer3abe1ad2022-01-24 11:13:32 +08001/* Copyright (C) 2021-2022 Mediatek Inc. */
2#define _GNU_SOURCE
3
4#include <unl.h>
5
6#include "atenl.h"
7
8#define to_rssi(_rcpi) ((_rcpi - 220) / 2)
9
/* Per-request context threaded through the netlink helpers and callbacks. */
struct atenl_nl_priv {
	struct atenl *an;	/* global atenl context */
	struct unl unl;		/* nl80211 generic-netlink socket state */
	struct nl_msg *msg;	/* message currently being built / sent */
	int attr;		/* testmode attribute to set or dump */
	void *res;		/* result sink for dump callbacks */
};
17
/* Dispatch entry for one HQA command: either a plain attribute to set,
 * a stats attribute to dump, or a dedicated handler function.
 */
struct atenl_nl_ops {
	int set;	/* MT76_TM_ATTR_* handled via atenl_nl_set_attr */
	int dump;	/* MT76_TM_STATS_ATTR_* handled via atenl_nl_dump_attr */
	int (*ops)(struct atenl *an, struct atenl_data *data,
		   struct atenl_nl_priv *nl_priv);
};
24
/* Validation policy for attributes nested inside NL80211_ATTR_TESTDATA. */
static struct nla_policy testdata_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_MTD_PART] = { .type = NLA_STRING },
	[MT76_TM_ATTR_MTD_OFFSET] = { .type = NLA_U32 },
	[MT76_TM_ATTR_BAND_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_SKU_EN] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
	[MT76_TM_ATTR_STATS] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_PRECAL] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_PRECAL_INFO] = { .type = NLA_NESTED },
};
47
/* Validation policy for the MT76_TM_ATTR_STATS nest. */
static struct nla_policy stats_policy[NUM_MT76_TM_STATS_ATTRS] = {
	[MT76_TM_STATS_ATTR_TX_PENDING] = { .type = NLA_U32 },
	[MT76_TM_STATS_ATTR_TX_QUEUED] = { .type = NLA_U32 },
	[MT76_TM_STATS_ATTR_TX_DONE] = { .type = NLA_U32 },
	[MT76_TM_STATS_ATTR_RX_PACKETS] = { .type = NLA_U64 },
	[MT76_TM_STATS_ATTR_RX_FCS_ERROR] = { .type = NLA_U64 },
};
55
/* Validation policy for the MT76_TM_STATS_ATTR_LAST_RX nest. */
static struct nla_policy rx_policy[NUM_MT76_TM_RX_ATTRS] = {
	[MT76_TM_RX_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
	[MT76_TM_RX_ATTR_RCPI] = { .type = NLA_NESTED },
	[MT76_TM_RX_ATTR_RSSI] = { .type = NLA_NESTED },
	[MT76_TM_RX_ATTR_IB_RSSI] = { .type = NLA_NESTED },
	[MT76_TM_RX_ATTR_WB_RSSI] = { .type = NLA_NESTED },
	[MT76_TM_RX_ATTR_SNR] = { .type = NLA_U8 },
};
64
/* One (tx_mode, sgi, tx_ltf) combination used to translate the HQA
 * HE GI index into the driver's separate SGI and LTF attributes.
 */
struct he_sgi {
	enum mt76_testmode_tx_mode tx_mode;	/* HE tx mode this entry applies to */
	u8 sgi;					/* guard-interval value for the driver */
	u8 tx_ltf;				/* LTF value for the driver */
};
70
71#define HE_SGI_GROUP(_tx_mode, _sgi, _tx_ltf) \
72 { .tx_mode = MT76_TM_TX_MODE_##_tx_mode, .sgi = _sgi, .tx_ltf = _tx_ltf }
73static const struct he_sgi he_sgi_groups[] = {
74 HE_SGI_GROUP(HE_SU, 0, 0),
75 HE_SGI_GROUP(HE_SU, 0, 1),
76 HE_SGI_GROUP(HE_SU, 1, 1),
77 HE_SGI_GROUP(HE_SU, 2, 2),
78 HE_SGI_GROUP(HE_SU, 0, 2),
79 HE_SGI_GROUP(HE_EXT_SU, 0, 0),
80 HE_SGI_GROUP(HE_EXT_SU, 0, 1),
81 HE_SGI_GROUP(HE_EXT_SU, 1, 1),
82 HE_SGI_GROUP(HE_EXT_SU, 2, 2),
83 HE_SGI_GROUP(HE_EXT_SU, 0, 2),
84 HE_SGI_GROUP(HE_TB, 1, 0),
85 HE_SGI_GROUP(HE_TB, 1, 1),
86 HE_SGI_GROUP(HE_TB, 2, 2),
87 HE_SGI_GROUP(HE_MU, 0, 2),
88 HE_SGI_GROUP(HE_MU, 0, 1),
89 HE_SGI_GROUP(HE_MU, 1, 1),
90 HE_SGI_GROUP(HE_MU, 2, 2),
91};
92#undef HE_SGI_LTF_GROUP
93
94static u8 phy_type_to_attr(u8 phy_type)
95{
96 static const u8 phy_type_to_attr[] = {
97 [ATENL_PHY_TYPE_CCK] = MT76_TM_TX_MODE_CCK,
98 [ATENL_PHY_TYPE_OFDM] = MT76_TM_TX_MODE_OFDM,
99 [ATENL_PHY_TYPE_HT] = MT76_TM_TX_MODE_HT,
100 [ATENL_PHY_TYPE_HT_GF] = MT76_TM_TX_MODE_HT,
101 [ATENL_PHY_TYPE_VHT] = MT76_TM_TX_MODE_VHT,
102 [ATENL_PHY_TYPE_HE_SU] = MT76_TM_TX_MODE_HE_SU,
103 [ATENL_PHY_TYPE_HE_EXT_SU] = MT76_TM_TX_MODE_HE_EXT_SU,
104 [ATENL_PHY_TYPE_HE_TB] = MT76_TM_TX_MODE_HE_TB,
105 [ATENL_PHY_TYPE_HE_MU] = MT76_TM_TX_MODE_HE_MU,
developer77215642023-05-15 13:52:35 +0800106 [ATENL_PHY_TYPE_EHT_SU] = MT76_TM_TX_MODE_EHT_SU,
107 [ATENL_PHY_TYPE_EHT_TRIG] = MT76_TM_TX_MODE_EHT_TRIG,
108 [ATENL_PHY_TYPE_EHT_MU] = MT76_TM_TX_MODE_EHT_MU,
developer3abe1ad2022-01-24 11:13:32 +0800109 };
110
111 if (phy_type >= ARRAY_SIZE(phy_type_to_attr))
112 return 0;
113
114 return phy_type_to_attr[phy_type];
115}
116
/* Append a MT76_TM_ATTR_STATE attribute for @band, but only when the
 * requested state differs from the cached one; the cache is updated so
 * repeated transitions are suppressed.
 */
static void
atenl_set_attr_state(struct atenl *an, struct nl_msg *msg,
		     u8 band, enum mt76_testmode_state state)
{
	if (get_band_val(an, band, cur_state) == state)
		return;

	nla_put_u8(msg, MT76_TM_ATTR_STATE, state);
	set_band_val(an, band, cur_state, state);
}
127
128static void
129atenl_set_attr_antenna(struct atenl *an, struct nl_msg *msg, u8 tx_antenna)
130{
131 if (!tx_antenna)
132 return;
developer67630e02022-12-06 14:35:28 +0800133
134 nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, tx_antenna);
developer3abe1ad2022-01-24 11:13:32 +0800135}
136
/* Generic "set one testmode attribute" handler: take the first u32 of
 * the HQA payload (big-endian) and send it inside NL80211_ATTR_TESTDATA
 * as the attribute selected by nl_priv->attr.
 */
static int
atenl_nl_set_attr(struct atenl *an, struct atenl_data *data,
		  struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	u32 val = ntohl(*(u32 *)hdr->data);
	int attr = nl_priv->attr;
	void *ptr, *a;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	switch (attr) {
	case MT76_TM_ATTR_TX_ANTENNA:
		/* zero masks are filtered out by the helper */
		atenl_set_attr_antenna(an, msg, val);
		break;
	case MT76_TM_ATTR_FREQ_OFFSET:
		nla_put_u32(msg, attr, val);
		break;
	case MT76_TM_ATTR_TX_POWER:
		/* tx power is carried as a nested array; slot 0 holds the value */
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			return -ENOMEM;
		nla_put_u8(msg, 0, val);
		nla_nest_end(msg, a);
		break;
	default:
		/* all remaining settable attributes are u8 */
		nla_put_u8(msg, attr, val);
		break;
	}

	nla_nest_end(msg, ptr);

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}
174
/* HQA_CMD_SET_CFG / HQA_CMD_SET_TSSI handler: forward a (type, enable)
 * pair to the driver through the MT76_TM_ATTR_CFG nest. SET_TSSI is a
 * fixed alias for type 0, enable 1.
 */
static int
atenl_nl_set_cfg(struct atenl *an, struct atenl_data *data,
		 struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	enum atenl_cmd cmd = data->cmd;
	u32 *v = (u32 *)hdr->data;
	u8 type = ntohl(v[0]);		/* payload words are big-endian */
	u8 enable = ntohl(v[1]);
	void *ptr, *cfg;

	if (cmd == HQA_CMD_SET_TSSI) {
		type = 0;
		enable = 1;
	}

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	cfg = nla_nest_start(msg, MT76_TM_ATTR_CFG);
	if (!cfg)
		return -ENOMEM;

	/* nested slots: 0 = cfg type, 1 = enable flag */
	if (nla_put_u8(msg, 0, type) ||
	    nla_put_u8(msg, 1, enable))
		return -EINVAL;

	nla_nest_end(msg, cfg);

	nla_nest_end(msg, ptr);

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}
210
/* HQA tx setup handler: cache the requested tx time/length for the
 * current band and push the three frame addresses (addr1/2/3) to the
 * driver, substituting a default MAC for "use default" placeholders.
 */
static int
atenl_nl_set_tx(struct atenl *an, struct atenl_data *data,
		struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	u32 *v = (u32 *)hdr->data;
	/* the three 802.11 addresses start at a fixed payload offset */
	u8 *addr1 = hdr->data + 36;
	u8 *addr2 = addr1 + ETH_ALEN;
	u8 *addr3 = addr2 + ETH_ALEN;
	u8 def_mac[ETH_ALEN] = {0x00, 0x11, 0x22, 0x33, 0x44, 0x55};
	void *ptr, *a;

	/* word 7 is either tx time or mpdu length, depending on band mode;
	 * it is only cached here and sent later by atenl_nl_tx() */
	if (get_band_val(an, an->cur_band, use_tx_time))
		set_band_val(an, an->cur_band, tx_time, ntohl(v[7]));
	else
		set_band_val(an, an->cur_band, tx_mpdu_len, ntohl(v[7]));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	a = nla_nest_start(msg, MT76_TM_ATTR_MAC_ADDRS);
	if (!a)
		return -ENOMEM;

	nla_put(msg, 0, ETH_ALEN, use_default_addr(addr1) ? def_mac : addr1);
	nla_put(msg, 1, ETH_ALEN, use_default_addr(addr2) ? def_mac : addr2);
	nla_put(msg, 2, ETH_ALEN, use_default_addr(addr3) ? def_mac : addr3);

	nla_nest_end(msg, a);

	nla_nest_end(msg, ptr);

	/* echo the extended command id back into the response payload */
	*(u32 *)(hdr->data + 2) = data->ext_id;

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}
249
/* HQA start/stop-tx handler: either idle the band (STOP_TX) or program
 * the full tx frame configuration and enter TX_FRAMES state.
 */
static int
atenl_nl_tx(struct atenl *an, struct atenl_data *data, struct atenl_nl_priv *nl_priv)
{
/* bit 31 of the antenna word selects "spatial extension index" mode */
#define USE_SPE_IDX	BIT(31)
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	u32 *v = (u32 *)hdr->data;
	u8 band = ntohl(v[2]);
	void *ptr;
	int ret;

	if (band >= MAX_BAND_NUM)
		return -EINVAL;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	if (data->ext_cmd == HQA_EXT_CMD_STOP_TX) {
		atenl_set_attr_state(an, msg, band, MT76_TM_STATE_IDLE);
	} else {
		u32 tx_count = ntohl(v[3]);
		u8 tx_rate_mode = phy_type_to_attr(ntohl(v[4]));
		u8 aid = ntohl(v[11]);
		u8 sgi = ntohl(v[13]);
		u32 tx_antenna = ntohl(v[14]);
		void *a;

		/* HQA encodes at most 6 GI/LTF combinations (0..5) */
		if (sgi > 5)
			return -EINVAL;

		/* tx_count == 0 means "effectively unlimited" */
		if (!tx_count)
			tx_count = 10000000;

		nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, tx_count);
		nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, ntohl(v[12]));
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, tx_rate_mode);
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, ntohl(v[5]));
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, ntohl(v[7]));
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, ntohl(v[8]));
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, ntohl(v[15]));

		/* tx time / length were cached earlier by atenl_nl_set_tx() */
		if (get_band_val(an, band, use_tx_time))
			nla_put_u32(msg, MT76_TM_ATTR_TX_TIME,
				    get_band_val(an, band, tx_time));
		else if (get_band_val(an, band, tx_mpdu_len))
			nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH,
				    get_band_val(an, band, tx_mpdu_len));

		/* for chips after 7915, tx need to use at least wcid = 1 */
		if (!is_mt7915(an) && !aid)
			aid = 1;
		nla_put_u8(msg, MT76_TM_ATTR_AID, aid);

		if (tx_antenna & USE_SPE_IDX) {
			nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX,
				   tx_antenna & ~USE_SPE_IDX);
		} else {
			nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, 0);
			atenl_set_attr_antenna(an, msg, tx_antenna);
		}

		/* pre-7996 HE modes: translate the HQA sgi index into the
		 * driver's separate (sgi, tx_ltf) pair via he_sgi_groups */
		if (!is_mt7996(an) && tx_rate_mode >= MT76_TM_TX_MODE_HE_SU) {
			u8 ofs = sgi;
			size_t i;

			/* find the first table entry for this tx mode ... */
			for (i = 0; i < ARRAY_SIZE(he_sgi_groups); i++)
				if (he_sgi_groups[i].tx_mode == tx_rate_mode)
					break;

			/* ... then offset into that mode's group */
			if ((i + ofs) >= ARRAY_SIZE(he_sgi_groups))
				return -EINVAL;

			sgi = he_sgi_groups[i + ofs].sgi;
			nla_put_u8(msg, MT76_TM_ATTR_TX_LTF,
				   he_sgi_groups[i + ofs].tx_ltf);
		}
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, sgi);

		/* tx power is a nested array; slot 0 carries the value */
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			return -ENOMEM;
		nla_put_u8(msg, 0, ntohl(v[6]));
		nla_nest_end(msg, a);

		atenl_set_attr_state(an, msg, band, MT76_TM_STATE_TX_FRAMES);
	}

	nla_nest_end(msg, ptr);

	ret = unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
	if (ret)
		return ret;

	/* echo the extended command id back into the response payload */
	*(u32 *)(hdr->data + 2) = data->ext_id;

	return 0;
}
348
/* HQA start/stop-rx handler: either idle the band (STOP_RX) or program
 * the rx configuration, enter RX_FRAMES state and reset the locally
 * cached rx counters.
 */
static int
atenl_nl_rx(struct atenl *an, struct atenl_data *data, struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct atenl_band *anb = &an->anb[an->cur_band];
	struct nl_msg *msg = nl_priv->msg;
	u32 *v = (u32 *)hdr->data;
	u8 band = ntohl(v[2]);
	void *ptr;

	if (band >= MAX_BAND_NUM)
		return -EINVAL;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	if (data->ext_cmd == HQA_EXT_CMD_STOP_RX) {
		atenl_set_attr_state(an, msg, band, MT76_TM_STATE_IDLE);
	} else {
		/* rx parameters live at a fixed offset past the header words */
		v = (u32 *)(hdr->data + 18);

		atenl_set_attr_antenna(an, msg, ntohl(v[0]));
		if (is_mt7996(an)) {
			/* mt7996 additionally takes rate mode/sgi for rx filtering */
			nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE,
				   phy_type_to_attr(ntohl(v[2])));
			nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, ntohl(v[3]));
			nla_put_u8(msg, MT76_TM_ATTR_AID, ntohl(v[4]));
		} else {
			nla_put_u8(msg, MT76_TM_ATTR_AID, ntohl(v[1]));
		}
		atenl_set_attr_state(an, msg, band, MT76_TM_STATE_RX_FRAMES);

		anb->reset_rx_cnt = false;

		/* clear history buffer */
		memset(&anb->rx_stat, 0, sizeof(anb->rx_stat));
	}

	nla_nest_end(msg, ptr);

	/* echo the extended command id back into the response payload */
	*(u32 *)(hdr->data + 2) = data->ext_id;

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}
394
395static int
396atenl_off_ch_scan(struct atenl *an, struct atenl_data *data,
397 struct atenl_nl_priv *nl_priv)
398{
399 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
400 struct nl_msg *msg = nl_priv->msg;
401 u32 *v = (u32 *)hdr->data;
402 u8 ch = ntohl(v[2]);
403 u8 bw = ntohl(v[4]);
404 u8 tx_path = ntohl(v[5]);
405 u8 status = ntohl(v[6]);
406 void *ptr;
407
408 if (!status)
409 ch = 0; /* stop */
410
411 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
412 if (!ptr)
413 return -ENOMEM;
414
415 nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_CH, ch);
416 nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_CENTER_CH,
417 atenl_get_center_channel(bw, CH_BAND_5GHZ, ch));
418 nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_BW, bw);
419 nla_put_u8(msg, MT76_TM_ATTR_OFF_CH_SCAN_PATH, tx_path);
420
421 nla_nest_end(msg, ptr);
422
developer5698c9c2022-05-30 16:40:23 +0800423 *(u32 *)(hdr->data + 2) = data->ext_id;
developer3abe1ad2022-01-24 11:13:32 +0800424
425 return 0;
426}
427
428static int atenl_nl_dump_cb(struct nl_msg *msg, void *arg)
429{
430 struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
431 struct nlattr *tb1[NUM_MT76_TM_ATTRS];
432 struct nlattr *tb2[NUM_MT76_TM_STATS_ATTRS];
433 struct nlattr *nl_attr;
434 int attr = nl_priv->attr;
435 u64 *res = nl_priv->res;
436
437 nl_attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
438 if (!nl_attr) {
developer5698c9c2022-05-30 16:40:23 +0800439 atenl_err("Testdata attribute not found\n");
developer3abe1ad2022-01-24 11:13:32 +0800440 return NL_SKIP;
441 }
442
443 nla_parse_nested(tb1, MT76_TM_ATTR_MAX, nl_attr, testdata_policy);
444 nla_parse_nested(tb2, MT76_TM_STATS_ATTR_MAX,
445 tb1[MT76_TM_ATTR_STATS], stats_policy);
446
447 if (attr == MT76_TM_STATS_ATTR_TX_DONE)
448 *res = nla_get_u32(tb2[MT76_TM_STATS_ATTR_TX_DONE]);
449
450 return NL_SKIP;
451}
452
/* Request a stats dump from the driver and, for TX_DONE, write the
 * resulting counter (big-endian) into the response payload slot of the
 * current band.
 */
static int
atenl_nl_dump_attr(struct atenl *an, struct atenl_data *data,
		   struct atenl_nl_priv *nl_priv)
{
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	void *ptr;
	u64 res = 0;

	/* result is filled in by atenl_nl_dump_cb() */
	nl_priv->res = (void *)&res;

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;
	/* flag attribute: "please include stats in the reply" */
	nla_put_flag(msg, MT76_TM_ATTR_STATS);
	nla_nest_end(msg, ptr);

	unl_genl_request(&nl_priv->unl, msg, atenl_nl_dump_cb, (void *)nl_priv);

	if (nl_priv->attr == MT76_TM_STATS_ATTR_TX_DONE)
		*(u32 *)(hdr->data + 2 + 4 * an->cur_band) = htonl(res);

	return 0;
}
477
478static int atenl_nl_continuous_tx(struct atenl *an,
479 struct atenl_data *data,
480 struct atenl_nl_priv *nl_priv)
481{
482 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
483 struct nl_msg *msg = nl_priv->msg;
484 u32 *v = (u32 *)hdr->data;
485 u8 band = ntohl(v[0]);
486 bool enable = ntohl(v[1]);
487 void *ptr;
488
489 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
490 if (!ptr)
491 return -ENOMEM;
492
493 if (band >= MAX_BAND_NUM)
494 return -EINVAL;
495
496 if (!enable) {
497 int phy = get_band_val(an, band, phy_idx);
498 char cmd[64];
499
500 atenl_set_attr_state(an, msg, band, MT76_TM_STATE_IDLE);
501 nla_nest_end(msg, ptr);
502 unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
503
504 sprintf(cmd, "iw dev mon%d del", phy);
505 system(cmd);
506 sprintf(cmd, "iw phy phy%d interface add mon%d type monitor", phy, phy);
507 system(cmd);
508 sprintf(cmd, "ifconfig mon%d up", phy);
509 system(cmd);
510
511 return 0;
512 }
513
514 if (get_band_val(an, band, rf_mode) != ATENL_RF_MODE_TEST)
515 return 0;
516
517 nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, ntohl(v[2]));
518 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, phy_type_to_attr(ntohl(v[3])));
519 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, ntohl(v[6]));
520
521 atenl_dbg("%s: enable = %d, ant=%u, tx_rate_mode=%u, rate_idx=%u\n",
522 __func__, enable, ntohl(v[2]), ntohl(v[3]), ntohl(v[6]));
523
524 atenl_set_attr_state(an, msg, band, MT76_TM_STATE_TX_CONT);
525
526 nla_nest_end(msg, ptr);
527
528 return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
529}
530
/* Netlink reply callback for HQA "get rx info": serialize the driver's
 * rx statistics into the HQA response payload. The response is a
 * sequence of (type, val, len) headers followed by band / per-path /
 * per-user / common stat structs, all big-endian.
 */
static int atenl_nl_get_rx_info_cb(struct nl_msg *msg, void *arg)
{
	struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
	struct atenl *an = nl_priv->an;
	struct atenl_band *anb = &an->anb[an->cur_band];
	struct atenl_data *data = nl_priv->res;
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct atenl_rx_info_hdr *rx_hdr;
	struct atenl_rx_info_band *rx_band;
	struct atenl_rx_info_user *rx_user;
	struct atenl_rx_info_path *rx_path;
	struct atenl_rx_info_comm *rx_comm;
	struct nlattr *tb1[NUM_MT76_TM_ATTRS];
	struct nlattr *tb2[NUM_MT76_TM_STATS_ATTRS];
	struct nlattr *tb3[NUM_MT76_TM_RX_ATTRS];
	struct nlattr *nl_attr, *cur;
	struct atenl_rx_stat rx_cur, rx_diff = {};
	u32 rcpi[4] = {};
	u32 type_num = htonl(4);	/* four sections follow */
	s32 ib_rssi[4] = {}, wb_rssi[4] = {};
	u8 path = an->anb[an->cur_band].chainmask;
	u8 path_num = __builtin_popcount(path);
	u8 *buf = hdr->data + 2;
	int i, rem;

	*(u32 *)buf = type_num;
	buf += sizeof(type_num);

/* emit one section header (type, value, payload length) and advance buf */
#define RX_PUT_HDR(_hdr, _type, _val, _size) do {	\
		_hdr->type = htonl(_type);		\
		_hdr->val = htonl(_val);		\
		_hdr->len = htonl(_size);		\
		buf += sizeof(*_hdr);			\
	} while (0)

	/* section 0: per-band counters */
	rx_hdr = (struct atenl_rx_info_hdr *)buf;
	RX_PUT_HDR(rx_hdr, 0, BIT(an->cur_band), sizeof(*rx_band));
	rx_band = (struct atenl_rx_info_band *)buf;
	buf += sizeof(*rx_band);

	/* section 1: per-rx-path RSSI/RCPI */
	rx_hdr = (struct atenl_rx_info_hdr *)buf;
	RX_PUT_HDR(rx_hdr, 1, path, path_num * sizeof(*rx_path));
	rx_path = (struct atenl_rx_info_path *)buf;
	buf += path_num * sizeof(*rx_path);

	/* section 2: 16 per-user entries */
	rx_hdr = (struct atenl_rx_info_hdr *)buf;
	RX_PUT_HDR(rx_hdr, 2, GENMASK(15, 0), 16 * sizeof(*rx_user));
	rx_user = (struct atenl_rx_info_user *)buf;
	buf += 16 * sizeof(*rx_user);

	/* section 3: common info */
	rx_hdr = (struct atenl_rx_info_hdr *)buf;
	RX_PUT_HDR(rx_hdr, 3, BIT(0), sizeof(*rx_comm));
	rx_comm = (struct atenl_rx_info_comm *)buf;
	buf += sizeof(*rx_comm);

	hdr->len = htons(buf - hdr->data);

	nl_attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
	if (!nl_attr) {
		atenl_err("Testdata attribute not found\n");
		return NL_SKIP;
	}

	nla_parse_nested(tb1, MT76_TM_ATTR_MAX, nl_attr, testdata_policy);
	nla_parse_nested(tb2, MT76_TM_STATS_ATTR_MAX,
			 tb1[MT76_TM_ATTR_STATS], stats_policy);

	/* NOTE(review): missing STATS / counter attributes would be
	 * dereferenced here — assumes the driver always includes them */
	rx_cur.total = nla_get_u64(tb2[MT76_TM_STATS_ATTR_RX_PACKETS]);
	rx_cur.err_cnt = nla_get_u64(tb2[MT76_TM_STATS_ATTR_RX_FCS_ERROR]);
	rx_cur.len_mismatch = nla_get_u64(tb2[MT76_TM_STATS_ATTR_RX_LEN_MISMATCH]);
	rx_cur.ok_cnt = rx_cur.total - rx_cur.err_cnt - rx_cur.len_mismatch;

	/* report deltas against the cached snapshot unless a reset was
	 * requested while rx is not running */
	if (!anb->reset_rx_cnt ||
	    get_band_val(an, an->cur_band, cur_state) == MT76_TM_STATE_RX_FRAMES) {
#define RX_COUNT_DIFF(_field)	\
	rx_diff._field = (rx_cur._field) - (anb->rx_stat._field);
		RX_COUNT_DIFF(total);
		RX_COUNT_DIFF(err_cnt);
		RX_COUNT_DIFF(len_mismatch);
		RX_COUNT_DIFF(ok_cnt);
#undef RX_COUNT_DIFF

		memcpy(&anb->rx_stat, &rx_cur, sizeof(anb->rx_stat));
	}

	rx_band->mac_rx_mdrdy_cnt = htonl((u32)rx_diff.total);
	rx_band->mac_rx_fcs_err_cnt = htonl((u32)rx_diff.err_cnt);
	rx_band->mac_rx_fcs_ok_cnt = htonl((u32)rx_diff.ok_cnt);
	rx_band->mac_rx_len_mismatch = htonl((u32)rx_diff.len_mismatch);
	rx_user->fcs_error_cnt = htonl((u32)rx_diff.err_cnt);

	nla_parse_nested(tb3, MT76_TM_RX_ATTR_MAX,
			 tb2[MT76_TM_STATS_ATTR_LAST_RX], rx_policy);

	rx_user->freq_offset = htonl(nla_get_u32(tb3[MT76_TM_RX_ATTR_FREQ_OFFSET]));
	rx_user->snr = htonl(nla_get_u8(tb3[MT76_TM_RX_ATTR_SNR]));

	/* collect up to 4 per-path RCPI values */
	i = 0;
	nla_for_each_nested(cur, tb3[MT76_TM_RX_ATTR_RCPI], rem) {
		if (nla_len(cur) != 1 || i >= 4)
			break;

		rcpi[i++] = nla_get_u8(cur);
	}

	/* in-band RSSI, signed 8-bit per path */
	i = 0;
	nla_for_each_nested(cur, tb3[MT76_TM_RX_ATTR_IB_RSSI], rem) {
		if (nla_len(cur) != 1 || i >= 4)
			break;

		ib_rssi[i++] = (s8)nla_get_u8(cur);
	}

	/* wide-band RSSI, signed 8-bit per path */
	i = 0;
	nla_for_each_nested(cur, tb3[MT76_TM_RX_ATTR_WB_RSSI], rem) {
		if (nla_len(cur) != 1 || i >= 4)
			break;

		wb_rssi[i++] = (s8)nla_get_u8(cur);
	}

	for (i = 0; i < 4; i++) {
		struct atenl_rx_info_path *path = &rx_path[i];

		path->rcpi = htonl(rcpi[i]);
		path->rssi = htonl(to_rssi((u8)rcpi[i]));
		path->fagc_ib_rssi = htonl(ib_rssi[i]);
		path->fagc_wb_rssi = htonl(wb_rssi[i]);
	}

	return NL_SKIP;
}
663
664static int atenl_nl_get_rx_info(struct atenl *an, struct atenl_data *data,
665 struct atenl_nl_priv *nl_priv)
666{
667 struct nl_msg *msg = nl_priv->msg;
668 void *ptr;
669
670 nl_priv->an = an;
671 nl_priv->res = (void *)data;
672
673 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
674 if (!ptr)
675 return -ENOMEM;
676
677 nla_put_flag(msg, MT76_TM_ATTR_STATS);
678
679 nla_nest_end(msg, ptr);
680
681 return unl_genl_request(&nl_priv->unl, msg, atenl_nl_get_rx_info_cb,
682 (void *)nl_priv);
683}
684
685static int
686atenl_nl_set_ru(struct atenl *an, struct atenl_data *data,
687 struct atenl_nl_priv *nl_priv)
688{
689 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
690 struct nl_msg *msg;
691 u32 *v = (u32 *)(hdr->data + 4);
692 u32 seg0_num = ntohl(v[0]); /* v[1] seg1_num unused */
693 void *ptr;
694 int i, ret;
695
696 if (seg0_num > 8)
697 return -EINVAL;
698
699 for (i = 0, v = &v[2]; i < seg0_num; i++, v += 11) {
700 u32 ru_alloc = ntohl(v[1]);
701 u32 aid = ntohl(v[2]);
702 u32 ru_idx = ntohl(v[3]);
703 u32 mcs = ntohl(v[4]);
704 u32 ldpc = ntohl(v[5]);
705 u32 nss = ntohl(v[6]);
706 u32 tx_length = ntohl(v[8]);
707 char buf[10];
708
709 if (unl_genl_init(&nl_priv->unl, "nl80211") < 0) {
developer5698c9c2022-05-30 16:40:23 +0800710 atenl_err("Failed to connect to nl80211\n");
developer3abe1ad2022-01-24 11:13:32 +0800711 return 2;
712 }
713
714 msg = unl_genl_msg(&nl_priv->unl, NL80211_CMD_TESTMODE, false);
715 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));
716
717 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
718 if (!ptr)
719 return -ENOMEM;
720
721 if (i == 0)
722 atenl_set_attr_state(an, msg, an->cur_band, MT76_TM_STATE_IDLE);
723
724 nla_put_u8(msg, MT76_TM_ATTR_AID, aid);
725 nla_put_u8(msg, MT76_TM_ATTR_RU_IDX, ru_idx);
726 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, mcs);
727 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, ldpc);
728 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, nss);
developer93dadcc2022-07-13 10:25:35 +0800729 nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, tx_length);
developer3abe1ad2022-01-24 11:13:32 +0800730
731 ret = snprintf(buf, sizeof(buf), "%x", ru_alloc);
732 if (snprintf_error(sizeof(buf), ret))
733 return -EINVAL;
734
735 nla_put_u8(msg, MT76_TM_ATTR_RU_ALLOC, strtol(buf, NULL, 2));
736
737 nla_nest_end(msg, ptr);
738
739 unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
740
741 unl_free(&nl_priv->unl);
742 }
743
744 return 0;
745}
746
/* Prepare the driver for iBF calibration on @band: program the cached
 * MCS/antenna configuration and issue TXBF_ACT_INIT on a temporary
 * nl80211 socket.
 */
static int
atenl_nl_ibf_init(struct atenl *an, u8 band)
{
	struct atenl_nl_priv nl_priv = {};
	struct nl_msg *msg;
	void *ptr, *a;
	int ret;

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return 2;
	}

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, band, phy_idx));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr) {
		ret = -ENOMEM;
		goto out;
	}

	/* ibf_mcs / ibf_ant were cached by TXBF_ACT_MCS handling */
	nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, MT76_TM_TX_MODE_HT);
	nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, an->ibf_mcs);
	nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, an->ibf_ant);
	nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_INIT);

	/* TXBF parameter nest: slot 0 = 1 (init request) */
	a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
	if (!a) {
		ret = -ENOMEM;
		goto out;
	}
	nla_put_u16(msg, 0, 1);
	nla_nest_end(msg, a);

	nla_nest_end(msg, ptr);

	ret = unl_genl_request(&nl_priv.unl, msg, NULL, NULL);

out:
	unl_free(&nl_priv.unl);
	return ret;
}
790
/* Ask the driver to commit the iBF calibration result to EEPROM
 * (TXBF_ACT_E2P_UPDATE), using a temporary nl80211 socket.
 */
static int
atenl_nl_ibf_e2p_update(struct atenl *an)
{
	struct atenl_nl_priv nl_priv = {};
	struct nl_msg *msg;
	void *ptr, *a;
	int ret;

	if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
		atenl_err("Failed to connect to nl80211\n");
		return 2;
	}

	msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
	nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr) {
		ret = -ENOMEM;
		goto out;
	}

	nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_E2P_UPDATE);
	/* TXBF parameter nest: slot 0 = 0 (no extra argument) */
	a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
	if (!a) {
		ret = -ENOMEM;
		goto out;
	}
	nla_put_u16(msg, 0, 0);
	nla_nest_end(msg, a);

	nla_nest_end(msg, ptr);

	ret = unl_genl_request(&nl_priv.unl, msg, NULL, NULL);

out:
	unl_free(&nl_priv.unl);
	return ret;
}
830
developer44ae8e92023-07-21 13:42:14 +0800831void
developer5698c9c2022-05-30 16:40:23 +0800832atenl_get_ibf_cal_result(struct atenl *an)
833{
834 u16 offset;
835
836 if (an->adie_id == 0x7975)
837 offset = 0x651;
838 else if (an->adie_id == 0x7976)
839 offset = 0x60a;
840
841 /* per group size = 40, for group 0-8 */
842 atenl_eeprom_read_from_driver(an, offset, 40 * 9);
843}
844
/* HQA iBF "set value" dispatcher: translate a TXBF_ACT_* action plus up
 * to 8 u16 parameters into the corresponding mt76 testmode TXBF request.
 */
static int
atenl_nl_ibf_set_val(struct atenl *an, struct atenl_data *data,
		     struct atenl_nl_priv *nl_priv)
{
#define MT_IBF(_act)	MT76_TM_TXBF_ACT_##_act
	/* HQA action -> mt76 TXBF action for the shared nest-building path */
	static const u8 bf_act_map[] = {
		[TXBF_ACT_IBF_PHASE_COMP] = MT_IBF(PHASE_COMP),
		[TXBF_ACT_IBF_PROF_UPDATE] = MT_IBF(IBF_PROF_UPDATE),
		[TXBF_ACT_EBF_PROF_UPDATE] = MT_IBF(EBF_PROF_UPDATE),
		[TXBF_ACT_IBF_PHASE_CAL] = MT_IBF(PHASE_CAL),
	};
#undef MT_IBF
	struct atenl_cmd_hdr *hdr = atenl_hdr(data);
	struct nl_msg *msg = nl_priv->msg;
	u32 *v = (u32 *)(hdr->data + 4);
	u32 action = ntohl(v[0]);
	u16 val[8], is_atenl = 1;
	u8 tmp_ant;
	void *ptr, *a;
	char cmd[64];
	int i;

	/* the 8 parameter words follow the action word, big-endian */
	for (i = 0; i < 8; i++)
		val[i] = ntohl(v[i + 1]);

	atenl_dbg("%s: action = %u, val = %u, %u, %u, %u, %u\n",
		  __func__, action, val[0], val[1], val[2], val[3], val[4]);

	ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
	if (!ptr)
		return -ENOMEM;

	switch (action) {
	case TXBF_ACT_CHANNEL:
		an->cur_band = val[1];
		/* a sanity to prevent script band idx error */
		if (val[0] > 14)
			an->cur_band = 1;
		atenl_nl_ibf_init(an, an->cur_band);
		atenl_set_channel(an, 0, an->cur_band, val[0], 0, 0);

		nla_put_u8(msg, MT76_TM_ATTR_AID, 0);
		nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_UPDATE_CH);
		a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
		if (!a)
			return -ENOMEM;
		nla_put_u16(msg, 0, 0);
		nla_nest_end(msg, a);
		break;
	case TXBF_ACT_MCS:
		/* derive an antenna mask (all-ones) from the nss implied by mcs */
		tmp_ant = (1 << DIV_ROUND_UP(val[0], 8)) - 1 ?: 1;
		/* sometimes the correct band idx will be set after this action,
		 * so maintain a temp variable to allow mcs update in anthor action.
		 */
		an->ibf_mcs = val[0];
		an->ibf_ant = tmp_ant;
		nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, an->ibf_mcs);
		nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, an->ibf_ant);
		break;
	case TXBF_ACT_TX_ANT:
		nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, val[0]);
		break;
	case TXBF_ACT_RX_START:
		atenl_set_attr_state(an, msg, an->cur_band, MT76_TM_STATE_RX_FRAMES);
		break;
	case TXBF_ACT_RX_ANT:
		nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, val[0]);
		break;
	case TXBF_ACT_TX_PKT:
		nla_put_u8(msg, MT76_TM_ATTR_AID, val[1]);
		nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_TX_PREP);
		/* val[2] == 0 means "transmit until told to stop" */
		if (!val[2])
			nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, 0xFFFFFFFF);
		else
			nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, val[2]);
		nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, 1024);
		a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
		if (!a)
			return -ENOMEM;

		for (i = 0; i < 5; i++)
			nla_put_u16(msg, i, val[i]);
		nla_nest_end(msg, a);

		atenl_set_attr_state(an, msg, an->cur_band, MT76_TM_STATE_TX_FRAMES);
		break;
	case TXBF_ACT_IBF_PHASE_COMP:
		nla_put_u8(msg, MT76_TM_ATTR_AID, 1);
		/* fallthrough: shares the generic TXBF nest below */
	case TXBF_ACT_IBF_PROF_UPDATE:
	case TXBF_ACT_EBF_PROF_UPDATE:
	case TXBF_ACT_IBF_PHASE_CAL:
		nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, bf_act_map[action]);
		a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
		if (!a)
			return -ENOMEM;
		/* Note: litepoint may send random number for lna_gain_level, reset to 0 */
		if (action == TXBF_ACT_IBF_PHASE_CAL)
			val[4] = 0;
		for (i = 0; i < 5; i++)
			nla_put_u16(msg, i, val[i]);
		/* Used to distinguish between command mode and HQADLL mode */
		nla_put_u16(msg, 5, is_atenl);
		nla_nest_end(msg, a);
		break;
	case TXBF_ACT_IBF_PHASE_E2P_UPDATE:
		/* commit calibration to EEPROM, then re-init the driver state */
		atenl_nl_ibf_e2p_update(an);
		atenl_get_ibf_cal_result(an);

		nla_put_u8(msg, MT76_TM_ATTR_AID, 0);
		nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_INIT);

		a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
		if (!a)
			return -ENOMEM;
		nla_put_u16(msg, 0, 0);
		nla_nest_end(msg, a);
		break;
	case TXBF_ACT_INIT:
	case TXBF_ACT_POWER:
	default:
		/* no-op actions: still send an (empty) testdata nest */
		break;
	}

	nla_nest_end(msg, ptr);

	/* echo the extended command id back into the response payload */
	*(u32 *)(hdr->data + 2) = data->ext_id;

	return unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
}
974
975static int
976atenl_nl_ibf_get_status(struct atenl *an, struct atenl_data *data,
977 struct atenl_nl_priv *nl_priv)
978{
979 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
980 u32 status = htonl(1);
981
developer5698c9c2022-05-30 16:40:23 +0800982 *(u32 *)(hdr->data + 2) = data->ext_id;
developer3abe1ad2022-01-24 11:13:32 +0800983 memcpy(hdr->data + 6, &status, 4);
984
985 return 0;
986}
987
988static int
989atenl_nl_ibf_profile_update_all(struct atenl *an, struct atenl_data *data,
990 struct atenl_nl_priv *nl_priv)
991{
992 struct atenl_cmd_hdr *hdr = atenl_hdr(data);
993 struct nl_msg *msg;
994 void *ptr, *a;
995 u32 *v = (u32 *)(hdr->data + 4);
996 u16 pfmu_idx = ntohl(v[0]);
997 int i;
998
999 for (i = 0, v = &v[5]; i < 64; i++, v += 5) {
1000 int j;
1001
1002 if (unl_genl_init(&nl_priv->unl, "nl80211") < 0) {
developer5698c9c2022-05-30 16:40:23 +08001003 atenl_err("Failed to connect to nl80211\n");
developer3abe1ad2022-01-24 11:13:32 +08001004 return 2;
1005 }
1006
1007 msg = unl_genl_msg(&nl_priv->unl, NL80211_CMD_TESTMODE, false);
1008 nla_put_u32(msg, NL80211_ATTR_WIPHY,
1009 get_band_val(an, an->cur_band, phy_idx));
1010
1011 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1012 if (!ptr)
1013 return -ENOMEM;
1014
1015 nla_put_u8(msg, MT76_TM_ATTR_TXBF_ACT, MT76_TM_TXBF_ACT_PROF_UPDATE_ALL);
1016 a = nla_nest_start(msg, MT76_TM_ATTR_TXBF_PARAM);
1017 if (!a)
1018 return -ENOMEM;
1019 nla_put_u16(msg, 0, pfmu_idx);
1020
1021 for (j = 0; j < 5; j++)
1022 nla_put_u16(msg, j + 1, ntohl(v[j]));
1023 nla_nest_end(msg, a);
1024
1025 nla_nest_end(msg, ptr);
1026
1027 unl_genl_request(&nl_priv->unl, msg, NULL, NULL);
1028
1029 unl_free(&nl_priv->unl);
1030 }
1031
developer5698c9c2022-05-30 16:40:23 +08001032 *(u32 *)(hdr->data + 2) = data->ext_id;
developer3abe1ad2022-01-24 11:13:32 +08001033
1034 return 0;
1035}
1036
/* Dispatch table for plain HQA commands (data->cmd).
 *
 * Each entry selects exactly one handling strategy in atenl_nl_process():
 *   .ops  - dedicated handler function,
 *   .dump - testmode stats attribute to fetch via a dump request
 *           (also doubles as the "use a dump request" flag),
 *   .set  - testmode attribute to set via atenl_nl_set_attr().
 */
#define NL_OPS_GROUP(cmd, ...) [HQA_CMD_##cmd] = { __VA_ARGS__ }
static const struct atenl_nl_ops nl_ops[] = {
	NL_OPS_GROUP(SET_TX_PATH, .set=MT76_TM_ATTR_TX_ANTENNA),
	NL_OPS_GROUP(SET_TX_POWER, .set=MT76_TM_ATTR_TX_POWER),
	NL_OPS_GROUP(SET_RX_PATH, .set=MT76_TM_ATTR_TX_ANTENNA),
	NL_OPS_GROUP(SET_FREQ_OFFSET, .set=MT76_TM_ATTR_FREQ_OFFSET),
	NL_OPS_GROUP(SET_CFG, .ops=atenl_nl_set_cfg),
	NL_OPS_GROUP(SET_TSSI, .ops=atenl_nl_set_cfg),
	NL_OPS_GROUP(CONTINUOUS_TX, .ops=atenl_nl_continuous_tx),
	NL_OPS_GROUP(GET_TX_INFO, .dump=MT76_TM_STATS_ATTR_TX_DONE),
	/* GET_RX_INFO uses a custom handler but still needs a dump request */
	NL_OPS_GROUP(GET_RX_INFO, .ops=atenl_nl_get_rx_info, .dump=true),
	NL_OPS_GROUP(SET_RU, .ops=atenl_nl_set_ru),
};
#undef NL_OPS_GROUP
1051
/* Dispatch table for extended HQA commands (data->ext_cmd).
 * Extended commands always use a dedicated .ops handler.
 */
#define NL_OPS_EXT(cmd, ...) [HQA_EXT_CMD_##cmd] = { __VA_ARGS__ }
static const struct atenl_nl_ops nl_ops_ext[] = {
	NL_OPS_EXT(SET_TX, .ops=atenl_nl_set_tx),
	NL_OPS_EXT(START_TX, .ops=atenl_nl_tx),
	NL_OPS_EXT(STOP_TX, .ops=atenl_nl_tx),
	NL_OPS_EXT(START_RX, .ops=atenl_nl_rx),
	NL_OPS_EXT(STOP_RX, .ops=atenl_nl_rx),
	NL_OPS_EXT(OFF_CH_SCAN, .ops=atenl_off_ch_scan),
	NL_OPS_EXT(IBF_SET_VAL, .ops=atenl_nl_ibf_set_val),
	NL_OPS_EXT(IBF_GET_STATUS, .ops=atenl_nl_ibf_get_status),
	NL_OPS_EXT(IBF_PROF_UPDATE_ALL, .ops=atenl_nl_ibf_profile_update_all),
};
#undef NL_OPS_EXT
1065
1066int atenl_nl_process(struct atenl *an, struct atenl_data *data)
1067{
1068 struct atenl_nl_priv nl_priv = {};
1069 const struct atenl_nl_ops *ops;
1070 struct nl_msg *msg;
1071 int ret = 0;
1072
1073 if (data->ext_cmd != 0)
1074 ops = &nl_ops_ext[data->ext_cmd];
1075 else
1076 ops = &nl_ops[data->cmd];
1077
1078 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
developer5698c9c2022-05-30 16:40:23 +08001079 atenl_err("Failed to connect to nl80211\n");
developer3abe1ad2022-01-24 11:13:32 +08001080 return -1;
1081 }
1082
1083 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, !!ops->dump);
1084 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));
1085 nl_priv.msg = msg;
1086
1087 if (ops->ops) {
1088 ret = ops->ops(an, data, &nl_priv);
1089 } else if (ops->dump) {
1090 nl_priv.attr = ops->dump;
1091 ret = atenl_nl_dump_attr(an, data, &nl_priv);
1092 } else {
1093 nl_priv.attr = ops->set;
1094 ret = atenl_nl_set_attr(an, data, &nl_priv);
1095 }
1096
1097 if (ret)
developer5698c9c2022-05-30 16:40:23 +08001098 atenl_err("command process error: 0x%x (0x%x)\n", data->cmd_id, data->ext_id);
developer3abe1ad2022-01-24 11:13:32 +08001099
1100 unl_free(&nl_priv.unl);
1101
1102 return ret;
1103}
1104
1105int atenl_nl_process_many(struct atenl *an, struct atenl_data *data)
1106{
1107 struct atenl_nl_priv nl_priv = {};
1108 const struct atenl_nl_ops *ops;
1109 int ret = 0;
1110
1111 if (data->ext_cmd != 0)
1112 ops = &nl_ops_ext[data->ext_cmd];
1113 else
1114 ops = &nl_ops[data->cmd];
1115
1116 if (ops->ops)
1117 ret = ops->ops(an, data, &nl_priv);
1118
1119 return ret;
1120}
1121
1122int atenl_nl_set_state(struct atenl *an, u8 band,
1123 enum mt76_testmode_state state)
1124{
1125 struct atenl_nl_priv nl_priv = {};
1126 struct nl_msg *msg;
1127 void *ptr;
1128
1129 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
developer5698c9c2022-05-30 16:40:23 +08001130 atenl_err("Failed to connect to nl80211\n");
developer3abe1ad2022-01-24 11:13:32 +08001131 return 2;
1132 }
1133
1134 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
1135 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, band, phy_idx));
1136
1137 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1138 if (!ptr)
1139 return -ENOMEM;
1140
1141 atenl_set_attr_state(an, msg, band, state);
1142
1143 nla_nest_end(msg, ptr);
1144
1145 unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
1146
1147 unl_free(&nl_priv.unl);
1148
1149 return 0;
1150}
1151
developer5698c9c2022-05-30 16:40:23 +08001152int atenl_nl_set_aid(struct atenl *an, u8 band, u8 aid)
1153{
1154 struct atenl_nl_priv nl_priv = {};
1155 struct nl_msg *msg;
1156 void *ptr;
1157
1158 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
1159 atenl_err("Failed to connect to nl80211\n");
1160 return 2;
1161 }
1162
1163 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
1164 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, band, phy_idx));
1165
1166 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1167 if (!ptr)
1168 return -ENOMEM;
1169
1170 nla_put_u8(msg, MT76_TM_ATTR_AID, aid);
1171
1172 nla_nest_end(msg, ptr);
1173
1174 unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
1175
1176 unl_free(&nl_priv.unl);
1177
1178 return 0;
1179}
1180
developer3abe1ad2022-01-24 11:13:32 +08001181static int atenl_nl_check_mtd_cb(struct nl_msg *msg, void *arg)
1182{
1183 struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
1184 struct atenl *an = nl_priv->an;
1185 struct nlattr *tb[NUM_MT76_TM_ATTRS];
1186 struct nlattr *attr;
1187
1188 attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
1189 if (!attr)
1190 return NL_SKIP;
1191
1192 nla_parse_nested(tb, MT76_TM_ATTR_MAX, attr, testdata_policy);
1193 if (!tb[MT76_TM_ATTR_MTD_PART] || !tb[MT76_TM_ATTR_MTD_OFFSET])
1194 return NL_SKIP;
1195
1196 an->mtd_part = strdup(nla_get_string(tb[MT76_TM_ATTR_MTD_PART]));
1197 an->mtd_offset = nla_get_u32(tb[MT76_TM_ATTR_MTD_OFFSET]);
developerf90c9af2022-12-28 22:40:23 +08001198 an->band_idx = nla_get_u32(tb[MT76_TM_ATTR_BAND_IDX]);
developer3abe1ad2022-01-24 11:13:32 +08001199
1200 return NL_SKIP;
1201}
1202
1203int atenl_nl_check_mtd(struct atenl *an)
1204{
1205 struct atenl_nl_priv nl_priv = { .an = an };
1206 struct nl_msg *msg;
1207
1208 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
developer5698c9c2022-05-30 16:40:23 +08001209 atenl_err("Failed to connect to nl80211\n");
developer3abe1ad2022-01-24 11:13:32 +08001210 return 2;
1211 }
1212
1213 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, true);
1214 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));
1215 unl_genl_request(&nl_priv.unl, msg, atenl_nl_check_mtd_cb, (void *)&nl_priv);
1216
1217 unl_free(&nl_priv.unl);
1218
1219 return 0;
1220}
1221
1222int atenl_nl_write_eeprom(struct atenl *an, u32 offset, u8 *val, int len)
1223{
1224 struct atenl_nl_priv nl_priv = {};
1225 struct nl_msg *msg;
1226 void *ptr, *a;
1227 int i;
1228
1229 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
developer5698c9c2022-05-30 16:40:23 +08001230 atenl_err("Failed to connect to nl80211\n");
developer3abe1ad2022-01-24 11:13:32 +08001231 return 2;
1232 }
1233
1234 if (len > 16)
1235 return -EINVAL;
1236
1237 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
1238 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));
1239
1240 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1241 if (!ptr)
1242 return -ENOMEM;
1243
1244 nla_put_u8(msg, MT76_TM_ATTR_EEPROM_ACTION,
1245 MT76_TM_EEPROM_ACTION_UPDATE_DATA);
1246 nla_put_u32(msg, MT76_TM_ATTR_EEPROM_OFFSET, offset);
1247
1248 a = nla_nest_start(msg, MT76_TM_ATTR_EEPROM_VAL);
1249 if (!a)
1250 return -ENOMEM;
1251
1252 for (i = 0; i < len; i++)
1253 if (nla_put_u8(msg, i, val[i]))
1254 goto out;
1255
1256 nla_nest_end(msg, a);
1257
1258 nla_nest_end(msg, ptr);
1259
1260 unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
1261
1262 unl_free(&nl_priv.unl);
1263
1264out:
1265 return 0;
1266}
1267
developer9b7cdad2022-03-10 14:24:55 +08001268int atenl_nl_write_efuse_all(struct atenl *an)
developer3abe1ad2022-01-24 11:13:32 +08001269{
1270 struct atenl_nl_priv nl_priv = {};
1271 struct nl_msg *msg;
1272 void *ptr;
1273
1274 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
developer5698c9c2022-05-30 16:40:23 +08001275 atenl_err("Failed to connect to nl80211\n");
developer3abe1ad2022-01-24 11:13:32 +08001276 return 2;
1277 }
1278
1279 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
1280 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));
1281
1282 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1283 if (!ptr)
1284 return -ENOMEM;
1285
1286 nla_put_u8(msg, MT76_TM_ATTR_EEPROM_ACTION,
1287 MT76_TM_EEPROM_ACTION_WRITE_TO_EFUSE);
1288
1289 nla_nest_end(msg, ptr);
1290
1291 unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
1292
1293 unl_free(&nl_priv.unl);
1294
1295 return 0;
1296}
1297
1298int atenl_nl_update_buffer_mode(struct atenl *an)
1299{
1300 struct atenl_nl_priv nl_priv = {};
1301 struct nl_msg *msg;
1302 void *ptr;
1303
1304 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
developer5698c9c2022-05-30 16:40:23 +08001305 atenl_err("Failed to connect to nl80211\n");
developer3abe1ad2022-01-24 11:13:32 +08001306 return 2;
1307 }
1308
1309 msg = unl_genl_msg(&nl_priv.unl, NL80211_CMD_TESTMODE, false);
1310 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, 0, phy_idx));
1311
1312 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1313 if (!ptr)
1314 return -ENOMEM;
1315
1316 nla_put_u8(msg, MT76_TM_ATTR_EEPROM_ACTION,
1317 MT76_TM_EEPROM_ACTION_UPDATE_BUFFER_MODE);
1318
1319 nla_nest_end(msg, ptr);
1320
1321 unl_genl_request(&nl_priv.unl, msg, NULL, NULL);
1322
1323 unl_free(&nl_priv.unl);
1324
1325 return 0;
1326}
1327
developer071927d2022-08-31 20:39:29 +08001328static int atenl_nl_precal_sync_from_driver_cb(struct nl_msg *msg, void *arg)
1329{
1330 struct atenl_nl_priv *nl_priv = (struct atenl_nl_priv *)arg;
1331 struct atenl *an = nl_priv->an;
1332 struct nlattr *tb[NUM_MT76_TM_ATTRS];
1333 struct nlattr *attr, *cur;
1334 int i, rem, prek_offset = nl_priv->attr;
1335
1336
1337 attr = unl_find_attr(&nl_priv->unl, msg, NL80211_ATTR_TESTDATA);
1338 if (!attr)
1339 return NL_SKIP;
1340
1341 nla_parse_nested(tb, MT76_TM_ATTR_MAX, attr, testdata_policy);
1342
1343 if (!tb[MT76_TM_ATTR_PRECAL_INFO] && !tb[MT76_TM_ATTR_PRECAL]) {
1344 atenl_info("No Pre cal data or info!\n");
1345 return NL_SKIP;
1346 }
1347
1348 if (tb[MT76_TM_ATTR_PRECAL_INFO]) {
1349 i = 0;
1350 nla_for_each_nested(cur, tb[MT76_TM_ATTR_PRECAL_INFO], rem) {
1351 an->cal_info[i] = (u32) nla_get_u32(cur);
1352 i++;
1353 }
1354 return NL_SKIP;
1355 }
1356
1357 if (tb[MT76_TM_ATTR_PRECAL] && an->cal) {
1358 i = prek_offset;
1359 nla_for_each_nested(cur, tb[MT76_TM_ATTR_PRECAL], rem) {
1360 an->cal[i] = (u8) nla_get_u8(cur);
1361 i++;
1362 }
1363 return NL_SKIP;
1364 }
1365 atenl_info("No data found for pre-cal!\n");
1366
1367 return NL_SKIP;
1368}
1369
1370static int
1371atenl_nl_precal_sync_partition(struct atenl_nl_priv *nl_priv, enum mt76_testmode_attr attr,
1372 int prek_type, int prek_offset)
1373{
1374 int ret;
1375 void *ptr;
1376 struct nl_msg *msg;
1377 struct atenl *an = nl_priv->an;
1378
1379 msg = unl_genl_msg(&(nl_priv->unl), NL80211_CMD_TESTMODE, true);
1380 nla_put_u32(msg, NL80211_ATTR_WIPHY, get_band_val(an, an->cur_band, phy_idx));
1381 nl_priv->msg = msg;
1382 nl_priv->attr = prek_offset;
1383
1384 ptr = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
1385 if (!ptr)
1386 return -ENOMEM;
1387
1388 nla_put_flag(msg, attr);
1389 if (attr == MT76_TM_ATTR_PRECAL)
1390 nla_put_u8(msg, MT76_TM_ATTR_PRECAL_INFO, prek_type);
1391 nla_nest_end(msg, ptr);
1392
1393 ret = unl_genl_request(&(nl_priv->unl), msg, atenl_nl_precal_sync_from_driver_cb, (void *)nl_priv);
1394
1395 if (ret) {
1396 atenl_err("command process error!\n");
1397 return ret;
1398 }
1399
1400 return 0;
1401}
1402
1403int atenl_nl_precal_sync_from_driver(struct atenl *an, enum prek_ops ops)
1404{
developer11f4a0b2023-03-31 17:43:25 +08001405#define GROUP_IND_MASK BIT(0)
1406#define GROUP_IND_MASK_7996 GENMASK(2, 0)
1407#define DPD_IND_MASK GENMASK(3, 1)
1408#define DPD_IND_MASK_7996 GENMASK(5, 3)
developer071927d2022-08-31 20:39:29 +08001409 int ret;
1410 u32 i, times, group_size, dpd_size, total_size, transmit_size, offs;
developer11f4a0b2023-03-31 17:43:25 +08001411 u32 dpd_per_chan_size, dpd_chan_ratio[3], total_ratio;
1412 u32 size, base, base_idx, dpd_base_map, *size_ptr;
1413 u8 cal_indicator, group_ind_mask, dpd_ind_mask, *precal_info;
developer071927d2022-08-31 20:39:29 +08001414 struct atenl_nl_priv nl_priv = { .an = an };
1415
1416 offs = an->eeprom_prek_offs;
1417 cal_indicator = an->eeprom_data[offs];
developer11f4a0b2023-03-31 17:43:25 +08001418 group_ind_mask = is_mt7996(an) ? GROUP_IND_MASK_7996 : GROUP_IND_MASK;
1419 dpd_ind_mask = is_mt7996(an) ? DPD_IND_MASK_7996 : DPD_IND_MASK;
developer071927d2022-08-31 20:39:29 +08001420
1421 if (cal_indicator) {
1422 precal_info = an->eeprom_data + an->eeprom_size;
1423 memcpy(an->cal_info, precal_info, PRE_CAL_INFO);
1424 group_size = an->cal_info[0];
1425 dpd_size = an->cal_info[1];
1426 total_size = group_size + dpd_size;
developer11f4a0b2023-03-31 17:43:25 +08001427 dpd_chan_ratio[0] = (an->cal_info[2] >> DPD_INFO_6G_SHIFT) &
1428 DPD_INFO_MASK;
1429 dpd_chan_ratio[1] = (an->cal_info[2] >> DPD_INFO_5G_SHIFT) &
1430 DPD_INFO_MASK;
1431 dpd_chan_ratio[2] = (an->cal_info[2] >> DPD_INFO_2G_SHIFT) &
1432 DPD_INFO_MASK;
1433 dpd_per_chan_size = (an->cal_info[2] >> DPD_INFO_CH_SHIFT) &
1434 DPD_INFO_MASK;
1435 total_ratio = dpd_chan_ratio[0] + dpd_chan_ratio[1] +
1436 dpd_chan_ratio[2];
developer071927d2022-08-31 20:39:29 +08001437 }
1438
1439 switch (ops){
1440 case PREK_SYNC_ALL:
1441 size_ptr = &total_size;
1442 base_idx = 0;
developer11f4a0b2023-03-31 17:43:25 +08001443 dpd_base_map = 0;
developer071927d2022-08-31 20:39:29 +08001444 goto start;
1445 case PREK_SYNC_GROUP:
1446 size_ptr = &group_size;
1447 base_idx = 0;
developer11f4a0b2023-03-31 17:43:25 +08001448 dpd_base_map = 0;
developer071927d2022-08-31 20:39:29 +08001449 goto start;
1450 case PREK_SYNC_DPD_6G:
1451 size_ptr = &dpd_size;
1452 base_idx = 0;
developer11f4a0b2023-03-31 17:43:25 +08001453 dpd_base_map = is_mt7996(an) ? GENMASK(2, 1) : 0;
developer071927d2022-08-31 20:39:29 +08001454 goto start;
1455 case PREK_SYNC_DPD_5G:
1456 size_ptr = &dpd_size;
1457 base_idx = 1;
developer11f4a0b2023-03-31 17:43:25 +08001458 dpd_base_map = is_mt7996(an) ? BIT(2) : BIT(0);
developer071927d2022-08-31 20:39:29 +08001459 goto start;
1460 case PREK_SYNC_DPD_2G:
1461 size_ptr = &dpd_size;
1462 base_idx = 2;
developer11f4a0b2023-03-31 17:43:25 +08001463 dpd_base_map = is_mt7996(an) ? 0 : GENMASK(1, 0);
developer071927d2022-08-31 20:39:29 +08001464
1465start:
1466 if (unl_genl_init(&nl_priv.unl, "nl80211") < 0) {
1467 atenl_err("Failed to connect to nl80211\n");
1468 return 2;
1469 }
1470
1471 ret = atenl_nl_precal_sync_partition(&nl_priv, MT76_TM_ATTR_PRECAL_INFO, 0, 0);
1472 if (ret || !an->cal_info)
1473 goto out;
1474
1475 group_size = an->cal_info[0];
1476 dpd_size = an->cal_info[1];
1477 total_size = group_size + dpd_size;
developer11f4a0b2023-03-31 17:43:25 +08001478 dpd_chan_ratio[0] = (an->cal_info[2] >> DPD_INFO_6G_SHIFT) &
1479 DPD_INFO_MASK;
1480 dpd_chan_ratio[1] = (an->cal_info[2] >> DPD_INFO_5G_SHIFT) &
1481 DPD_INFO_MASK;
1482 dpd_chan_ratio[2] = (an->cal_info[2] >> DPD_INFO_2G_SHIFT) &
1483 DPD_INFO_MASK;
1484 dpd_per_chan_size = (an->cal_info[2] >> DPD_INFO_CH_SHIFT) &
1485 DPD_INFO_MASK;
1486 total_ratio = dpd_chan_ratio[0] + dpd_chan_ratio[1] +
1487 dpd_chan_ratio[2];
developer071927d2022-08-31 20:39:29 +08001488 transmit_size = an->cal_info[3];
1489
1490 size = *size_ptr;
developer11f4a0b2023-03-31 17:43:25 +08001491 if (size_ptr == &dpd_size)
1492 size = size / total_ratio * dpd_chan_ratio[base_idx];
1493
developer071927d2022-08-31 20:39:29 +08001494 base = 0;
developer11f4a0b2023-03-31 17:43:25 +08001495 for (i = 0; i < 3; i++) {
1496 if (dpd_base_map & BIT(i))
1497 base += dpd_chan_ratio[i] * dpd_per_chan_size *
1498 MT_EE_CAL_UNIT;
developer071927d2022-08-31 20:39:29 +08001499 }
1500 base += (size_ptr == &dpd_size) ? group_size : 0;
1501
1502 if (!an->cal)
1503 an->cal = (u8 *) calloc(size, sizeof(u8));
1504 times = size / transmit_size + 1;
1505 for (i = 0; i < times; i++) {
1506 ret = atenl_nl_precal_sync_partition(&nl_priv, MT76_TM_ATTR_PRECAL, ops,
1507 i * transmit_size);
1508 if (ret)
1509 goto out;
1510 }
1511
1512 ret = atenl_eeprom_update_precal(an, base, size);
1513 break;
1514 case PREK_CLEAN_GROUP:
developer11f4a0b2023-03-31 17:43:25 +08001515 if (!(cal_indicator & group_ind_mask))
developer071927d2022-08-31 20:39:29 +08001516 return 0;
developer11f4a0b2023-03-31 17:43:25 +08001517 an->cal_info[4] = cal_indicator & group_ind_mask;
developer071927d2022-08-31 20:39:29 +08001518 ret = atenl_eeprom_update_precal(an, 0, group_size);
1519 break;
1520 case PREK_CLEAN_DPD:
developer11f4a0b2023-03-31 17:43:25 +08001521 if (!(cal_indicator & dpd_ind_mask))
developer071927d2022-08-31 20:39:29 +08001522 return 0;
developer11f4a0b2023-03-31 17:43:25 +08001523 an->cal_info[4] = cal_indicator & dpd_ind_mask;
developer071927d2022-08-31 20:39:29 +08001524 ret = atenl_eeprom_update_precal(an, group_size, dpd_size);
1525 break;
1526 default:
1527 break;
1528 }
1529
1530out:
1531 unl_free(&nl_priv.unl);
1532 return ret;
1533}