// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */
#include "mt76.h"
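
/* Netlink attribute policy shared by the set (cmd) and query (dump) paths */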
const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_SPE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_TX_DUTY_CYCLE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_IPG] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_TIME] = { .type = NLA_U32 },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
	[MT76_TM_ATTR_DRV_DATA] = { .type = NLA_NESTED },
};
EXPORT_SYMBOL_GPL(mt76_tm_policy);
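
/* Queue pending test frames from the tx worker: clone the prepared skb
 * until the frame budget is used up, the queued-vs-done window reaches
 * tx_queued_limit (default 1000), or the hardware queue is half full.
 */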
void mt76_testmode_tx_pending(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	u16 tx_queued_limit;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = phy->q_tx[qid];

	tx_queued_limit = td->tx_queued_limit ? td->tx_queued_limit : 1000;

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 &&
	       td->tx_queued - td->tx_done < tx_queued_limit &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, q, skb_get(skb), wcid,
						   NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}
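
/* Upper bound for the configurable MPDU length, derived from the rate
 * mode: HT and VHT/HE A-MPDU limits, plain max frame length for CCK/OFDM.
 */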
static u32
mt76_testmode_max_mpdu_len(struct mt76_phy *phy, u8 tx_rate_mode)
{
	switch (tx_rate_mode) {
	case MT76_TM_TX_MODE_HT:
		return IEEE80211_MAX_MPDU_LEN_HT_7935;
	case MT76_TM_TX_MODE_VHT:
	case MT76_TM_TX_MODE_HE_SU:
	case MT76_TM_TX_MODE_HE_EXT_SU:
	case MT76_TM_TX_MODE_HE_TB:
	case MT76_TM_TX_MODE_HE_MU:
		if (phy->sband_5g.sband.vht_cap.cap &
		    IEEE80211_VHT_CAP_MAX_MPDU_LENGTH_7991)
			return IEEE80211_MAX_MPDU_LEN_VHT_7991;
		return IEEE80211_MAX_MPDU_LEN_VHT_11454;
	case MT76_TM_TX_MODE_CCK:
	case MT76_TM_TX_MODE_OFDM:
	default:
		return IEEE80211_MAX_FRAME_LEN;
	}
}
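
/* Drop the prepared test frame, if any; safe to call with tx_skb == NULL */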
static void
mt76_testmode_free_skb(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}
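
/* Build the template frame used for TX testing: a zero-filled data frame
 * carrying the configured MAC addresses. Frames longer than MT_TXP_MAX_LEN
 * are split into a frag_list of follow-up buffers.
 */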
int mt76_testmode_alloc_skb(struct mt76_phy *phy, u32 len)
{
#define MT_TXP_MAX_LEN	4095
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct mt76_testmode_data *td = &phy->test;
	bool ext_phy = phy != &phy->dev->phy;
	struct sk_buff **frag_tail, *head;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	u32 max_len, head_len;
	int nfrags, i;

	max_len = mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode);
	if (len > max_len)
		len = max_len;
	else if (len < sizeof(struct ieee80211_hdr))
		len = sizeof(struct ieee80211_hdr);

	nfrags = len / MT_TXP_MAX_LEN;
	head_len = nfrags ? MT_TXP_MAX_LEN : len;

	if (len > IEEE80211_MAX_FRAME_LEN)
		fc |= IEEE80211_STYPE_QOS_DATA;

	head = alloc_skb(head_len, GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	hdr = __skb_put_zero(head, head_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, td->addr[0], ETH_ALEN);
	memcpy(hdr->addr2, td->addr[1], ETH_ALEN);
	memcpy(hdr->addr3, td->addr[2], ETH_ALEN);
	skb_set_queue_mapping(head, IEEE80211_AC_BE);

	info = IEEE80211_SKB_CB(head);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;

	if (ext_phy)
		info->hw_queue |= MT_TX_HW_QUEUE_EXT_PHY;

	frag_tail = &skb_shinfo(head)->frag_list;

	for (i = 0; i < nfrags; i++) {
		struct sk_buff *frag;
		u16 frag_len;

		if (i == nfrags - 1)
			frag_len = len % MT_TXP_MAX_LEN;
		else
			frag_len = MT_TXP_MAX_LEN;

		frag = alloc_skb(frag_len, GFP_KERNEL);
		if (!frag) {
			mt76_testmode_free_skb(phy);
			dev_kfree_skb(head);
			return -ENOMEM;
		}

		__skb_put_zero(frag, frag_len);
		head->len += frag->len;
		head->data_len += frag->len;

		*frag_tail = frag;
		frag_tail = &(*frag_tail)->next;
	}

	mt76_testmode_free_skb(phy);
	td->tx_skb = head;

	return 0;
}
EXPORT_SYMBOL(mt76_testmode_alloc_skb);
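
/* Validate the configured rate against the current channel and antenna
 * setup and encode it into the template skb's ieee80211_tx_info. Rate
 * modes above VHT are left for the driver to apply.
 */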
static int
mt76_testmode_tx_init(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(phy->antenna_mask);
	int ret;

	ret = mt76_testmode_alloc_skb(phy, td->tx_mpdu_len);
	if (ret)
		return ret;

	if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
		goto out;

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	info = IEEE80211_SKB_CB(td->tx_skb);
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      phy->chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_stbc)
		info->flags |= IEEE80211_TX_CTL_STBC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (phy->chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}

out:
	return 0;
}
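
/* Reset the TX counters and hand the frame budget to the tx worker */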
static void
mt76_testmode_tx_start(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}
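
/* Stop queuing new frames, wait up to MT76_TM_TIMEOUT seconds for the
 * hardware to drain what was already queued, then free the template frame.
 */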
static void
mt76_testmode_tx_stop(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued,
			   MT76_TM_TIMEOUT * HZ);

	mt76_testmode_free_skb(phy);
}
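
/* param_set is a bitmap tracking which attributes were explicitly
 * configured, so the dump path only reports user-set values.
 */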
static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}
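
/* Populate defaults on first use; tx_mpdu_len != 0 marks initialized state */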
static void
mt76_testmode_init_defaults(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	if (td->tx_mpdu_len > 0)
		return;

	td->tx_mpdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;

	memcpy(td->addr[0], phy->macaddr, ETH_ALEN);
	memcpy(td->addr[1], phy->macaddr, ETH_ALEN);
	memcpy(td->addr[2], phy->macaddr, ETH_ALEN);
}
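
/* State transition helper: stops an active TX run, prepares the template
 * frame when entering TX, then lets the driver apply the new state before
 * starting TX or clearing the RX statistics.
 */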
static int
__mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = phy->test.state;
	struct mt76_dev *dev = phy->dev;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(phy);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(phy);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(phy, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(phy);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(phy);
	else if (state == MT76_TM_STATE_RX_FRAMES) {
		memset(&phy->test.rx_stats, 0, sizeof(phy->test.rx_stats));
	}

	phy->test.state = state;

	return 0;
}
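
/* Public state setter: requires a running phy in monitor mode and routes
 * transitions between two active states through MT76_TM_STATE_IDLE.
 */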
int mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_hw *hw = phy->hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &phy->state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(phy, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(phy, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);
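
/* Range-checked u8 attribute read; leaves *dest untouched when the
 * attribute is absent.
 */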
static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}
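
/* NL80211_CMD_TESTMODE handler: parses the attributes, applies them to the
 * testmode configuration under dev->mutex and optionally triggers a state
 * change.
 */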
int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(phy, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(phy);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA],
			   &td->tx_antenna_mask, 0, 0xff) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_SPE_IDX], &td->tx_spe_idx, 0, 27) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_DUTY_CYCLE],
			   &td->tx_duty_cycle, 0, 99) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode) ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_mpdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_IPG])
		td->tx_ipg = nla_get_u32(tb[MT76_TM_ATTR_TX_IPG]);

	if (tb[MT76_TM_ATTR_TX_TIME])
		td->tx_time = nla_get_u32(tb[MT76_TM_ATTR_TX_TIME]);

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		state = nla_get_u32(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (tb[MT76_TM_ATTR_MAC_ADDRS]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_MAC_ADDRS], rem) {
			if (nla_len(cur) != ETH_ALEN || idx >= 3)
				goto out;

			memcpy(td->addr[idx], nla_data(cur), ETH_ALEN);
			idx++;
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(phy, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(phy, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);
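
/* Fill the MT76_TM_ATTR_STATS nest: driver counters first, then the
 * aggregated TX/RX totals.
 */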
static int
mt76_testmode_dump_stats(struct mt76_phy *phy, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	if (dev->test_ops->dump_stats) {
		int ret;

		ret = dev->test_ops->dump_stats(phy, msg);
		if (ret)
			return ret;
	}

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	return 0;
}
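
/* Testmode dump handler: emits either the statistics nest or the current
 * configuration as a single netlink message (cb->args[2] guards against
 * repeated dump iterations).
 */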
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(phy, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(phy);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (dev->test_mtd.name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, dev->test_mtd.name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, dev->test_mtd.offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_mpdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_SPE_IDX) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, td->tx_spe_idx)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_DUTY_CYCLE) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_DUTY_CYCLE, td->tx_duty_cycle)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_IPG) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, td->tx_ipg)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_TIME) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_TIME, td->tx_time)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u32(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_MAC_ADDRS)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_MAC_ADDRS);
		if (!a)
			goto out;

		for (i = 0; i < 3; i++)
			if (nla_put(msg, i, ETH_ALEN, td->addr[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);