1 // SPDX-License-Identifier: ISC
3 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
5 #include <linux/sched.h>
/* Channel table building blocks: each CHANxG macro expands to a partial
 * ieee80211_channel initializer for the given band and center frequency.
 * NOTE(review): this extracted view is missing interior lines of the
 * macros and of the three channel tables (original numbering is
 * non-contiguous); the full initializer lists are not visible here.
 */
9 #define CHAN2G(_idx, _freq) { \
10 .band = NL80211_BAND_2GHZ, \
11 .center_freq = (_freq), \
16 #define CHAN5G(_idx, _freq) { \
17 .band = NL80211_BAND_5GHZ, \
18 .center_freq = (_freq), \
23 #define CHAN6G(_idx, _freq) { \
24 .band = NL80211_BAND_6GHZ, \
25 .center_freq = (_freq), \
/* Static channel lists handed to mac80211 via mt76_init_sband_*(). */
30 static const struct ieee80211_channel mt76_channels_2ghz[] = {
47 static const struct ieee80211_channel mt76_channels_5ghz[] = {
80 static const struct ieee80211_channel mt76_channels_6ghz[] = {
/* Throughput -> LED blink-interval map used by the mac80211 tpt LED
 * trigger (see mt76_led_init()).  Throughput thresholds are in kbit/s
 * (hence the "* 1024"); higher throughput blinks faster.
 */
146 static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
147 { .throughput = 0 * 1024, .blink_time = 334 },
148 { .throughput = 1 * 1024, .blink_time = 260 },
149 { .throughput = 5 * 1024, .blink_time = 220 },
150 { .throughput = 10 * 1024, .blink_time = 190 },
151 { .throughput = 20 * 1024, .blink_time = 170 },
152 { .throughput = 50 * 1024, .blink_time = 150 },
153 { .throughput = 70 * 1024, .blink_time = 130 },
154 { .throughput = 100 * 1024, .blink_time = 110 },
155 { .throughput = 200 * 1024, .blink_time = 80 },
156 { .throughput = 300 * 1024, .blink_time = 50 },
/* Default legacy (CCK/OFDM) bitrate table exported for driver use.
 * NOTE(review): the rate entries themselves are not visible in this
 * extract; callers below pass "rates + 4" for 5/6 GHz, implying the
 * first four entries are the 2.4 GHz-only CCK rates — confirm.
 */
159 struct ieee80211_rate mt76_rates[] = {
173 EXPORT_SYMBOL_GPL(mt76_rates);
/* SAR (Specific Absorption Rate) frequency ranges advertised to
 * cfg80211; user space can set a per-range power limit for each
 * of these bands (see mt76_init_sar_power()).
 */
175 static const struct cfg80211_sar_freq_ranges mt76_sar_freq_ranges[] = {
176 { .start_freq = 2402, .end_freq = 2494, },
177 { .start_freq = 5150, .end_freq = 5350, },
178 { .start_freq = 5350, .end_freq = 5470, },
179 { .start_freq = 5470, .end_freq = 5725, },
180 { .start_freq = 5725, .end_freq = 5950, },
/* Capability blob hooked into wiphy->sar_capa in mt76_phy_init(). */
183 static const struct cfg80211_sar_capa mt76_sar_capa = {
184 .type = NL80211_SAR_TYPE_POWER,
185 .num_freq_ranges = ARRAY_SIZE(mt76_sar_freq_ranges),
186 .freq_ranges = &mt76_sar_freq_ranges[0],
/* Register the device LED with the LED class subsystem.
 * Skips registration when the driver provided neither brightness_set
 * nor blink_set (no LED support).  The default trigger blinks with
 * throughput per mt76_tpt_blink; pin and polarity come from the DT
 * "led" child node ("led-sources", "led-active-low").
 */
189 static int mt76_led_init(struct mt76_dev *dev)
191 struct device_node *np = dev->dev->of_node;
192 struct ieee80211_hw *hw = dev->hw;
195 if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
/* LED name is derived from the wiphy name, e.g. "mt76-phy0". */
198 snprintf(dev->led_name, sizeof(dev->led_name),
199 "mt76-%s", wiphy_name(hw->wiphy));
201 dev->led_cdev.name = dev->led_name;
202 dev->led_cdev.default_trigger =
203 ieee80211_create_tpt_led_trigger(hw,
204 IEEE80211_TPT_LEDTRIG_FL_RADIO,
206 ARRAY_SIZE(mt76_tpt_blink));
208 np = of_get_child_by_name(np, "led");
210 if (!of_property_read_u32(np, "led-sources", &led_pin))
211 dev->led_pin = led_pin;
212 dev->led_al = of_property_read_bool(np, "led-active-low");
215 return led_classdev_register(dev->dev, &dev->led_cdev);
/* Unregister the LED class device; mirrors the same "no LED ops"
 * guard as mt76_led_init() so it is safe when no LED was registered.
 */
218 static void mt76_led_cleanup(struct mt76_dev *dev)
220 if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
223 led_classdev_unregister(&dev->led_cdev);
/* Derive per-band HT/VHT spatial-stream capabilities from the PHY's
 * antenna mask: TX STBC is only advertised with >1 stream (the
 * surrounding condition is not visible in this extract — confirm),
 * HT MCS rx_mask enables one 0xff byte per stream, and the VHT MCS
 * maps advertise MCS0-9 for supported streams and NOT_SUPPORTED for
 * the rest.
 */
226 static void mt76_init_stream_cap(struct mt76_phy *phy,
227 struct ieee80211_supported_band *sband,
230 struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
231 int i, nstream = hweight8(phy->antenna_mask);
232 struct ieee80211_sta_vht_cap *vht_cap;
236 ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
238 ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;
/* One rx_mask byte (MCS0-7) per available spatial stream. */
240 for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
241 ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;
246 vht_cap = &sband->vht_cap;
248 vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
250 vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;
251 vht_cap->cap |= IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
252 IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN;
/* Build the 16-bit VHT MCS map, 2 bits per stream index. */
254 for (i = 0; i < 8; i++) {
256 mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
259 (IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
261 vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
262 vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
/* Refresh stream capabilities on every band the PHY supports,
 * e.g. after the antenna mask changed.  2 GHz never gets VHT;
 * 5/6 GHz honour the caller's @vht flag.
 */
265 void mt76_set_stream_caps(struct mt76_phy *phy, bool vht)
267 if (phy->cap.has_2ghz)
268 mt76_init_stream_cap(phy, &phy->sband_2g.sband, false);
269 if (phy->cap.has_5ghz)
270 mt76_init_stream_cap(phy, &phy->sband_5g.sband, vht);
271 if (phy->cap.has_6ghz)
272 mt76_init_stream_cap(phy, &phy->sband_6g.sband, vht);
274 EXPORT_SYMBOL_GPL(mt76_set_stream_caps);
/* Common supported-band setup: copy the const channel template into
 * devm-managed memory (so channel flags can be modified per device),
 * allocate the per-channel state array, wire channels/bitrates into
 * the ieee80211_supported_band, and fill default HT (and optionally
 * VHT) capabilities.  Error-path lines for failed allocations are not
 * visible in this extract — presumably returns -ENOMEM; confirm.
 */
277 mt76_init_sband(struct mt76_phy *phy, struct mt76_sband *msband,
278 const struct ieee80211_channel *chan, int n_chan,
279 struct ieee80211_rate *rates, int n_rates,
282 struct ieee80211_supported_band *sband = &msband->sband;
283 struct ieee80211_sta_vht_cap *vht_cap;
284 struct ieee80211_sta_ht_cap *ht_cap;
285 struct mt76_dev *dev = phy->dev;
/* Writable copy of the const channel table, freed with the device. */
289 size = n_chan * sizeof(*chan);
290 chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
294 msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
299 sband->channels = chanlist;
300 sband->n_channels = n_chan;
301 sband->bitrates = rates;
302 sband->n_bitrates = n_rates;
307 ht_cap = &sband->ht_cap;
308 ht_cap->ht_supported = true;
309 ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
310 IEEE80211_HT_CAP_GRN_FLD |
311 IEEE80211_HT_CAP_SGI_20 |
312 IEEE80211_HT_CAP_SGI_40 |
313 (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);
315 ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
316 ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;
/* Per-stream MCS maps depend on the antenna mask. */
318 mt76_init_stream_cap(phy, sband, vht);
323 vht_cap = &sband->vht_cap;
324 vht_cap->vht_supported = true;
325 vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
326 IEEE80211_VHT_CAP_RXSTBC_1 |
327 IEEE80211_VHT_CAP_SHORT_GI_80 |
328 (3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);
/* Hook the 2.4 GHz band into the wiphy and initialize it from the
 * static channel table; HT enabled, VHT always off on 2 GHz.
 */
334 mt76_init_sband_2g(struct mt76_phy *phy, struct ieee80211_rate *rates,
337 phy->hw->wiphy->bands[NL80211_BAND_2GHZ] = &phy->sband_2g.sband;
339 return mt76_init_sband(phy, &phy->sband_2g, mt76_channels_2ghz,
340 ARRAY_SIZE(mt76_channels_2ghz), rates,
341 n_rates, true, false);
/* Hook the 5 GHz band into the wiphy; VHT support is caller-selected.
 * The trailing argument line of the mt76_init_sband() call is not
 * visible in this extract.
 */
345 mt76_init_sband_5g(struct mt76_phy *phy, struct ieee80211_rate *rates,
346 int n_rates, bool vht)
348 phy->hw->wiphy->bands[NL80211_BAND_5GHZ] = &phy->sband_5g.sband;
350 return mt76_init_sband(phy, &phy->sband_5g, mt76_channels_5ghz,
351 ARRAY_SIZE(mt76_channels_5ghz), rates,
/* Hook the 6 GHz band into the wiphy; neither HT nor VHT is
 * advertised there (6 GHz uses HE capabilities instead — presumably
 * set up elsewhere by the driver; confirm).
 */
356 mt76_init_sband_6g(struct mt76_phy *phy, struct ieee80211_rate *rates,
359 phy->hw->wiphy->bands[NL80211_BAND_6GHZ] = &phy->sband_6g.sband;
361 return mt76_init_sband(phy, &phy->sband_6g, mt76_channels_6ghz,
362 ARRAY_SIZE(mt76_channels_6ghz), rates,
363 n_rates, false, false);
/* Post-registration sanity pass over one band: if at least one
 * channel survived regulatory/OF limits (not DISABLED), pick the
 * first channel as the default chandef/chan_state; otherwise drop
 * the band from the wiphy entirely.  Interior control-flow lines are
 * missing from this extract.
 */
367 mt76_check_sband(struct mt76_phy *phy, struct mt76_sband *msband,
368 enum nl80211_band band)
370 struct ieee80211_supported_band *sband = &msband->sband;
377 for (i = 0; i < sband->n_channels; i++) {
378 if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
/* Default to the band's first channel until mt76_set_channel(). */
386 phy->chandef.chan = &sband->channels[0];
387 phy->chan_state = &msband->chan[0];
/* No usable channel: hide the band from mac80211. */
391 sband->n_channels = 0;
392 phy->hw->wiphy->bands[band] = NULL;
/* Common ieee80211_hw / wiphy setup shared by mt76_register_device()
 * and mt76_register_phy(): MAC address, wiphy feature flags, SAR
 * capability + per-range power array, txq sizing and the hardware
 * capability bits every mt76 driver provides.
 */
396 mt76_phy_init(struct mt76_phy *phy, struct ieee80211_hw *hw)
398 struct mt76_dev *dev = phy->dev;
399 struct wiphy *wiphy = hw->wiphy;
401 SET_IEEE80211_DEV(hw, dev->dev);
402 SET_IEEE80211_PERM_ADDR(hw, phy->macaddr);
404 wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;
405 wiphy->flags |= WIPHY_FLAG_HAS_CHANNEL_SWITCH |
406 WIPHY_FLAG_SUPPORTS_TDLS |
409 wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);
410 wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AIRTIME_FAIRNESS);
411 wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AQL);
413 wiphy->available_antennas_tx = phy->antenna_mask;
414 wiphy->available_antennas_rx = phy->antenna_mask;
/* SAR: one mt76_freq_range_power slot per advertised range. */
416 wiphy->sar_capa = &mt76_sar_capa;
417 phy->frp = devm_kcalloc(dev->dev, wiphy->sar_capa->num_freq_ranges,
418 sizeof(struct mt76_freq_range_power),
423 hw->txq_data_size = sizeof(struct mt76_txq);
424 hw->uapsd_max_sp_len = IEEE80211_WMM_IE_STA_QOSINFO_SP_ALL;
426 if (!hw->max_tx_fragments)
427 hw->max_tx_fragments = 16;
429 ieee80211_hw_set(hw, SIGNAL_DBM);
430 ieee80211_hw_set(hw, AMPDU_AGGREGATION);
431 ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
432 ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
433 ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
434 ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
435 ieee80211_hw_set(hw, SUPPORTS_REORDERING_BUFFER);
436 ieee80211_hw_set(hw, TX_AMSDU);
437 ieee80211_hw_set(hw, TX_FRAG_LIST);
438 ieee80211_hw_set(hw, MFP_CAPABLE);
439 ieee80211_hw_set(hw, AP_LINK_PS);
440 ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
/* Allocate a secondary mt76_phy together with its ieee80211_hw.
 * hw->priv holds the mt76_phy; the driver's private area of @size
 * bytes follows at an 8-byte-aligned offset (phy->priv).  Interface
 * modes mirror those set in mt76_alloc_device().
 */
446 mt76_alloc_phy(struct mt76_dev *dev, unsigned int size,
447 const struct ieee80211_ops *ops)
449 struct ieee80211_hw *hw;
450 unsigned int phy_size;
451 struct mt76_phy *phy;
453 phy_size = ALIGN(sizeof(*phy), 8);
454 hw = ieee80211_alloc_hw(size + phy_size, ops);
/* Driver-private data sits directly after the aligned phy struct. */
461 phy->priv = hw->priv + phy_size;
463 hw->wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
464 hw->wiphy->interface_modes =
465 BIT(NL80211_IFTYPE_STATION) |
466 BIT(NL80211_IFTYPE_AP) |
467 #ifdef CONFIG_MAC80211_MESH
468 BIT(NL80211_IFTYPE_MESH_POINT) |
470 BIT(NL80211_IFTYPE_P2P_CLIENT) |
471 BIT(NL80211_IFTYPE_P2P_GO) |
472 BIT(NL80211_IFTYPE_ADHOC);
476 EXPORT_SYMBOL_GPL(mt76_alloc_phy);
/* Register a secondary PHY with mac80211: common hw init, per-band
 * sband setup (rates + 4 skips the four CCK entries for 5/6 GHz —
 * confirm against mt76_rates), OF frequency limits, band pruning,
 * then ieee80211_register_hw().  On success the phy is published as
 * dev->phy2.  Error-handling lines are missing from this extract.
 */
478 int mt76_register_phy(struct mt76_phy *phy, bool vht,
479 struct ieee80211_rate *rates, int n_rates)
483 ret = mt76_phy_init(phy, phy->hw);
487 if (phy->cap.has_2ghz) {
488 ret = mt76_init_sband_2g(phy, rates, n_rates);
493 if (phy->cap.has_5ghz) {
494 ret = mt76_init_sband_5g(phy, rates + 4, n_rates - 4, vht);
499 if (phy->cap.has_6ghz) {
500 ret = mt76_init_sband_6g(phy, rates + 4, n_rates - 4);
505 wiphy_read_of_freq_limits(phy->hw->wiphy);
506 mt76_check_sband(phy, &phy->sband_2g, NL80211_BAND_2GHZ);
507 mt76_check_sband(phy, &phy->sband_5g, NL80211_BAND_5GHZ);
508 mt76_check_sband(phy, &phy->sband_6g, NL80211_BAND_6GHZ);
510 ret = ieee80211_register_hw(phy->hw);
514 phy->dev->phy2 = phy;
518 EXPORT_SYMBOL_GPL(mt76_register_phy);
/* Tear down a secondary PHY: flush pending tx status work first so
 * no status callbacks fire after the hw is unregistered.
 */
520 void mt76_unregister_phy(struct mt76_phy *phy)
522 struct mt76_dev *dev = phy->dev;
524 mt76_tx_status_check(dev, true);
525 ieee80211_unregister_hw(phy->hw);
528 EXPORT_SYMBOL_GPL(mt76_unregister_phy);
/* Allocate and initialize the primary mt76_dev + ieee80211_hw:
 * locks, wait queues, MCU message queue, tx worker hook, interface
 * modes, token IDR, per-queue rx skb lists and the ordered work
 * queue.  Several assignment/error lines are missing from this
 * extract (e.g. hw->priv wiring between lines 540 and 554).
 */
531 mt76_alloc_device(struct device *pdev, unsigned int size,
532 const struct ieee80211_ops *ops,
533 const struct mt76_driver_ops *drv_ops)
535 struct ieee80211_hw *hw;
536 struct mt76_phy *phy;
537 struct mt76_dev *dev;
540 hw = ieee80211_alloc_hw(size, ops);
554 spin_lock_init(&dev->rx_lock);
555 spin_lock_init(&dev->lock);
556 spin_lock_init(&dev->cc_lock);
557 spin_lock_init(&dev->status_lock);
558 mutex_init(&dev->mutex);
559 init_waitqueue_head(&dev->tx_wait);
561 skb_queue_head_init(&dev->mcu.res_q);
562 init_waitqueue_head(&dev->mcu.wait);
563 mutex_init(&dev->mcu.mutex);
564 dev->tx_worker.fn = mt76_tx_worker;
566 hw->wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
567 hw->wiphy->interface_modes =
568 BIT(NL80211_IFTYPE_STATION) |
569 BIT(NL80211_IFTYPE_AP) |
570 #ifdef CONFIG_MAC80211_MESH
571 BIT(NL80211_IFTYPE_MESH_POINT) |
573 BIT(NL80211_IFTYPE_P2P_CLIENT) |
574 BIT(NL80211_IFTYPE_P2P_GO) |
575 BIT(NL80211_IFTYPE_ADHOC);
577 spin_lock_init(&dev->token_lock);
578 idr_init(&dev->token);
580 INIT_LIST_HEAD(&dev->wcid_list);
582 INIT_LIST_HEAD(&dev->txwi_cache);
583 dev->token_size = dev->drv->token_size;
585 for (i = 0; i < ARRAY_SIZE(dev->q_rx); i++)
586 skb_queue_head_init(&dev->rx_skb[i]);
/* Ordered workqueue; on allocation failure the hw is freed again. */
588 dev->wq = alloc_ordered_workqueue("mt76", 0);
590 ieee80211_free_hw(hw);
596 EXPORT_SYMBOL_GPL(mt76_alloc_device);
/* Register the primary device: drvdata, common phy init, per-band
 * sband setup (same "rates + 4" convention as mt76_register_phy()),
 * OF limits, band pruning, optional LED registration, mac80211
 * registration, then start the tx worker at low FIFO priority.
 * Error-unwind lines are missing from this extract.
 */
598 int mt76_register_device(struct mt76_dev *dev, bool vht,
599 struct ieee80211_rate *rates, int n_rates)
601 struct ieee80211_hw *hw = dev->hw;
602 struct mt76_phy *phy = &dev->phy;
605 dev_set_drvdata(dev->dev, dev);
606 ret = mt76_phy_init(phy, hw);
610 if (phy->cap.has_2ghz) {
611 ret = mt76_init_sband_2g(phy, rates, n_rates);
616 if (phy->cap.has_5ghz) {
617 ret = mt76_init_sband_5g(phy, rates + 4, n_rates - 4, vht);
622 if (phy->cap.has_6ghz) {
623 ret = mt76_init_sband_6g(phy, rates + 4, n_rates - 4);
628 wiphy_read_of_freq_limits(hw->wiphy);
629 mt76_check_sband(&dev->phy, &phy->sband_2g, NL80211_BAND_2GHZ);
630 mt76_check_sband(&dev->phy, &phy->sband_5g, NL80211_BAND_5GHZ);
631 mt76_check_sband(&dev->phy, &phy->sband_6g, NL80211_BAND_6GHZ);
633 if (IS_ENABLED(CONFIG_MT76_LEDS)) {
634 ret = mt76_led_init(dev);
639 ret = ieee80211_register_hw(hw);
643 WARN_ON(mt76_worker_setup(hw, &dev->tx_worker, NULL, "tx"));
644 sched_set_fifo_low(dev->tx_worker.task);
648 EXPORT_SYMBOL_GPL(mt76_register_device);
/* Reverse of mt76_register_device(): drop the LED, flush pending tx
 * status work, then unregister from mac80211.
 */
650 void mt76_unregister_device(struct mt76_dev *dev)
652 struct ieee80211_hw *hw = dev->hw;
654 if (IS_ENABLED(CONFIG_MT76_LEDS))
655 mt76_led_cleanup(dev);
656 mt76_tx_status_check(dev, true);
657 ieee80211_unregister_hw(hw);
659 EXPORT_SYMBOL_GPL(mt76_unregister_device);
/* Final teardown after unregister: stop the tx worker, destroy the
 * ordered workqueue and release the ieee80211_hw (which owns the
 * mt76_dev allocation).
 */
661 void mt76_free_device(struct mt76_dev *dev)
663 mt76_worker_teardown(&dev->tx_worker);
665 destroy_workqueue(dev->wq);
668 ieee80211_free_hw(dev->hw);
670 EXPORT_SYMBOL_GPL(mt76_free_device);
/* Hand a completed A-MSDU chain (head skb + frag_list) to the rx
 * queue.  Before that, validate the first subframe: a plain MSDU can
 * be mis-parsed as A-MSDU when the "unauthenticated A-MSDU" QoS bit
 * was flipped, in which case an LLC/SNAP header sits where the DA
 * should be — detected via the rfc1042_header compare below.  The
 * drop path for invalid frames is not visible in this extract.
 */
672 static void mt76_rx_release_amsdu(struct mt76_phy *phy, enum mt76_rxq_id q)
674 struct sk_buff *skb = phy->rx_amsdu[q].head;
675 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
676 struct mt76_dev *dev = phy->dev;
678 phy->rx_amsdu[q].head = NULL;
679 phy->rx_amsdu[q].tail = NULL;
682 * Validate if the amsdu has a proper first subframe.
683 * A single MSDU can be parsed as A-MSDU when the unauthenticated A-MSDU
684 * flag of the QoS header gets flipped. In such cases, the first
685 * subframe has a LLC/SNAP header in the location of the destination
688 if (skb_shinfo(skb)->frag_list) {
691 if (!(status->flag & RX_FLAG_8023)) {
692 offset = ieee80211_get_hdrlen_from_skb(skb);
695 (RX_FLAG_DECRYPTED | RX_FLAG_IV_STRIPPED)) ==
700 if (ether_addr_equal(skb->data + offset, rfc1042_header)) {
705 __skb_queue_tail(&dev->rx_skb[q], skb);
/* Aggregate consecutive A-MSDU subframes into one skb chain.
 * A pending chain is flushed when a non-A-MSDU frame, a new first
 * subframe, or a different sequence number arrives; subframes after
 * the head are linked through the frag_list tail pointer, and the
 * chain is released as soon as the last subframe is seen.
 */
708 static void mt76_rx_release_burst(struct mt76_phy *phy, enum mt76_rxq_id q,
711 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
713 if (phy->rx_amsdu[q].head &&
714 (!status->amsdu || status->first_amsdu ||
715 status->seqno != phy->rx_amsdu[q].seqno))
716 mt76_rx_release_amsdu(phy, q);
718 if (!phy->rx_amsdu[q].head) {
719 phy->rx_amsdu[q].tail = &skb_shinfo(skb)->frag_list;
720 phy->rx_amsdu[q].seqno = status->seqno;
721 phy->rx_amsdu[q].head = skb;
723 *phy->rx_amsdu[q].tail = skb;
724 phy->rx_amsdu[q].tail = &skb->next;
727 if (!status->amsdu || status->last_amsdu)
728 mt76_rx_release_amsdu(phy, q);
/* Driver entry point for a received frame.  Frames are dropped when
 * the owning PHY is not running (drop lines not visible here); in
 * testmode rx-frames state, per-queue packet/FCS-error counters are
 * updated.  Accepted frames go through A-MSDU burst aggregation.
 */
731 void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
733 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
734 struct mt76_phy *phy = mt76_dev_phy(dev, status->ext_phy);
736 if (!test_bit(MT76_STATE_RUNNING, &phy->state)) {
741 #ifdef CONFIG_NL80211_TESTMODE
742 if (phy->test.state == MT76_TM_STATE_RX_FRAMES) {
743 phy->test.rx_stats.packets[q]++;
744 if (status->flag & RX_FLAG_FAILED_FCS_CRC)
745 phy->test.rx_stats.fcs_error[q]++;
749 mt76_rx_release_burst(phy, q, skb);
751 EXPORT_SYMBOL_GPL(mt76_rx);
/* Return true when any tx queue of this PHY still has frames queued.
 * NOTE(review): the loop body and return statements are missing from
 * this extract; behavior inferred from the name and callers
 * (mt76_set_channel() waits on it) — confirm.
 */
753 bool mt76_has_tx_pending(struct mt76_phy *phy)
755 struct mt76_queue *q;
758 for (i = 0; i < __MT_TXQ_MAX; i++) {
766 EXPORT_SYMBOL_GPL(mt76_has_tx_pending);
/* Map an ieee80211_channel back to its per-channel state slot by
 * computing the channel's index within the band's channel array.
 * Anything that is not 2 or 6 GHz falls through to the 5 GHz band.
 */
768 static struct mt76_channel_state *
769 mt76_channel_state(struct mt76_phy *phy, struct ieee80211_channel *c)
771 struct mt76_sband *msband;
774 if (c->band == NL80211_BAND_2GHZ)
775 msband = &phy->sband_2g;
776 else if (c->band == NL80211_BAND_6GHZ)
777 msband = &phy->sband_6g;
779 msband = &phy->sband_5g;
/* Pointer arithmetic: both arrays are indexed identically. */
781 idx = c - &msband->sband.channels[0];
782 return &msband->chan[idx];
/* Account elapsed active time (in microseconds) on the current
 * channel since the last survey timestamp, then advance the
 * timestamp.  The subtraction's second operand (the previous
 * phy->survey_time, presumably) is on a line missing from this
 * extract — confirm.
 */
785 void mt76_update_survey_active_time(struct mt76_phy *phy, ktime_t time)
787 struct mt76_channel_state *state = phy->chan_state;
789 state->cc_active += ktime_to_us(ktime_sub(time,
791 phy->survey_time = time;
793 EXPORT_SYMBOL_GPL(mt76_update_survey_active_time);
/* Refresh channel survey statistics: let the driver update its
 * hardware counters, account active time, and — when the driver does
 * software rx airtime accounting — fold the accumulated BSS rx
 * airtime into the channel state under cc_lock.
 */
795 void mt76_update_survey(struct mt76_phy *phy)
797 struct mt76_dev *dev = phy->dev;
800 if (dev->drv->update_survey)
801 dev->drv->update_survey(phy);
803 cur_time = ktime_get_boottime();
804 mt76_update_survey_active_time(phy, cur_time);
806 if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) {
807 struct mt76_channel_state *state = phy->chan_state;
809 spin_lock_bh(&dev->cc_lock);
810 state->cc_bss_rx += dev->cur_cc_bss_rx;
811 dev->cur_cc_bss_rx = 0;
812 spin_unlock_bh(&dev->cc_lock);
815 EXPORT_SYMBOL_GPL(mt76_update_survey);
/* Switch the PHY to the channel currently configured in hw->conf.
 * Waits up to 200 ms (HZ/5) for pending tx to drain, closes out the
 * survey on the old channel, resets DFS state when frequency or
 * width changed, and zeroes channel stats when hopping off-channel
 * (main_chan handling lines are partially missing here).
 */
817 void mt76_set_channel(struct mt76_phy *phy)
819 struct mt76_dev *dev = phy->dev;
820 struct ieee80211_hw *hw = phy->hw;
821 struct cfg80211_chan_def *chandef = &hw->conf.chandef;
822 bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
823 int timeout = HZ / 5;
825 wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(phy), timeout);
826 mt76_update_survey(phy);
828 if (phy->chandef.chan->center_freq != chandef->chan->center_freq ||
829 phy->chandef.width != chandef->width)
830 phy->dfs_state = MT_DFS_STATE_UNKNOWN;
832 phy->chandef = *chandef;
833 phy->chan_state = mt76_channel_state(phy, chandef->chan);
836 phy->main_chan = chandef->chan;
838 if (chandef->chan != phy->main_chan)
839 memset(phy->chan_state, 0, sizeof(*phy->chan_state));
841 EXPORT_SYMBOL_GPL(mt76_set_channel);
/* mac80211 .get_survey callback.  @idx enumerates channels across the
 * 2 GHz, then 5 GHz, then 6 GHz bands; counters are stored internally
 * in microseconds and converted to milliseconds (div by 1000) for
 * cfg80211.  cc_lock protects the software-accounted BSS-rx/tx
 * counters.  Out-of-range @idx returns an error (exact code on a
 * missing line — presumably -ENOENT; confirm).
 */
843 int mt76_get_survey(struct ieee80211_hw *hw, int idx,
844 struct survey_info *survey)
846 struct mt76_phy *phy = hw->priv;
847 struct mt76_dev *dev = phy->dev;
848 struct mt76_sband *sband;
849 struct ieee80211_channel *chan;
850 struct mt76_channel_state *state;
853 mutex_lock(&dev->mutex);
854 if (idx == 0 && dev->drv->update_survey)
855 mt76_update_survey(phy);
/* Translate the flat index into (band, per-band index). */
857 if (idx >= phy->sband_2g.sband.n_channels +
858 phy->sband_5g.sband.n_channels) {
859 idx -= (phy->sband_2g.sband.n_channels +
860 phy->sband_5g.sband.n_channels);
861 sband = &phy->sband_6g;
862 } else if (idx >= phy->sband_2g.sband.n_channels) {
863 idx -= phy->sband_2g.sband.n_channels;
864 sband = &phy->sband_5g;
866 sband = &phy->sband_2g;
869 if (idx >= sband->sband.n_channels) {
874 chan = &sband->sband.channels[idx];
875 state = mt76_channel_state(phy, chan);
877 memset(survey, 0, sizeof(*survey));
878 survey->channel = chan;
879 survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
880 survey->filled |= dev->drv->survey_flags;
882 survey->filled |= SURVEY_INFO_NOISE_DBM;
884 if (chan == phy->main_chan) {
885 survey->filled |= SURVEY_INFO_IN_USE;
887 if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)
888 survey->filled |= SURVEY_INFO_TIME_BSS_RX;
/* us -> ms for cfg80211 reporting. */
891 survey->time_busy = div_u64(state->cc_busy, 1000);
892 survey->time_rx = div_u64(state->cc_rx, 1000);
893 survey->time = div_u64(state->cc_active, 1000);
894 survey->noise = state->noise;
896 spin_lock_bh(&dev->cc_lock);
897 survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000);
898 survey->time_tx = div_u64(state->cc_tx, 1000);
899 spin_unlock_bh(&dev->cc_lock);
902 mutex_unlock(&dev->mutex);
906 EXPORT_SYMBOL_GPL(mt76_get_survey);
/* Set up software PN replay checking for a station key.  Only CCMP
 * is tracked; the current rx PN for each TID — plus one extra slot
 * (index IEEE80211_NUM_TIDS) for robust management frames — is
 * snapshotted from mac80211 so mt76_check_ccmp_pn() can detect
 * replays.
 */
908 void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
909 struct ieee80211_key_conf *key)
911 struct ieee80211_key_seq seq;
914 wcid->rx_check_pn = false;
919 if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
922 wcid->rx_check_pn = true;
925 for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
926 ieee80211_get_key_rx_seq(key, i, &seq);
927 memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
930 /* robust management frame */
931 ieee80211_get_key_rx_seq(key, -1, &seq);
932 memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
935 EXPORT_SYMBOL(mt76_wcid_key_setup);
938 mt76_rx_signal(struct mt76_rx_status *status)
940 s8 *chain_signal = status->chain_signal;
944 for (chains = status->chains; chains; chains >>= 1, chain_signal++) {
948 if (!(chains & BIT(0)) ||
/* Convert the driver-private mt76_rx_status stored in skb->cb into a
 * mac80211 ieee80211_rx_status in place, and resolve the destination
 * hw and station.  The mstat copy is taken first because the cb area
 * is overwritten by the memset.  Beacons/probe responses get a
 * boottime timestamp for cfg80211 scan results.
 */
968 mt76_rx_convert(struct mt76_dev *dev, struct sk_buff *skb,
969 struct ieee80211_hw **hw,
970 struct ieee80211_sta **sta)
972 struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
973 struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
974 struct mt76_rx_status mstat;
976 mstat = *((struct mt76_rx_status *)skb->cb);
977 memset(status, 0, sizeof(*status));
979 status->flag = mstat.flag;
980 status->freq = mstat.freq;
981 status->enc_flags = mstat.enc_flags;
982 status->encoding = mstat.encoding;
983 status->bw = mstat.bw;
984 status->he_ru = mstat.he_ru;
985 status->he_gi = mstat.he_gi;
986 status->he_dcm = mstat.he_dcm;
987 status->rate_idx = mstat.rate_idx;
988 status->nss = mstat.nss;
989 status->band = mstat.band;
990 status->signal = mstat.signal;
991 status->chains = mstat.chains;
992 status->ampdu_reference = mstat.ampdu_ref;
993 status->device_timestamp = mstat.timestamp;
994 status->mactime = mstat.timestamp;
/* Combined per-chain signal overrides the raw value set above. */
995 status->signal = mt76_rx_signal(&mstat);
996 if (status->signal <= -128)
997 status->flag |= RX_FLAG_NO_SIGNAL_VAL;
999 if (ieee80211_is_beacon(hdr->frame_control) ||
1000 ieee80211_is_probe_resp(hdr->frame_control))
1001 status->boottime_ns = ktime_get_boottime_ns();
/* Compile-time guarantees that cb-area reuse is safe. */
1003 BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
1004 BUILD_BUG_ON(sizeof(status->chain_signal) !=
1005 sizeof(mstat.chain_signal));
1006 memcpy(status->chain_signal, mstat.chain_signal,
1007 sizeof(mstat.chain_signal));
1009 *sta = wcid_to_sta(mstat.wcid);
1010 *hw = mt76_phy_hw(dev, mstat.ext_phy);
/* Software CCMP PN replay check for decrypted frames on wcids with
 * rx_check_pn enabled.  Per IEEE 802.11-2020 12.5.3.4.4, unicast
 * robust management frames with To-DS 0 share one dedicated replay
 * counter (index IEEE80211_NUM_TIDS).  Returns -EINVAL on replay
 * (PN not greater than the stored one — comparison details partially
 * on missing lines), otherwise stores the new PN and, for
 * IV-stripped frames, marks the PN as validated for mac80211.
 */
1014 mt76_check_ccmp_pn(struct sk_buff *skb)
1016 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
1017 struct mt76_wcid *wcid = status->wcid;
1018 struct ieee80211_hdr *hdr;
1022 if (!(status->flag & RX_FLAG_DECRYPTED))
1025 if (!wcid || !wcid->rx_check_pn)
1028 security_idx = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK;
1029 if (status->flag & RX_FLAG_8023)
1030 goto skip_hdr_check;
1032 hdr = mt76_skb_get_hdr(skb);
1033 if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
1035 * Validate the first fragment both here and in mac80211
1036 * All further fragments will be validated by mac80211 only.
1038 if (ieee80211_is_frag(hdr) &&
1039 !ieee80211_is_first_frag(hdr->frame_control))
1043 /* IEEE 802.11-2020, 12.5.3.4.4 "PN and replay detection" c):
1045 * the recipient shall maintain a single replay counter for received
1046 * individually addressed robust Management frames that are received
1047 * with the To DS subfield equal to 0, [...]
1049 if (ieee80211_is_mgmt(hdr->frame_control) &&
1050 !ieee80211_has_tods(hdr->frame_control))
1051 security_idx = IEEE80211_NUM_TIDS;
1054 BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
1055 ret = memcmp(status->iv, wcid->rx_key_pn[security_idx],
1056 sizeof(status->iv));
1058 return -EINVAL; /* replay */
1060 memcpy(wcid->rx_key_pn[security_idx], status->iv, sizeof(status->iv));
1062 if (status->flag & RX_FLAG_IV_STRIPPED)
1063 status->flag |= RX_FLAG_PN_VALIDATED;
/* Software rx airtime accounting for one frame (or aggregated
 * A-MPDU length): estimate airtime from the rate info via mac80211,
 * add it to the per-device BSS rx counter under cc_lock, and — when
 * the frame belongs to a known station — report it to mac80211's
 * airtime fairness scheduler.
 */
1069 mt76_airtime_report(struct mt76_dev *dev, struct mt76_rx_status *status,
1072 struct mt76_wcid *wcid = status->wcid;
1073 struct ieee80211_rx_status info = {
1074 .enc_flags = status->enc_flags,
1075 .rate_idx = status->rate_idx,
1076 .encoding = status->encoding,
1077 .band = status->band,
1081 struct ieee80211_sta *sta;
1083 u8 tidno = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK;
1085 airtime = ieee80211_calc_rx_airtime(dev->hw, &info, len);
1086 spin_lock(&dev->cc_lock);
1087 dev->cur_cc_bss_rx += airtime;
1088 spin_unlock(&dev->cc_lock);
1090 if (!wcid || !wcid->sta)
/* wcid is embedded in the station's drv_priv area. */
1093 sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
1094 ieee80211_sta_register_airtime(sta, tidno, 0, airtime);
/* Report the airtime accumulated for the current rx A-MPDU and reset
 * the aggregation state.  The wcid is re-resolved from its index via
 * RCU because the station may have been removed while the A-MPDU was
 * being accumulated.
 */
1098 mt76_airtime_flush_ampdu(struct mt76_dev *dev)
1100 struct mt76_wcid *wcid;
1103 if (!dev->rx_ampdu_len)
1106 wcid_idx = dev->rx_ampdu_status.wcid_idx;
1107 if (wcid_idx < ARRAY_SIZE(dev->wcid))
1108 wcid = rcu_dereference(dev->wcid[wcid_idx]);
1111 dev->rx_ampdu_status.wcid = wcid;
1113 mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len);
1115 dev->rx_ampdu_len = 0;
1116 dev->rx_ampdu_ref = 0;
/* Per-frame airtime accounting hook.  Only active with
 * MT_DRV_SW_RX_AIRTIME.  Frames from unknown stations are only
 * counted when addressed to us (addr1 check).  Frames inside an
 * A-MPDU are accumulated by ampdu_ref and flushed as one report;
 * standalone frames are reported immediately.
 */
1120 mt76_airtime_check(struct mt76_dev *dev, struct sk_buff *skb)
1122 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
1123 struct mt76_wcid *wcid = status->wcid;
1125 if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME))
1128 if (!wcid || !wcid->sta) {
1129 struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
1131 if (status->flag & RX_FLAG_8023)
1134 if (!ether_addr_equal(hdr->addr1, dev->phy.macaddr))
/* A new (or no) A-MPDU: close out any previous aggregation first. */
1140 if (!(status->flag & RX_FLAG_AMPDU_DETAILS) ||
1141 status->ampdu_ref != dev->rx_ampdu_ref)
1142 mt76_airtime_flush_ampdu(dev);
1144 if (status->flag & RX_FLAG_AMPDU_DETAILS) {
1145 if (!dev->rx_ampdu_len ||
1146 status->ampdu_ref != dev->rx_ampdu_ref) {
1147 dev->rx_ampdu_status = *status;
1148 dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff;
1149 dev->rx_ampdu_ref = status->ampdu_ref;
1152 dev->rx_ampdu_len += skb->len;
1156 mt76_airtime_report(dev, status, skb->len);
/* Per-frame station bookkeeping on the rx path: resolve the wcid for
 * PS-poll frames, run airtime accounting, feed the RSSI EWMA, reset
 * the inactivity counter, and track the peer's power-save state
 * (including U-APSD triggers) for drivers that set
 * MT_WCID_FLAG_CHECK_PS, forwarding transitions to both the driver
 * and mac80211.
 */
1160 mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
1162 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
1163 struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
1164 struct ieee80211_sta *sta;
1165 struct ieee80211_hw *hw;
1166 struct mt76_wcid *wcid = status->wcid;
1167 u8 tidno = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK;
1170 hw = mt76_phy_hw(dev, status->ext_phy);
/* PS-poll may arrive before the wcid was resolved by the driver. */
1171 if (ieee80211_is_pspoll(hdr->frame_control) && !wcid &&
1172 !(status->flag & RX_FLAG_8023)) {
1173 sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr2, NULL);
1175 wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
1178 mt76_airtime_check(dev, skb);
1180 if (!wcid || !wcid->sta)
1183 sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
/* signal is a negative dBm value; EWMA stores its magnitude. */
1185 if (status->signal <= 0)
1186 ewma_signal_add(&wcid->rssi, -status->signal);
1188 wcid->inactive_count = 0;
1190 if (status->flag & RX_FLAG_8023)
1193 if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
1196 if (ieee80211_is_pspoll(hdr->frame_control)) {
1197 ieee80211_sta_pspoll(sta);
/* Only mgmt/data frames without morefrags carry a valid PM bit. */
1201 if (ieee80211_has_morefrags(hdr->frame_control) ||
1202 !(ieee80211_is_mgmt(hdr->frame_control) ||
1203 ieee80211_is_data(hdr->frame_control)))
1206 ps = ieee80211_has_pm(hdr->frame_control);
1208 if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
1209 ieee80211_is_qos_nullfunc(hdr->frame_control)))
1210 ieee80211_sta_uapsd_trigger(sta, tidno);
1212 if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
1216 set_bit(MT_WCID_FLAG_PS, &wcid->flags);
1218 dev->drv->sta_ps(dev, sta, ps);
1221 clear_bit(MT_WCID_FLAG_PS, &wcid->flags);
1223 ieee80211_sta_ps_transition(sta, ps);
/* Final delivery stage: for each queued frame, run the PN replay
 * check (replays are dropped — drop lines not visible here), unchain
 * A-MSDU subframes, convert status, and batch frames onto a local
 * list under rx_lock.  Delivery uses netif_receive_skb_list() or
 * per-skb GRO depending on @napi (the branch condition line is
 * missing from this extract).
 */
1226 void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
1227 struct napi_struct *napi)
1229 struct ieee80211_sta *sta;
1230 struct ieee80211_hw *hw;
1231 struct sk_buff *skb, *tmp;
1234 spin_lock(&dev->rx_lock);
1235 while ((skb = __skb_dequeue(frames)) != NULL) {
1236 struct sk_buff *nskb = skb_shinfo(skb)->frag_list;
1238 if (mt76_check_ccmp_pn(skb)) {
/* Detach subframes so each skb is delivered individually. */
1243 skb_shinfo(skb)->frag_list = NULL;
1244 mt76_rx_convert(dev, skb, &hw, &sta);
1245 ieee80211_rx_list(hw, sta, skb, &list);
1247 /* subsequent amsdu frames */
1253 mt76_rx_convert(dev, skb, &hw, &sta);
1254 ieee80211_rx_list(hw, sta, skb, &list);
1257 spin_unlock(&dev->rx_lock);
1260 netif_receive_skb_list(&list);
1264 list_for_each_entry_safe(skb, tmp, &list, list) {
1265 skb_list_del_init(skb);
1266 napi_gro_receive(napi, skb);
/* NAPI poll completion: drain the per-queue rx skb list, running
 * station bookkeeping and rx aggregation reordering on each frame,
 * then deliver the reordered frames via mt76_rx_complete().
 */
1270 void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
1271 struct napi_struct *napi)
1273 struct sk_buff_head frames;
1274 struct sk_buff *skb;
1276 __skb_queue_head_init(&frames);
1278 while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
1279 mt76_check_sta(dev, skb);
1280 mt76_rx_aggr_reorder(skb, &frames);
1283 mt76_rx_complete(dev, &frames, napi);
1285 EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);
/* Add a station under dev->mutex: driver hook first, then wire each
 * txq's drv_priv to the wcid index, init the RSSI EWMA, flag the
 * wcid's phy, publish the wcid pointer for RCU lookups and set up
 * tx packet-id tracking.  ext_phy-conditional lines are missing from
 * this extract.
 */
1288 mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
1289 struct ieee80211_sta *sta, bool ext_phy)
1291 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
1295 mutex_lock(&dev->mutex);
1297 ret = dev->drv->sta_add(dev, vif, sta);
1301 for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
1302 struct mt76_txq *mtxq;
1307 mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
1308 mtxq->wcid = wcid->idx;
1311 ewma_signal_init(&wcid->rssi);
1313 mt76_wcid_mask_set(dev->wcid_phy_mask, wcid->idx);
1314 wcid->ext_phy = ext_phy;
/* Publish last: rx path looks the wcid up via RCU. */
1315 rcu_assign_pointer(dev->wcid[wcid->idx], wcid);
1317 mt76_packet_id_init(wcid);
1319 mutex_unlock(&dev->mutex);
/* Remove a station (caller holds dev->mutex): stop all rx
 * aggregation sessions, run the optional driver hook, flush pending
 * tx packet ids and clear the wcid from both index masks.
 */
1324 void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
1325 struct ieee80211_sta *sta)
1327 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
1328 int i, idx = wcid->idx;
1330 for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
1331 mt76_rx_aggr_stop(dev, wcid, i);
1333 if (dev->drv->sta_remove)
1334 dev->drv->sta_remove(dev, vif, sta);
1336 mt76_packet_id_flush(dev, wcid);
1338 mt76_wcid_mask_clear(dev->wcid_mask, idx);
1339 mt76_wcid_mask_clear(dev->wcid_phy_mask, idx);
1341 EXPORT_SYMBOL_GPL(__mt76_sta_remove);
/* Locked wrapper around __mt76_sta_remove(). */
1344 mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
1345 struct ieee80211_sta *sta)
1347 mutex_lock(&dev->mutex);
1348 __mt76_sta_remove(dev, vif, sta);
1349 mutex_unlock(&dev->mutex);
/* mac80211 .sta_state callback: NOTEXIST->NONE adds the station,
 * AUTH->ASSOC invokes the optional driver assoc hook, NONE->NOTEXIST
 * removes it.  ext_phy distinguishes stations on the secondary PHY.
 */
1352 int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
1353 struct ieee80211_sta *sta,
1354 enum ieee80211_sta_state old_state,
1355 enum ieee80211_sta_state new_state)
1357 struct mt76_phy *phy = hw->priv;
1358 struct mt76_dev *dev = phy->dev;
1359 bool ext_phy = phy != &dev->phy;
1361 if (old_state == IEEE80211_STA_NOTEXIST &&
1362 new_state == IEEE80211_STA_NONE)
1363 return mt76_sta_add(dev, vif, sta, ext_phy);
1365 if (old_state == IEEE80211_STA_AUTH &&
1366 new_state == IEEE80211_STA_ASSOC &&
1367 dev->drv->sta_assoc)
1368 dev->drv->sta_assoc(dev, vif, sta);
1370 if (old_state == IEEE80211_STA_NONE &&
1371 new_state == IEEE80211_STA_NOTEXIST)
1372 mt76_sta_remove(dev, vif, sta);
1376 EXPORT_SYMBOL_GPL(mt76_sta_state);
/* Unpublish the wcid pointer before mac80211 frees the station, so
 * RCU readers (rx and tx-status paths) can no longer find it.
 * status_lock serializes against concurrent tx-status lookups.
 */
1378 void mt76_sta_pre_rcu_remove(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
1379 struct ieee80211_sta *sta)
1381 struct mt76_phy *phy = hw->priv;
1382 struct mt76_dev *dev = phy->dev;
1383 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
1385 mutex_lock(&dev->mutex);
1386 spin_lock_bh(&dev->status_lock);
1387 rcu_assign_pointer(dev->wcid[wcid->idx], NULL);
1388 spin_unlock_bh(&dev->status_lock);
1389 mutex_unlock(&dev->mutex);
1391 EXPORT_SYMBOL_GPL(mt76_sta_pre_rcu_remove);
/* mac80211 .get_txpower callback.  txpower_cur is stored in
 * half-dB units per chain; add the multi-chain combining delta and
 * round up to whole dBm.
 */
1393 int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
1396 struct mt76_phy *phy = hw->priv;
1397 int n_chains = hweight8(phy->antenna_mask);
1398 int delta = mt76_tx_power_nss_delta(n_chains);
1400 *dbm = DIV_ROUND_UP(phy->txpower_cur + delta, 2);
1404 EXPORT_SYMBOL_GPL(mt76_get_txpower);
/* Apply user-space SAR power limits (cfg80211 set_sar_specs).  Only
 * NL80211_SAR_TYPE_POWER specs are accepted; limits arrive in
 * 0.25 dBm units and are stored in 0.5 dBm units (>> 1) per
 * frequency range in phy->frp.  The index bounds check is on a line
 * missing from this extract — confirm it exists upstream of the
 * phy->frp[index] write.
 */
1406 int mt76_init_sar_power(struct ieee80211_hw *hw,
1407 const struct cfg80211_sar_specs *sar)
1409 struct mt76_phy *phy = hw->priv;
1410 const struct cfg80211_sar_capa *capa = hw->wiphy->sar_capa;
1413 if (sar->type != NL80211_SAR_TYPE_POWER || !sar->num_sub_specs)
1416 for (i = 0; i < sar->num_sub_specs; i++) {
1417 u32 index = sar->sub_specs[i].freq_range_index;
1418 /* SAR specifies power limitaton in 0.25dbm */
1419 s32 power = sar->sub_specs[i].power >> 1;
1421 if (power > 127 || power < -127)
1424 phy->frp[index].range = &capa->freq_ranges[index];
1425 phy->frp[index].power = power;
1430 EXPORT_SYMBOL_GPL(mt76_init_sar_power);
/* Clamp a requested tx power to the SAR limit for the channel's
 * frequency range, if one was configured.  @power is in the same
 * 0.5 dBm units as stored by mt76_init_sar_power(); values outside
 * [-127, 127] are rejected (error code on a missing line).
 */
1432 int mt76_get_sar_power(struct mt76_phy *phy,
1433 struct ieee80211_channel *chan,
1436 const struct cfg80211_sar_capa *capa = phy->hw->wiphy->sar_capa;
1439 if (!capa || !phy->frp)
1442 if (power > 127 || power < -127)
1445 freq = ieee80211_channel_to_frequency(chan->hw_value, chan->band);
1446 for (i = 0 ; i < capa->num_freq_ranges; i++) {
1447 if (phy->frp[i].range &&
1448 freq >= phy->frp[i].range->start_freq &&
1449 freq < phy->frp[i].range->end_freq) {
1450 power = min_t(int, phy->frp[i].power, power);
1457 EXPORT_SYMBOL_GPL(mt76_get_sar_power);
/* Per-vif iterator: complete the channel-switch announcement on vifs
 * whose beacon countdown has reached zero.
 */
1460 __mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
1462 if (vif->csa_active && ieee80211_beacon_cntdwn_is_complete(vif))
1463 ieee80211_csa_finish(vif);
/* Called by drivers once the hardware applied the CSA beacon; walks
 * all active interfaces and clears the completion flag.
 */
1466 void mt76_csa_finish(struct mt76_dev *dev)
1468 if (!dev->csa_complete)
1471 ieee80211_iterate_active_interfaces_atomic(dev->hw,
1472 IEEE80211_IFACE_ITER_RESUME_ALL,
1473 __mt76_csa_finish, dev);
1475 dev->csa_complete = 0;
1477 EXPORT_SYMBOL_GPL(mt76_csa_finish);
/* Per-vif iterator: latch csa_complete when any active vif's beacon
 * countdown has finished.
 */
1480 __mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
1482 struct mt76_dev *dev = priv;
1484 if (!vif->csa_active)
1487 dev->csa_complete |= ieee80211_beacon_cntdwn_is_complete(vif);
/* Poll all interfaces for a finished CSA countdown (drivers call
 * this from their beacon tasklet/work).
 */
1490 void mt76_csa_check(struct mt76_dev *dev)
1492 ieee80211_iterate_active_interfaces_atomic(dev->hw,
1493 IEEE80211_IFACE_ITER_RESUME_ALL,
1494 __mt76_csa_check, dev);
1496 EXPORT_SYMBOL_GPL(mt76_csa_check);
1499 mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
1503 EXPORT_SYMBOL_GPL(mt76_set_tim);
1505 void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
1507 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
1508 int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
1509 u8 *hdr, *pn = status->iv;
1512 memmove(skb->data, skb->data + 8, hdr_len);
1513 hdr = skb->data + hdr_len;
1518 hdr[3] = 0x20 | (key_id << 6);
1524 status->flag &= ~RX_FLAG_IV_STRIPPED;
1526 EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);
1528 int mt76_get_rate(struct mt76_dev *dev,
1529 struct ieee80211_supported_band *sband,
1532 int i, offset = 0, len = sband->n_bitrates;
1535 if (sband != &dev->phy.sband_2g.sband)
1538 idx &= ~BIT(2); /* short preamble */
1539 } else if (sband == &dev->phy.sband_2g.sband) {
1543 for (i = offset; i < len; i++) {
1544 if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
1550 EXPORT_SYMBOL_GPL(mt76_get_rate);
1552 void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
1555 struct mt76_phy *phy = hw->priv;
1557 set_bit(MT76_SCANNING, &phy->state);
1559 EXPORT_SYMBOL_GPL(mt76_sw_scan);
1561 void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
1563 struct mt76_phy *phy = hw->priv;
1565 clear_bit(MT76_SCANNING, &phy->state);
1567 EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);
1569 int mt76_get_antenna(struct ieee80211_hw *hw, u32 *tx_ant, u32 *rx_ant)
1571 struct mt76_phy *phy = hw->priv;
1572 struct mt76_dev *dev = phy->dev;
1574 mutex_lock(&dev->mutex);
1575 *tx_ant = phy->antenna_mask;
1576 *rx_ant = phy->antenna_mask;
1577 mutex_unlock(&dev->mutex);
1581 EXPORT_SYMBOL_GPL(mt76_get_antenna);
1584 mt76_init_queue(struct mt76_dev *dev, int qid, int idx, int n_desc,
1585 int ring_base, u32 flags)
1587 struct mt76_queue *hwq;
1590 hwq = devm_kzalloc(dev->dev, sizeof(*hwq), GFP_KERNEL);
1592 return ERR_PTR(-ENOMEM);
1596 err = dev->queue_ops->alloc(dev, hwq, idx, n_desc, 0, ring_base);
1598 return ERR_PTR(err);
1602 EXPORT_SYMBOL_GPL(mt76_init_queue);
1604 u16 mt76_calculate_default_rate(struct mt76_phy *phy, int rateidx)
1608 if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
1611 /* pick the lowest rate for hidden nodes */
1616 if (rateidx >= ARRAY_SIZE(mt76_rates))
1619 return mt76_rates[rateidx].hw_value;
1621 EXPORT_SYMBOL_GPL(mt76_calculate_default_rate);
1623 void mt76_ethtool_worker(struct mt76_ethtool_worker_info *wi,
1624 struct mt76_sta_stats *stats)
1626 int i, ei = wi->initial_stat_idx;
1627 u64 *data = wi->data;
1631 data[ei++] += stats->tx_mode[MT_PHY_TYPE_CCK];
1632 data[ei++] += stats->tx_mode[MT_PHY_TYPE_OFDM];
1633 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HT];
1634 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HT_GF];
1635 data[ei++] += stats->tx_mode[MT_PHY_TYPE_VHT];
1636 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_SU];
1637 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_EXT_SU];
1638 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_TB];
1639 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_MU];
1641 for (i = 0; i < ARRAY_SIZE(stats->tx_bw); i++)
1642 data[ei++] += stats->tx_bw[i];
1644 for (i = 0; i < 12; i++)
1645 data[ei++] += stats->tx_mcs[i];
1647 wi->worker_stat_count = ei - wi->initial_stat_idx;
1649 EXPORT_SYMBOL_GPL(mt76_ethtool_worker);
1651 enum mt76_dfs_state mt76_phy_dfs_state(struct mt76_phy *phy)
1653 struct ieee80211_hw *hw = phy->hw;
1654 struct mt76_dev *dev = phy->dev;
1656 if (dev->region == NL80211_DFS_UNSET ||
1657 test_bit(MT76_SCANNING, &phy->state))
1658 return MT_DFS_STATE_DISABLED;
1660 if (!hw->conf.radar_enabled) {
1661 if ((hw->conf.flags & IEEE80211_CONF_MONITOR) &&
1662 (phy->chandef.chan->flags & IEEE80211_CHAN_RADAR))
1663 return MT_DFS_STATE_ACTIVE;
1665 return MT_DFS_STATE_DISABLED;
1668 if (!cfg80211_reg_can_beacon(hw->wiphy, &phy->chandef, NL80211_IFTYPE_AP))
1669 return MT_DFS_STATE_CAC;
1671 return MT_DFS_STATE_ACTIVE;
1673 EXPORT_SYMBOL_GPL(mt76_phy_dfs_state);