2 * (c) Copyright 2002-2010, Ralink Technology, Inc.
3 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
4 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
5 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License version 2
9 * as published by the Free Software Foundation
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
23 #include "initvals_phy.h"
25 #include <linux/etherdevice.h>
28 mt76x0_rf_csr_wr(struct mt76x0_dev *dev, u32 offset, u8 value)
33 if (test_bit(MT76_REMOVED, &dev->mt76.state))
36 bank = MT_RF_BANK(offset);
37 reg = MT_RF_REG(offset);
39 if (WARN_ON_ONCE(reg > 64) || WARN_ON_ONCE(bank) > 8)
42 mutex_lock(&dev->reg_atomic_mutex);
44 if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
49 mt76_wr(dev, MT_RF_CSR_CFG,
50 FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
51 FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
52 FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
55 trace_mt76x0_rf_write(&dev->mt76, bank, offset, value);
57 mutex_unlock(&dev->reg_atomic_mutex);
60 dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
67 mt76x0_rf_csr_rr(struct mt76x0_dev *dev, u32 offset)
73 if (test_bit(MT76_REMOVED, &dev->mt76.state))
76 bank = MT_RF_BANK(offset);
77 reg = MT_RF_REG(offset);
79 if (WARN_ON_ONCE(reg > 64) || WARN_ON_ONCE(bank) > 8)
82 mutex_lock(&dev->reg_atomic_mutex);
84 if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
87 mt76_wr(dev, MT_RF_CSR_CFG,
88 FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
89 FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
92 if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
95 val = mt76_rr(dev, MT_RF_CSR_CFG);
96 if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
97 FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank) {
98 ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);
99 trace_mt76x0_rf_read(&dev->mt76, bank, offset, ret);
102 mutex_unlock(&dev->reg_atomic_mutex);
105 dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
/*
 * rf_wr - write one RF register, using the MCU reg-pair channel when the
 * MCU is running, otherwise falling back to direct CSR access.
 * NOTE(review): fragment — extraction dropped lines (pair initializers,
 * braces); comments describe only the visible logic.
 */
112 rf_wr(struct mt76x0_dev *dev, u32 offset, u8 val)
114 if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mt76.state)) {
115 struct mt76_reg_pair pair = {
120 return mt76x0_write_reg_pairs(dev, MT_MCU_MEMMAP_RF, &pair, 1);
123 return mt76x0_rf_csr_wr(dev, offset, val);
/*
 * rf_rr - read one RF register via the MCU reg-pair channel when the MCU
 * is up, else via direct CSR access; negative return means error.
 * NOTE(review): fragment — declarations and braces missing in this view.
 */
128 rf_rr(struct mt76x0_dev *dev, u32 offset)
133 if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mt76.state)) {
134 struct mt76_reg_pair pair = {
138 ret = mt76x0_read_reg_pairs(dev, MT_MCU_MEMMAP_RF, &pair, 1);
142 ret = val = mt76x0_rf_csr_rr(dev, offset);
/* Propagate the error code if either path failed, else the read value. */
145 return (ret < 0) ? ret : val;
/*
 * rf_rmw - read-modify-write an RF register: read, clear @mask bits,
 * OR in @val, write back. Error handling between the visible lines was
 * dropped by the extraction.
 */
149 rf_rmw(struct mt76x0_dev *dev, u32 offset, u8 mask, u8 val)
153 ret = rf_rr(dev, offset);
157 ret = rf_wr(dev, offset, val);
/* rf_set - set bits @val in an RF register (rmw with empty clear mask). */
165 rf_set(struct mt76x0_dev *dev, u32 offset, u8 val)
167 return rf_rmw(dev, offset, 0, val);
/* rf_clear - clear bits @mask in an RF register (rmw with empty set value). */
172 rf_clear(struct mt76x0_dev *dev, u32 offset, u8 mask)
174 return rf_rmw(dev, offset, mask, 0);
/*
 * RF_RANDOM_WRITE - burst-write a static table of RF bank/register pairs
 * through the MCU memory-map window.
 *
 * Fix: the old definition ended in a semicolon, so call sites written as
 * "RF_RANDOM_WRITE(dev, tab);" expanded to a double statement and the
 * macro silently broke inside an unbraced if/else arm. Callers supply
 * their own terminator.
 */
#define RF_RANDOM_WRITE(dev, tab) \
	mt76x0_write_reg_pairs(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab))
181 int mt76x0_wait_bbp_ready(struct mt76x0_dev *dev)
187 val = mt76_rr(dev, MT_BBP(CORE, 0));
188 printk("BBP version %08x\n", val);
194 dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
/*
 * mt76x0_bbp_set_ctrlch - program BBP bandwidth and control-channel index
 * into CORE/AGC/TXBE registers. The switch arms selecting core_val/agc_val
 * per channel width were dropped by the extraction.
 */
202 mt76x0_bbp_set_ctrlch(struct mt76x0_dev *dev, enum nl80211_chan_width width,
205 int core_val, agc_val;
208 case NL80211_CHAN_WIDTH_80:
212 case NL80211_CHAN_WIDTH_40:
222 mt76_rmw_field(dev, MT_BBP(CORE, 1), MT_BBP_CORE_R1_BW, core_val);
223 mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_BW, agc_val);
224 mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_CTRL_CHAN, ctrl);
225 mt76_rmw_field(dev, MT_BBP(TXBE, 0), MT_BBP_TXBE_R0_CTRL_CHAN, ctrl);
/*
 * mt76x0_phy_get_rssi - convert the raw RXWI RSSI byte to a calibrated
 * value using per-band EEPROM LNA gain and RSSI offset (chain 0 only).
 * NOTE(review): clamping/return lines are missing from this fragment.
 */
228 int mt76x0_phy_get_rssi(struct mt76x0_dev *dev, struct mt76x0_rxwi *rxwi)
230 s8 lna_gain, rssi_offset;
233 if (dev->mt76.chandef.chan->band == NL80211_BAND_2GHZ) {
234 lna_gain = dev->ee->lna_gain_2ghz;
235 rssi_offset = dev->ee->rssi_offset_2ghz[0];
237 lna_gain = dev->ee->lna_gain_5ghz[0];
238 rssi_offset = dev->ee->rssi_offset_5ghz[0];
241 val = rxwi->rssi[0] + rssi_offset - lna_gain;
/*
 * mt76x0_vco_cal - run the per-channel VCO calibration sequence on RF
 * bank 0 registers R3..R6, then trigger calibration by setting R4<7>.
 * The early-return path when (R4 & 0x70) != 0x30 and the bit twiddles
 * between the visible reads/writes were dropped by the extraction.
 */
246 static void mt76x0_vco_cal(struct mt76x0_dev *dev, u8 channel)
250 val = rf_rr(dev, MT_RF(0, 4));
251 if ((val & 0x70) != 0x30)
255 * Calibration Mode - Open loop, closed loop, and amplitude:
257 * B0.R06.[3:1] bp_close_code: 100
258 * B0.R05.[7:0] bp_open_code: 0x0
259 * B0.R04.[2:0] cal_bits: 000
260 * B0.R03.[2:0] startup_time: 011
261 * B0.R03.[6:4] settle_time:
266 val = rf_rr(dev, MT_RF(0, 6));
269 rf_wr(dev, MT_RF(0, 6), val);
271 val = rf_rr(dev, MT_RF(0, 5));
273 rf_wr(dev, MT_RF(0, 5), 0x0);
275 val = rf_rr(dev, MT_RF(0, 4));
277 rf_wr(dev, MT_RF(0, 4), val);
279 val = rf_rr(dev, MT_RF(0, 3));
/* settle_time selection is channel-group dependent. */
281 if (channel == 1 || channel == 7 || channel == 9 || channel >= 13) {
283 } else if (channel == 3 || channel == 4 || channel == 10) {
285 } else if (channel == 2 || channel == 5 || channel == 6 ||
286 channel == 8 || channel == 11 || channel == 12) {
289 WARN(1, "Unknown channel %u\n", channel);
292 rf_wr(dev, MT_RF(0, 3), val);
294 /* TODO replace by mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7)); */
295 val = rf_rr(dev, MT_RF(0, 4));
296 val = ((val & ~(0x80)) | 0x80);
297 rf_wr(dev, MT_RF(0, 4), val);
/*
 * mt76x0_mac_set_ctrlch - tell the MAC whether the primary 20 MHz channel
 * sits in the upper half of a 40 MHz channel.
 */
303 mt76x0_mac_set_ctrlch(struct mt76x0_dev *dev, bool primary_upper)
305 mt76_rmw_field(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_UPPER_40M,
/*
 * mt76x0_phy_set_band - load the per-band RF register table, flip the
 * TX band config bits, and set band-specific ALC/gain-correction values.
 * NOTE(review): fragment — break/default lines of the switch are missing.
 */
310 mt76x0_phy_set_band(struct mt76x0_dev *dev, enum nl80211_band band)
313 case NL80211_BAND_2GHZ:
314 RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);
316 rf_wr(dev, MT_RF(5, 0), 0x45);
317 rf_wr(dev, MT_RF(6, 0), 0x44);
319 mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
320 mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
322 mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
323 mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
325 case NL80211_BAND_5GHZ:
326 RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
/* Note the swapped values vs. 2 GHz on banks 5/6. */
328 rf_wr(dev, MT_RF(5, 0), 0x44);
329 rf_wr(dev, MT_RF(6, 0), 0x45);
331 mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
332 mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
334 mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
335 mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
/* EEPROM pa_type values: which bands are driven by an external PA. */
342 #define EXT_PA_2G_5G 0x0
343 #define EXT_PA_5G_ONLY 0x1
344 #define EXT_PA_2G_ONLY 0x2
345 #define INT_PA_2G_5G 0x3
/*
 * mt76x0_phy_set_chan_rf_params - full RF PLL/bandwidth/band programming
 * for one channel: pick the (SDM or integer) frequency-plan entry, write
 * the PLL register chain R37..R24, apply the BW- and band-switch tables,
 * then configure external-PA routing per the EEPROM pa_type.
 * NOTE(review): garbled extraction — mask-clear lines between most
 * read/OR/write triplets are missing; comments describe visible logic only.
 */
348 mt76x0_phy_set_chan_rf_params(struct mt76x0_dev *dev, u8 channel, u16 rf_bw_band)
350 u16 rf_band = rf_bw_band & 0xff00;
351 u16 rf_bw = rf_bw_band & 0x00ff;
356 const struct mt76x0_freq_item *freq_item;
/* Does this channel need the sigma-delta (SDM) frequency plan? */
358 for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
359 if (channel == mt76x0_sdm_channel[i]) {
365 for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
366 if (channel == mt76x0_frequency_plan[i].channel) {
367 rf_band = mt76x0_frequency_plan[i].band;
370 freq_item = &(mt76x0_sdm_frequency_plan[i]);
372 freq_item = &(mt76x0_frequency_plan[i]);
374 rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
375 rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
376 rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
377 rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
378 rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);
380 rf_val = rf_rr(dev, MT_RF(0, 32));
382 rf_val |= freq_item->pllR32_b7b5;
383 rf_wr(dev, MT_RF(0, 32), rf_val);
385 /* R32<4:0> pll_den: (Denomina - 8) */
386 rf_val = rf_rr(dev, MT_RF(0, 32));
388 rf_val |= freq_item->pllR32_b4b0;
389 rf_wr(dev, MT_RF(0, 32), rf_val);
392 rf_val = rf_rr(dev, MT_RF(0, 31));
394 rf_val |= freq_item->pllR31_b7b5;
395 rf_wr(dev, MT_RF(0, 31), rf_val);
397 /* R31<4:0> pll_k(Nominator) */
398 rf_val = rf_rr(dev, MT_RF(0, 31));
400 rf_val |= freq_item->pllR31_b4b0;
401 rf_wr(dev, MT_RF(0, 31), rf_val);
403 /* R30<7> sdm_reset_n */
404 rf_val = rf_rr(dev, MT_RF(0, 30));
407 rf_wr(dev, MT_RF(0, 30), rf_val);
409 rf_wr(dev, MT_RF(0, 30), rf_val);
411 rf_val |= freq_item->pllR30_b7;
412 rf_wr(dev, MT_RF(0, 30), rf_val);
415 /* R30<6:2> sdmmash_prbs,sin */
416 rf_val = rf_rr(dev, MT_RF(0, 30));
418 rf_val |= freq_item->pllR30_b6b2;
419 rf_wr(dev, MT_RF(0, 30), rf_val);
422 rf_val = rf_rr(dev, MT_RF(0, 30));
424 rf_val |= (freq_item->pllR30_b1 << 1);
425 rf_wr(dev, MT_RF(0, 30), rf_val);
427 /* R30<0> R29<7:0> (hex) pll_n */
428 rf_val = freq_item->pll_n & 0x00FF;
429 rf_wr(dev, MT_RF(0, 29), rf_val);
431 rf_val = rf_rr(dev, MT_RF(0, 30));
433 rf_val |= ((freq_item->pll_n >> 8) & 0x0001);
434 rf_wr(dev, MT_RF(0, 30), rf_val);
436 /* R28<7:6> isi_iso */
437 rf_val = rf_rr(dev, MT_RF(0, 28));
439 rf_val |= freq_item->pllR28_b7b6;
440 rf_wr(dev, MT_RF(0, 28), rf_val);
442 /* R28<5:4> pfd_dly */
443 rf_val = rf_rr(dev, MT_RF(0, 28));
445 rf_val |= freq_item->pllR28_b5b4;
446 rf_wr(dev, MT_RF(0, 28), rf_val);
448 /* R28<3:2> clksel option */
449 rf_val = rf_rr(dev, MT_RF(0, 28));
451 rf_val |= freq_item->pllR28_b3b2;
452 rf_wr(dev, MT_RF(0, 28), rf_val);
454 /* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
455 rf_val = freq_item->pll_sdm_k & 0x000000FF;
456 rf_wr(dev, MT_RF(0, 26), rf_val);
458 rf_val = ((freq_item->pll_sdm_k >> 8) & 0x000000FF);
459 rf_wr(dev, MT_RF(0, 27), rf_val);
461 rf_val = rf_rr(dev, MT_RF(0, 28));
463 rf_val |= ((freq_item->pll_sdm_k >> 16) & 0x0003);
464 rf_wr(dev, MT_RF(0, 28), rf_val);
466 /* R24<1:0> xo_div */
467 rf_val = rf_rr(dev, MT_RF(0, 24));
469 rf_val |= freq_item->pllR24_b1b0;
470 rf_wr(dev, MT_RF(0, 24), rf_val);
/* Apply bandwidth-dependent RF overrides (exact match, or BW+band match). */
476 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
477 if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
478 rf_wr(dev, mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
479 mt76x0_rf_bw_switch_tab[i].value);
480 } else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
481 (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
482 rf_wr(dev, mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
483 mt76x0_rf_bw_switch_tab[i].value);
487 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
488 if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
489 rf_wr(dev, mt76x0_rf_band_switch_tab[i].rf_bank_reg,
490 mt76x0_rf_band_switch_tab[i].value);
494 mac_reg = mt76_rr(dev, MT_RF_MISC);
495 mac_reg &= ~0xC; /* Clear 0x518[3:2] */
496 mt76_wr(dev, MT_RF_MISC, mac_reg);
/* Internal PA when pa_type covers this band; external PA otherwise. */
498 if (dev->ee->pa_type == INT_PA_2G_5G ||
499 (dev->ee->pa_type == EXT_PA_5G_ONLY && (rf_band & RF_G_BAND)) ||
500 (dev->ee->pa_type == EXT_PA_2G_ONLY && (rf_band & RF_A_BAND))) {
501 ; /* Internal PA - nothing to do. */
504 MT_RF_MISC (offset: 0x0518)
505 [2]1'b1: enable external A band PA, 1'b0: disable external A band PA
506 [3]1'b1: enable external G band PA, 1'b0: disable external G band PA
508 if (rf_band & RF_A_BAND) {
509 mac_reg = mt76_rr(dev, MT_RF_MISC);
511 mt76_wr(dev, MT_RF_MISC, mac_reg);
513 mac_reg = mt76_rr(dev, MT_RF_MISC);
515 mt76_wr(dev, MT_RF_MISC, mac_reg);
519 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
520 if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
521 rf_wr(dev, mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
522 mt76x0_rf_ext_pa_tab[i].value);
525 if (rf_band & RF_G_BAND) {
526 mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
527 /* Set Atten mode = 2 For G band, Disable Tx Inc dcoc. */
528 mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
529 mac_reg &= 0x896400FF;
530 mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
532 mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
533 /* Set Atten mode = 0 For Ext A band, Disable Tx Inc dcoc Cal. */
534 mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
535 mac_reg &= 0x890400FF;
536 mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
/*
 * mt76x0_phy_set_chan_bbp_params - apply the BBP switch table entries
 * matching the current BW/band; the AGC gain register (AGC,8) is patched
 * with the per-band EEPROM LNA gain (doubled) before writing.
 * NOTE(review): fragment — channel-range conditionals before L258/L260
 * are missing from this view.
 */
541 mt76x0_phy_set_chan_bbp_params(struct mt76x0_dev *dev, u8 channel, u16 rf_bw_band)
545 for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
546 const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
547 const struct mt76_reg_pair *pair = &item->reg_pair;
/* Skip entries whose bw_band mask does not cover the requested combo. */
549 if ((rf_bw_band & item->bw_band) != rf_bw_band)
552 if (pair->reg == MT_BBP(AGC, 8)) {
553 u32 val = pair->value;
554 u8 gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
558 gain -= dev->ee->lna_gain_5ghz[0]*2;
559 else if (channel < 137)
560 gain -= dev->ee->lna_gain_5ghz[1]*2;
562 gain -= dev->ee->lna_gain_5ghz[2]*2;
565 gain -= dev->ee->lna_gain_2ghz*2;
568 val &= ~MT_BBP_AGC_GAIN;
569 val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
570 mt76_wr(dev, pair->reg, val);
572 mt76_wr(dev, pair->reg, pair->value);
/*
 * mt76x0_extra_power_over_mac - repack per-rate power bytes from
 * TX_PWR_CFG_1..4 into the "extra" registers TX_PWR_CFG_7..9.
 */
579 mt76x0_extra_power_over_mac(struct mt76x0_dev *dev)
583 val = ((mt76_rr(dev, MT_TX_PWR_CFG_1) & 0x00003f00) >> 8);
584 val |= ((mt76_rr(dev, MT_TX_PWR_CFG_2) & 0x00003f00) << 8);
585 mt76_wr(dev, MT_TX_PWR_CFG_7, val);
588 val = ((mt76_rr(dev, MT_TX_PWR_CFG_3) & 0x0000ff00) >> 8);
589 mt76_wr(dev, MT_TX_PWR_CFG_8, val);
591 val = ((mt76_rr(dev, MT_TX_PWR_CFG_4) & 0x0000ff00) >> 8);
592 mt76_wr(dev, MT_TX_PWR_CFG_9, val);
/*
 * mt76x0_phy_set_tx_power - write the four TX_PWR_CFG registers from the
 * EEPROM per-band/per-bandwidth tables (index 0 = 20 MHz, 1 = wider),
 * then mirror extras into CFG_7..9.
 */
596 mt76x0_phy_set_tx_power(struct mt76x0_dev *dev, u8 channel, u8 rf_bw_band)
600 int bw = (rf_bw_band & RF_BW_20) ? 0 : 1;
602 for (i = 0; i < 4; i++) {
604 val = dev->ee->tx_pwr_cfg_2g[i][bw];
606 val = dev->ee->tx_pwr_cfg_5g[i][bw];
608 mt76_wr(dev, MT_TX_PWR_CFG_0 + 4*i, val);
611 mt76x0_extra_power_over_mac(dev);
/*
 * mt76x0_bbp_set_bw - map an nl80211 channel width onto the firmware's
 * BW encoding and hand it to the MCU via the BW_SETTING function select.
 * NOTE(review): fragment — the bw assignments and the unsupported-width
 * handling between the case labels are missing here.
 */
616 mt76x0_bbp_set_bw(struct mt76x0_dev *dev, enum nl80211_chan_width width)
618 enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4};
623 case NL80211_CHAN_WIDTH_20_NOHT:
624 case NL80211_CHAN_WIDTH_20:
627 case NL80211_CHAN_WIDTH_40:
630 case NL80211_CHAN_WIDTH_80:
633 case NL80211_CHAN_WIDTH_10:
636 case NL80211_CHAN_WIDTH_80P80:
637 case NL80211_CHAN_WIDTH_160:
638 case NL80211_CHAN_WIDTH_5:
643 mt76x0_mcu_function_select(dev, BW_SETTING, bw);
/*
 * mt76x0_phy_set_chan_pwr - look up the channel's index in the supported
 * channel list and program the matching EEPROM per-channel power into
 * TX_ALC_CFG_0.
 */
647 mt76x0_phy_set_chan_pwr(struct mt76x0_dev *dev, u8 channel)
649 static const int mt76x0_tx_pwr_ch_list[] = {
650 1,2,3,4,5,6,7,8,9,10,11,12,13,14,
651 36,38,40,44,46,48,52,54,56,60,62,64,
652 100,102,104,108,110,112,116,118,120,124,126,128,132,134,136,140,
653 149,151,153,157,159,161,165,167,169,171,173,
659 for (i = 0; i < ARRAY_SIZE(mt76x0_tx_pwr_ch_list); i++)
660 if (mt76x0_tx_pwr_ch_list[i] == channel)
/* Unknown channel: warn and bail rather than index out of bounds. */
663 if (WARN_ON(i == ARRAY_SIZE(mt76x0_tx_pwr_ch_list)))
666 val = mt76_rr(dev, MT_TX_ALC_CFG_0);
668 val |= dev->ee->tx_pwr_per_chan[i];
670 mt76_wr(dev, MT_TX_ALC_CFG_0, val);
/*
 * __mt76x0_phy_set_channel - core channel-switch path (caller holds the
 * hw mutex): derive the 20 MHz control-channel index for 40/80 MHz,
 * program ext-CCA routing, band, RF PLL, BBP tables, VCO cal, RX DCOC
 * calibration and per-channel power, then record the new chandef.
 * NOTE(review): fragment — declarations, ch_group_index computation for
 * 40 MHz, and scan-related early exits are missing in this view.
 */
674 __mt76x0_phy_set_channel(struct mt76x0_dev *dev,
675 struct cfg80211_chan_def *chandef)
677 u32 ext_cca_chan[4] = {
678 [0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
679 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
680 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
681 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
682 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
683 [1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
684 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
685 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
686 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
687 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
688 [2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
689 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
690 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
691 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
692 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
693 [3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
694 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
695 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
696 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
697 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
699 bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
700 int ch_group_index, freq, freq1;
705 freq = chandef->chan->center_freq;
706 freq1 = chandef->center_freq1;
707 channel = chandef->chan->hw_value;
708 rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;
710 switch (chandef->width) {
711 case NL80211_CHAN_WIDTH_40:
/* Shift to the 20 MHz control channel within the wide channel. */
716 channel += 2 - ch_group_index * 4;
717 rf_bw_band |= RF_BW_40;
719 case NL80211_CHAN_WIDTH_80:
720 ch_group_index = (freq - freq1 + 30) / 20;
721 if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
723 channel += 6 - ch_group_index * 4;
724 rf_bw_band |= RF_BW_80;
728 rf_bw_band |= RF_BW_20;
732 mt76x0_bbp_set_bw(dev, chandef->width);
733 mt76x0_bbp_set_ctrlch(dev, chandef->width, ch_group_index);
734 mt76x0_mac_set_ctrlch(dev, ch_group_index & 1);
736 mt76_rmw(dev, MT_EXT_CCA_CFG,
737 (MT_EXT_CCA_CFG_CCA0 |
738 MT_EXT_CCA_CFG_CCA1 |
739 MT_EXT_CCA_CFG_CCA2 |
740 MT_EXT_CCA_CFG_CCA3 |
741 MT_EXT_CCA_CFG_CCA_MASK),
742 ext_cca_chan[ch_group_index]);
744 mt76x0_phy_set_band(dev, chandef->chan->band);
745 mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);
747 /* set Japan Tx filter at channel 14 */
748 val = mt76_rr(dev, MT_BBP(CORE, 1));
753 mt76_wr(dev, MT_BBP(CORE, 1), val);
755 mt76x0_phy_set_chan_bbp_params(dev, channel, rf_bw_band);
757 /* Vendor driver don't do it */
758 /* mt76x0_phy_set_tx_power(dev, channel, rf_bw_band); */
760 mt76x0_vco_cal(dev, channel);
762 mt76x0_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
764 mt76x0_phy_set_chan_pwr(dev, channel);
766 dev->mt76.chandef = *chandef;
/*
 * mt76x0_phy_set_channel - public channel-switch entry point; serializes
 * the whole switch under hw_atomic_mutex and returns the core's result.
 */
770 int mt76x0_phy_set_channel(struct mt76x0_dev *dev,
771 struct cfg80211_chan_def *chandef)
775 mutex_lock(&dev->hw_atomic_mutex);
776 ret = __mt76x0_phy_set_channel(dev, chandef);
777 mutex_unlock(&dev->hw_atomic_mutex);
/*
 * mt76x0_phy_recalibrate_after_assoc - full MCU calibration pass once
 * associated: quiesce TX ALC and the IBI(9) register, run the LC/LOFT/
 * TXIQ/RXIQ/group-delay calibrations for the current band, then restore
 * the saved register state and re-enable RX DCOC.
 */
782 void mt76x0_phy_recalibrate_after_assoc(struct mt76x0_dev *dev)
785 u8 channel = dev->mt76.chandef.chan->hw_value;
786 int is_5ghz = (dev->mt76.chandef.chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
788 mt76x0_mcu_calibrate(dev, MCU_CAL_R, 0);
790 mt76x0_vco_cal(dev, channel);
/* Disable TX ALC while calibrating; restored below. */
792 tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
793 mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
794 usleep_range(500, 700);
796 reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
797 mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);
799 mt76x0_mcu_calibrate(dev, MCU_CAL_RXDCOC, 0);
801 mt76x0_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz);
802 mt76x0_mcu_calibrate(dev, MCU_CAL_LOFT, is_5ghz);
803 mt76x0_mcu_calibrate(dev, MCU_CAL_TXIQ, is_5ghz);
804 mt76x0_mcu_calibrate(dev, MCU_CAL_TX_GROUP_DELAY, is_5ghz);
805 mt76x0_mcu_calibrate(dev, MCU_CAL_RXIQ, is_5ghz);
806 mt76x0_mcu_calibrate(dev, MCU_CAL_RX_GROUP_DELAY, is_5ghz);
808 mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
809 mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
812 mt76x0_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
/* mt76x0_agc_save - stash the current AGC gain (single RX path). */
815 void mt76x0_agc_save(struct mt76x0_dev *dev)
817 /* Only one RX path */
818 dev->agc_save = FIELD_GET(MT_BBP_AGC_GAIN, mt76_rr(dev, MT_BBP(AGC, 8)));
/* mt76x0_agc_restore - write back the AGC gain saved by mt76x0_agc_save(). */
821 void mt76x0_agc_restore(struct mt76x0_dev *dev)
823 mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, dev->agc_save);
/*
 * mt76x0_temp_sensor - one-shot on-die temperature measurement: save the
 * RF sensor registers, switch them into sensing mode, poll CORE 34 for a
 * sample, convert the signed raw value with the EEPROM temperature offset,
 * then restore the original register values.
 * NOTE(review): the variable is named rf_b0_67 but both the save and the
 * restore use MT_RF(0, 73) — self-consistent, but the name looks stale;
 * confirm against the datasheet before renaming.
 */
826 static void mt76x0_temp_sensor(struct mt76x0_dev *dev)
828 u8 rf_b7_73, rf_b0_66, rf_b0_67;
833 rf_b7_73 = rf_rr(dev, MT_RF(7, 73));
834 rf_b0_66 = rf_rr(dev, MT_RF(0, 66));
835 rf_b0_67 = rf_rr(dev, MT_RF(0, 73));
837 rf_wr(dev, MT_RF(7, 73), 0x02);
838 rf_wr(dev, MT_RF(0, 66), 0x23);
839 rf_wr(dev, MT_RF(0, 73), 0x01);
841 mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
843 for (cycle = 0; cycle < 2000; cycle++) {
844 val = mt76_rr(dev, MT_BBP(CORE, 34));
852 mt76_wr(dev, MT_BBP(CORE, 34), val);
/* Sign-extend the 8-bit raw sample. */
856 sval = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
858 sval &= 0x7f; /* Positive */
860 sval |= 0xffffff00; /* Negative */
862 temp = (35 * (sval - dev->ee->temp_off))/ 10 + 25;
865 rf_wr(dev, MT_RF(7, 73), rf_b7_73);
866 rf_wr(dev, MT_RF(0, 66), rf_b0_66);
867 rf_wr(dev, MT_RF(0, 73), rf_b0_67);
/*
 * mt76x0_dynamic_vga_tuning - adjust the initial VGA gain in BBP AGC(8)
 * from the band default, stepped by the running average RSSI.
 * NOTE(review): fragment — the adjustments applied at the RSSI thresholds
 * and the AGC(8) field mask/clear lines are missing in this view.
 */
870 static void mt76x0_dynamic_vga_tuning(struct mt76x0_dev *dev)
874 init_vga = (dev->mt76.chandef.chan->band == NL80211_BAND_5GHZ) ? 0x54 : 0x4E;
875 if (dev->avg_rssi > -60)
877 else if (dev->avg_rssi > -70)
880 val = mt76_rr(dev, MT_BBP(AGC, 8));
882 val |= init_vga << 8;
883 mt76_wr(dev, MT_BBP(AGC,8), val);
/*
 * mt76x0_phy_calibrate - periodic calibration worker: retune the VGA,
 * sample the temperature sensor, and re-arm itself.
 */
886 static void mt76x0_phy_calibrate(struct work_struct *work)
888 struct mt76x0_dev *dev = container_of(work, struct mt76x0_dev,
891 mt76x0_dynamic_vga_tuning(dev);
892 mt76x0_temp_sensor(dev);
894 ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
895 MT_CALIBRATE_INTERVAL);
/*
 * mt76x0_phy_con_cal_onoff - start/stop connection monitoring: record the
 * AP BSSID and reset the beacon frequency-offset estimate under the
 * connection-monitor lock.
 */
898 void mt76x0_phy_con_cal_onoff(struct mt76x0_dev *dev,
899 struct ieee80211_bss_conf *info)
901 /* Start/stop collecting beacon data */
902 spin_lock_bh(&dev->con_mon_lock);
903 ether_addr_copy(dev->ap_bssid, info->bssid);
905 dev->bcn_freq_off = MT_FREQ_OFFSET_INVALID;
906 spin_unlock_bh(&dev->con_mon_lock);
/*
 * mt76x0_set_rx_chains - program the RX chain count into BBP AGC(0)
 * bits [4:3] based on the device chainmask; re-read to post the write.
 * NOTE(review): fragment — the value set for the second chain is missing.
 */
910 mt76x0_set_rx_chains(struct mt76x0_dev *dev)
914 val = mt76_rr(dev, MT_BBP(AGC, 0));
915 val &= ~(BIT(3) | BIT(4));
917 if (dev->chainmask & BIT(1))
920 mt76_wr(dev, MT_BBP(AGC, 0), val);
923 val = mt76_rr(dev, MT_BBP(AGC, 0));
/*
 * mt76x0_set_tx_dac - enable both TX DACs (TXBE(5) bits [1:0]) when a
 * second chain is present, else single-DAC mode.
 */
927 mt76x0_set_tx_dac(struct mt76x0_dev *dev)
929 if (dev->chainmask & BIT(1))
930 mt76_set(dev, MT_BBP(TXBE, 5), 3);
932 mt76_clear(dev, MT_BBP(TXBE, 5), 3);
/*
 * mt76x0_rf_init - one-time RF bring-up: load the static register tables,
 * apply the 20 MHz / G-band switch entries, program the EEPROM frequency
 * offset (capped at 0xBF), pulse the DAC reset on B0.R73<7>, and finally
 * kick VCO calibration via B0.R4<7>.
 * NOTE(review): fragment — the bit manipulations between the R73
 * writes and several loop braces were dropped by the extraction.
 */
936 mt76x0_rf_init(struct mt76x0_dev *dev)
941 RF_RANDOM_WRITE(dev, mt76x0_rf_central_tab);
942 RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);
943 RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
944 RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);
946 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
947 const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];
949 if (item->bw_band == RF_BW_20)
950 rf_wr(dev, item->rf_bank_reg, item->value);
951 else if (((RF_G_BAND | RF_BW_20) & item->bw_band) == (RF_G_BAND | RF_BW_20))
952 rf_wr(dev, item->rf_bank_reg, item->value);
955 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
956 if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
958 mt76x0_rf_band_switch_tab[i].rf_bank_reg,
959 mt76x0_rf_band_switch_tab[i].value);
964 Frequency calibration
965 E1: B0.R22<6:0>: xo_cxo<6:0>
966 E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
968 rf_wr(dev, MT_RF(0, 22), min_t(u8, dev->ee->rf_freq_off, 0xBF));
969 val = rf_rr(dev, MT_RF(0, 22));
972 Reset the DAC (Set B0.R73<7>=1, then set B0.R73<7>=0, and then set B0.R73<7>) during power up.
974 val = rf_rr(dev, MT_RF(0, 73));
976 rf_wr(dev, MT_RF(0, 73), val);
978 rf_wr(dev, MT_RF(0, 73), val);
980 rf_wr(dev, MT_RF(0, 73), val);
983 vcocal_en (initiate VCO calibration (reset after completion)) - It should be at the end of RF configuration.
985 rf_set(dev, MT_RF(0, 4), 0x80);
/*
 * mt76x0_ant_select - force single-antenna operation and disable the
 * BT coexistence antenna-sharing paths.
 */
988 static void mt76x0_ant_select(struct mt76x0_dev *dev)
990 /* Single antenna mode. */
991 mt76_rmw(dev, MT_WLAN_FUN_CTRL, BIT(5), BIT(6));
992 mt76_clear(dev, MT_CMB_CTRL, BIT(14) | BIT(12));
993 mt76_clear(dev, MT_COEXCFG0, BIT(2));
994 mt76_rmw(dev, MT_COEXCFG3, BIT(5) | BIT(4) | BIT(3) | BIT(2), BIT(1));
/*
 * mt76x0_phy_init - PHY bring-up: register the periodic calibration
 * worker, select the antenna configuration, initialize the RF, then set
 * the RX chain and TX DAC configuration.
 * NOTE(review): the definition continues past the end of this chunk.
 */
997 void mt76x0_phy_init(struct mt76x0_dev *dev)
999 INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibrate);
1001 mt76x0_ant_select(dev);
1003 mt76x0_rf_init(dev);
1005 mt76x0_set_rx_chains(dev);
1006 mt76x0_set_tx_dac(dev);