1 // SPDX-License-Identifier: ISC
3 * Copyright (C) 2016 Lorenzo Bianconi <lorenzo.bianconi83@gmail.com>
/*
 * RADAR_SPEC() - initializer for one struct mt76x02_radar_specs entry.
 * Positional arguments map onto the per-engine hw detector limits:
 * mode, averaging length, energy low/high, width low/high + tolerance,
 * period (t) low/high + tolerance, burst low/high, event expiration
 * and power-jump threshold.
 * NOTE(review): several field initializers are outside the visible
 * lines; only the trailing fields are shown below.
 */
8 #define RADAR_SPEC(m, len, el, eh, wl, wh, \
9 w_tolerance, tl, th, t_tolerance, \
10 bl, bh, event_exp, power_jmp) \
18 .w_margin = w_tolerance, \
21 .t_margin = t_tolerance, \
24 .event_expiration = event_exp, \
25 .pwr_jmp = power_jmp \
/*
 * ETSI-domain hw detector parameters: three groups of four engine
 * entries, indexed by channel width through 'shift' in
 * mt76x02_dfs_set_bbp_params() (0 / MT_DFS_NUM_ENGINES /
 * 2 * MT_DFS_NUM_ENGINES — presumably 20/40/80 MHz, confirm).
 */
28 static const struct mt76x02_radar_specs etsi_radar_specs[] = {
/* first width group (shift 0) */
30 RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
31 0x7fffffff, 0x155cc0, 0x19cc),
32 RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
33 0x7fffffff, 0x155cc0, 0x19cc),
34 RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
35 0x7fffffff, 0x155cc0, 0x19dd),
36 RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
37 0x7fffffff, 0x2191c0, 0x15cc),
/* second width group (shift MT_DFS_NUM_ENGINES) */
39 RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
40 0x7fffffff, 0x155cc0, 0x19cc),
41 RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
42 0x7fffffff, 0x155cc0, 0x19cc),
43 RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
44 0x7fffffff, 0x155cc0, 0x19dd),
45 RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
46 0x7fffffff, 0x2191c0, 0x15cc),
/* third width group (shift 2 * MT_DFS_NUM_ENGINES) */
48 RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
49 0x7fffffff, 0x155cc0, 0x19cc),
50 RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
51 0x7fffffff, 0x155cc0, 0x19cc),
52 RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
53 0x7fffffff, 0x155cc0, 0x19dd),
54 RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
55 0x7fffffff, 0x2191c0, 0x15cc)
/*
 * FCC-domain hw detector parameters; same layout as etsi_radar_specs:
 * groups of MT_DFS_NUM_ENGINES entries selected by channel width via
 * 'shift' in mt76x02_dfs_set_bbp_params(). Engine-index 3 rows
 * (mode 2, long widths) configure the chirp detector — see the
 * pulse->engine == 3 handling in mt76x02_dfs_check_hw_pulse().
 */
58 static const struct mt76x02_radar_specs fcc_radar_specs[] = {
/* first width group */
60 RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
61 0x7fffffff, 0xfe808, 0x13dc),
62 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
63 0x7fffffff, 0xfe808, 0x19dd),
64 RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
65 0x7fffffff, 0xfe808, 0x12cc),
66 RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
67 0x3938700, 0x57bcf00, 0x1289),
/* second width group */
69 RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
70 0x7fffffff, 0xfe808, 0x13dc),
71 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
72 0x7fffffff, 0xfe808, 0x19dd),
73 RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
74 0x7fffffff, 0xfe808, 0x12cc),
75 RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
76 0x3938700, 0x57bcf00, 0x1289),
/* third width group */
78 RADAR_SPEC(0, 8, 2, 14, 106, 150, 15, 2900, 80100, 15, 0,
79 0x7fffffff, 0xfe808, 0x16cc),
80 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
81 0x7fffffff, 0xfe808, 0x19dd),
82 RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
83 0x7fffffff, 0xfe808, 0x12cc),
84 RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
85 0x3938700, 0x57bcf00, 0x1289)
/*
 * JP (W56 band) hw detector parameters; same layout as the other
 * tables: groups of MT_DFS_NUM_ENGINES entries selected by channel
 * width via 'shift' in mt76x02_dfs_set_bbp_params().
 * Fix: normalized the inconsistent uppercase hex prefix 0X57bcf00 to
 * lowercase 0x57bcf00 (same value; matches every other literal in the
 * file and kernel style).
 */
88 static const struct mt76x02_radar_specs jp_w56_radar_specs[] = {
/* first width group */
90 RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
91 0x7fffffff, 0x14c080, 0x13dc),
92 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
93 0x7fffffff, 0x14c080, 0x19dd),
94 RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
95 0x7fffffff, 0x14c080, 0x12cc),
96 RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
97 0x3938700, 0x57bcf00, 0x1289),
/* second width group */
99 RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0,
100 0x7fffffff, 0x14c080, 0x13dc),
101 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
102 0x7fffffff, 0x14c080, 0x19dd),
103 RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
104 0x7fffffff, 0x14c080, 0x12cc),
105 RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
106 0x3938700, 0x57bcf00, 0x1289),
/* third width group */
108 RADAR_SPEC(0, 8, 2, 9, 106, 150, 15, 2900, 80100, 15, 0,
109 0x7fffffff, 0x14c080, 0x16cc),
110 RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
111 0x7fffffff, 0x14c080, 0x19dd),
112 RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0,
113 0x7fffffff, 0x14c080, 0x12cc),
114 RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0,
115 0x3938700, 0x57bcf00, 0x1289)
/*
 * JP (W53 band, 5250-5350 MHz) hw detector parameters — selected in
 * mt76x02_dfs_set_bbp_params() when the center frequency falls in the
 * W53 range. NOTE(review): only two entries per width group are
 * visible here; remaining engine slots are outside the visible lines.
 */
118 static const struct mt76x02_radar_specs jp_w53_radar_specs[] = {
/* first width group */
120 RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
121 0x7fffffff, 0x14c080, 0x16cc),
123 RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
124 0x7fffffff, 0x14c080, 0x16cc),
/* second width group */
127 RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
128 0x7fffffff, 0x14c080, 0x16cc),
130 RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
131 0x7fffffff, 0x14c080, 0x16cc),
/* third width group */
134 RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
135 0x7fffffff, 0x14c080, 0x16cc),
137 RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
138 0x7fffffff, 0x14c080, 0x16cc),
/*
 * Toggle the DFS capture (debug) mode via BBP DFS register 36.
 * Bit 1 is always set; bit 0 carries @enable.
 */
143 mt76x02_dfs_set_capture_mode_ctrl(struct mt76x02_dev *dev, u8 enable)
147 data = (1 << 1) | enable;
148 mt76_wr(dev, MT_BBP(DFS, 36), data);
/*
 * Return a sequence object to the detector free pool for later reuse
 * (entries are devm-allocated in mt76x02_dfs_seq_pool_get(), never
 * freed individually) and update the pool/sequence statistics.
 */
151 static void mt76x02_dfs_seq_pool_put(struct mt76x02_dev *dev,
152 struct mt76x02_dfs_sequence *seq)
154 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
156 list_add(&seq->head, &dfs_pd->seq_pool);
158 dfs_pd->seq_stats.seq_pool_len++;
159 dfs_pd->seq_stats.seq_len--;
/*
 * Obtain a sequence object: reuse one from the free pool when
 * available, otherwise allocate a new one with devm_kzalloc()
 * (GFP_ATOMIC — this runs in the DFS tasklet path). Updates the
 * pool/sequence statistics.
 */
162 static struct mt76x02_dfs_sequence *
163 mt76x02_dfs_seq_pool_get(struct mt76x02_dev *dev)
165 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
166 struct mt76x02_dfs_sequence *seq;
168 if (list_empty(&dfs_pd->seq_pool)) {
169 seq = devm_kzalloc(dev->mt76.dev, sizeof(*seq), GFP_ATOMIC);
/* pool non-empty: pop the most recently returned entry */
171 seq = list_first_entry(&dfs_pd->seq_pool,
172 struct mt76x02_dfs_sequence,
174 list_del(&seq->head);
175 dfs_pd->seq_stats.seq_pool_len--;
178 dfs_pd->seq_stats.seq_len++;
/*
 * Check whether @val is (approximately) an integer multiple of @frac,
 * allowing a deviation of @margin on the remainder. Used to test if a
 * pulse interval matches a sequence PRI.
 * NOTE(review): the return statements of the individual branches are
 * outside the visible lines; callers treat the result as the matched
 * multiplication factor (or a no-match sentinel) — confirm.
 */
183 static int mt76x02_dfs_get_multiple(int val, int frac, int margin)
185 int remainder, factor;
190 if (abs(val - frac) <= margin)
194 remainder = val % frac;
196 if (remainder > margin) {
/* also accept values just below the next multiple */
197 if ((frac - remainder) <= margin)
/*
 * Reset both radar detectors: re-arm the four hw engines via BBP DFS
 * register 1, clear the sw event ring buffers and recycle all active
 * sequences back into the pool.
 */
205 static void mt76x02_dfs_detector_reset(struct mt76x02_dev *dev)
207 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
208 struct mt76x02_dfs_sequence *seq, *tmp_seq;
211 /* reset hw detector */
212 mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
214 /* reset sw detector */
215 for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) {
216 dfs_pd->event_rb[i].h_rb = 0;
217 dfs_pd->event_rb[i].t_rb = 0;
/* drop all in-progress sequences */
220 list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) {
221 list_del_init(&seq->head);
222 mt76x02_dfs_seq_pool_put(dev, seq);
/*
 * Count consecutive chirp pulses using the PBF lifetime timer as a
 * timestamp source. Pulses arriving within 12 * 2^20 timer ticks of
 * the previous one (NOTE(review): tick unit not visible here —
 * presumably microsecond-scale, confirm) increment chirp_pulse_cnt;
 * more than 8 in a row is treated as a chirp radar match, otherwise
 * the counter restarts at 1.
 */
226 static bool mt76x02_dfs_check_chirp(struct mt76x02_dev *dev)
229 u32 current_ts, delta_ts;
230 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
232 current_ts = mt76_rr(dev, MT_PBF_LIFE_TIMER);
233 delta_ts = current_ts - dfs_pd->chirp_pulse_ts;
234 dfs_pd->chirp_pulse_ts = current_ts;
237 if (delta_ts <= (12 * (1 << 20))) {
238 if (++dfs_pd->chirp_pulse_cnt > 8)
241 dfs_pd->chirp_pulse_cnt = 1;
/*
 * Read one pending pulse report from a hw detection engine: select
 * the engine via BBP DFS register 0, then fetch period, two width
 * measurements and the burst count from the report registers.
 */
247 static void mt76x02_dfs_get_hw_pulse(struct mt76x02_dev *dev,
248 struct mt76x02_dfs_hw_pulse *pulse)
/* select the engine whose report should be latched */
253 data = (MT_DFS_CH_EN << 16) | pulse->engine;
254 mt76_wr(dev, MT_BBP(DFS, 0), data);
256 /* reported period */
257 pulse->period = mt76_rr(dev, MT_BBP(DFS, 19));
/* reported pulse widths (two measurements) */
260 pulse->w1 = mt76_rr(dev, MT_BBP(DFS, 20));
261 pulse->w2 = mt76_rr(dev, MT_BBP(DFS, 23));
263 /* reported burst number */
264 pulse->burst = mt76_rr(dev, MT_BBP(DFS, 22));
/*
 * Validate a hw-reported pulse against the period/width envelopes of
 * the active regulatory domain; only pulses consistent with a real
 * radar pattern are accepted. Engine 3 is the chirp detector and is
 * validated through mt76x02_dfs_check_chirp() instead.
 * NOTE(review): some case labels and returns (e.g. the JP case) fall
 * outside the visible lines.
 */
267 static bool mt76x02_dfs_check_hw_pulse(struct mt76x02_dev *dev,
268 struct mt76x02_dfs_hw_pulse *pulse)
/* a report with no period or zero width is noise */
272 if (!pulse->period || !pulse->w1)
275 switch (dev->mt76.region) {
276 case NL80211_DFS_FCC:
/* only engines 0-3 exist */
277 if (pulse->engine > 3)
280 if (pulse->engine == 3) {
281 ret = mt76x02_dfs_check_chirp(dev);
285 /* check short pulse*/
287 ret = (pulse->period >= 2900 &&
288 (pulse->period <= 4700 ||
289 pulse->period >= 6400) &&
290 (pulse->period <= 6800 ||
291 pulse->period >= 10200) &&
292 pulse->period <= 61600);
293 else if (pulse->w1 < 130) /* 120 - 130 */
294 ret = (pulse->period >= 2900 &&
295 pulse->period <= 61600);
/* wide pulses: narrower accepted period range */
297 ret = (pulse->period >= 3500 &&
298 pulse->period <= 10100);
300 case NL80211_DFS_ETSI:
/* ETSI uses engines 0-2 only */
301 if (pulse->engine >= 3)
304 ret = (pulse->period >= 4900 &&
305 (pulse->period <= 10200 ||
306 pulse->period >= 12400) &&
307 pulse->period <= 100100);
/* JP W53 sub-band (5250-5350 MHz) has its own envelope */
310 if (dev->mphy.chandef.chan->center_freq >= 5250 &&
311 dev->mphy.chandef.chan->center_freq <= 5350) {
313 if (pulse->w1 <= 130)
314 ret = (pulse->period >= 28360 &&
315 (pulse->period <= 28700 ||
316 pulse->period >= 76900) &&
317 pulse->period <= 76940);
/* JP W56: engine/period checks mirror the FCC structure */
321 if (pulse->engine > 3)
324 if (pulse->engine == 3) {
325 ret = mt76x02_dfs_check_chirp(dev);
329 /* check short pulse*/
331 ret = (pulse->period >= 2900 &&
332 (pulse->period <= 4700 ||
333 pulse->period >= 6400) &&
334 (pulse->period <= 6800 ||
335 pulse->period >= 27560) &&
336 (pulse->period <= 27960 ||
337 pulse->period >= 28360) &&
338 (pulse->period <= 28700 ||
339 pulse->period >= 79900) &&
340 pulse->period <= 80100);
341 else if (pulse->w1 < 130) /* 120 - 130 */
342 ret = (pulse->period >= 2900 &&
343 (pulse->period <= 10100 ||
344 pulse->period >= 27560) &&
345 (pulse->period <= 27960 ||
346 pulse->period >= 28360) &&
347 (pulse->period <= 28700 ||
348 pulse->period >= 79900) &&
349 pulse->period <= 80100);
351 ret = (pulse->period >= 3900 &&
352 pulse->period <= 10100);
354 case NL80211_DFS_UNSET:
/*
 * Fetch one sw radar event from the hw FIFO behind BBP DFS register
 * 37. Consecutive reads of the same register return successive fields
 * of the event, per the layout documented below.
 */
362 static bool mt76x02_dfs_fetch_event(struct mt76x02_dev *dev,
363 struct mt76x02_dfs_event *event)
367 /* 1st: DFS_R37[31]: 0 (engine 0) - 1 (engine 2)
368 * 2nd: DFS_R37[21:0]: pulse time
369 * 3rd: DFS_R37[11:0]: pulse width
370 * 3rd: DFS_R37[25:16]: phase
371 * 4th: DFS_R37[12:0]: current pwr
372 * 4th: DFS_R37[21:16]: pwr stable counter
374 * 1st: DFS_R37[31:0] set to 0xffffffff means no event detected
376 data = mt76_rr(dev, MT_BBP(DFS, 37));
377 if (!MT_DFS_CHECK_EVENT(data))
380 event->engine = MT_DFS_EVENT_ENGINE(data);
381 data = mt76_rr(dev, MT_BBP(DFS, 37));
382 event->ts = MT_DFS_EVENT_TIMESTAMP(data);
383 data = mt76_rr(dev, MT_BBP(DFS, 37));
384 event->width = MT_DFS_EVENT_WIDTH(data);
/*
 * Sanity-check a fetched event. For engine 2 (ring buffer 1), an
 * event arriving within MT_DFS_EVENT_TIME_MARGIN of a previous wide
 * (>= 200) event is rejected — NOTE(review): presumably filtering the
 * trailing edge of the same pulse, confirm.
 */
389 static bool mt76x02_dfs_check_event(struct mt76x02_dev *dev,
390 struct mt76x02_dfs_event *event)
392 if (event->engine == 2) {
393 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
394 struct mt76x02_dfs_event_rb *event_buff = &dfs_pd->event_rb[1];
/* compare against the most recently queued event */
398 last_event_idx = mt76_decr(event_buff->t_rb,
399 MT_DFS_EVENT_BUFLEN);
400 delta_ts = event->ts - event_buff->data[last_event_idx].ts;
401 if (delta_ts < MT_DFS_EVENT_TIME_MARGIN &&
402 event_buff->data[last_event_idx].width >= 200)
/*
 * Store a validated radar event in the per-engine ring buffer
 * (engine 2 -> event_rb[1], others -> event_rb[0]), stamping it with
 * the current jiffies for later window expiration. When the tail
 * catches the head, the oldest event is overwritten.
 */
408 static void mt76x02_dfs_queue_event(struct mt76x02_dev *dev,
409 struct mt76x02_dfs_event *event)
411 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
412 struct mt76x02_dfs_event_rb *event_buff;
414 /* add radar event to ring buffer */
415 event_buff = event->engine == 2 ? &dfs_pd->event_rb[1]
416 : &dfs_pd->event_rb[0];
417 event_buff->data[event_buff->t_rb] = *event;
418 event_buff->data[event_buff->t_rb].fetch_ts = jiffies;
420 event_buff->t_rb = mt76_incr(event_buff->t_rb, MT_DFS_EVENT_BUFLEN);
/* buffer full: advance head, dropping the oldest entry */
421 if (event_buff->t_rb == event_buff->h_rb)
422 event_buff->h_rb = mt76_incr(event_buff->h_rb,
423 MT_DFS_EVENT_BUFLEN);
/*
 * Try to start new pulse sequences anchored at @event by scanning the
 * engine's event ring buffer backwards: each earlier event whose
 * width is close enough and whose interval to @event lies within the
 * per-region PRI bounds becomes a candidate PRI; older events
 * matching a multiple of that PRI extend the candidate sequence.
 * Sequences longer than @cur_len (the best length found by
 * mt76x02_dfs_add_event_to_sequence()) are committed to the active
 * sequence list via the pool allocator.
 */
426 static int mt76x02_dfs_create_sequence(struct mt76x02_dev *dev,
427 struct mt76x02_dfs_event *event,
430 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
431 struct mt76x02_dfs_sw_detector_params *sw_params;
432 u32 width_delta, with_sum;
433 struct mt76x02_dfs_sequence seq, *seq_p;
434 struct mt76x02_dfs_event_rb *event_rb;
435 struct mt76x02_dfs_event *cur_event;
436 int i, j, end, pri, factor, cur_pri;
438 event_rb = event->engine == 2 ? &dfs_pd->event_rb[1]
439 : &dfs_pd->event_rb[0];
/* scan from newest (t_rb - 1) back to oldest (h_rb - 1) */
441 i = mt76_decr(event_rb->t_rb, MT_DFS_EVENT_BUFLEN);
442 end = mt76_decr(event_rb->h_rb, MT_DFS_EVENT_BUFLEN);
445 cur_event = &event_rb->data[i];
446 with_sum = event->width + cur_event->width;
/* per-region width tolerance, derived from the combined width */
448 sw_params = &dfs_pd->sw_dpd_params;
449 switch (dev->mt76.region) {
450 case NL80211_DFS_FCC:
455 width_delta = with_sum >> 3;
457 case NL80211_DFS_ETSI:
458 if (event->engine == 2)
459 width_delta = with_sum >> 6;
460 else if (with_sum < 620)
465 case NL80211_DFS_UNSET:
/* candidate PRI must fall inside the regulatory bounds */
470 pri = event->ts - cur_event->ts;
471 if (abs(event->width - cur_event->width) > width_delta ||
472 pri < sw_params->min_pri)
475 if (pri > sw_params->max_pri)
478 seq.pri = event->ts - cur_event->ts;
479 seq.first_ts = cur_event->ts;
480 seq.last_ts = event->ts;
481 seq.engine = event->engine;
/* walk further back, accepting events at multiples of the PRI */
484 j = mt76_decr(i, MT_DFS_EVENT_BUFLEN);
486 cur_event = &event_rb->data[j];
487 cur_pri = event->ts - cur_event->ts;
488 factor = mt76x02_dfs_get_multiple(cur_pri, seq.pri,
489 sw_params->pri_margin);
491 seq.first_ts = cur_event->ts;
495 j = mt76_decr(j, MT_DFS_EVENT_BUFLEN);
/* only keep sequences longer than the current best */
497 if (seq.count <= cur_len)
500 seq_p = mt76x02_dfs_seq_pool_get(dev);
505 INIT_LIST_HEAD(&seq_p->head);
506 list_add(&seq_p->head, &dfs_pd->sequences);
508 i = mt76_decr(i, MT_DFS_EVENT_BUFLEN);
/*
 * Feed @event into every active sequence: sequences whose start lies
 * further back than MT_DFS_SEQUENCE_WINDOW are expired and recycled;
 * engine-mismatched sequences are skipped; a sequence whose PRI
 * divides the interval to @event (within pri_margin) absorbs the
 * event. Returns the longest matching sequence length observed, used
 * as the threshold for mt76x02_dfs_create_sequence().
 */
513 static u16 mt76x02_dfs_add_event_to_sequence(struct mt76x02_dev *dev,
514 struct mt76x02_dfs_event *event)
516 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
517 struct mt76x02_dfs_sw_detector_params *sw_params;
518 struct mt76x02_dfs_sequence *seq, *tmp_seq;
522 sw_params = &dfs_pd->sw_dpd_params;
523 list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) {
/* expire sequences outside the detection window */
524 if (event->ts > seq->first_ts + MT_DFS_SEQUENCE_WINDOW) {
525 list_del_init(&seq->head);
526 mt76x02_dfs_seq_pool_put(dev, seq);
530 if (event->engine != seq->engine)
533 pri = event->ts - seq->last_ts;
534 factor = mt76x02_dfs_get_multiple(pri, seq->pri,
535 sw_params->pri_margin);
537 seq->last_ts = event->ts;
539 max_seq_len = max_t(u16, max_seq_len, seq->count);
/*
 * Sw detector verdict: report a radar detection (and bump the
 * per-engine sw_pattern counter) when any active sequence has
 * accumulated more than MT_DFS_SEQUENCE_TH matching pulses.
 */
545 static bool mt76x02_dfs_check_detection(struct mt76x02_dev *dev)
547 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
548 struct mt76x02_dfs_sequence *seq;
550 if (list_empty(&dfs_pd->sequences))
553 list_for_each_entry(seq, &dfs_pd->sequences, head) {
554 if (seq->count > MT_DFS_SEQUENCE_TH) {
555 dfs_pd->stats[seq->engine].sw_pattern++;
/*
 * Drain pending radar events from the hw (at most MT_DFS_EVENT_LOOP
 * per run): capture mode is turned off while reading and back on
 * afterwards; a backwards-jumping timestamp resets the whole detector
 * (timer wrap); each valid event is fed to the sequence matcher,
 * used to seed new sequences, and queued in the ring buffer.
 */
562 static void mt76x02_dfs_add_events(struct mt76x02_dev *dev)
564 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
565 struct mt76x02_dfs_event event;
568 /* disable debug mode */
569 mt76x02_dfs_set_capture_mode_ctrl(dev, false);
570 for (i = 0; i < MT_DFS_EVENT_LOOP; i++) {
571 if (!mt76x02_dfs_fetch_event(dev, &event))
/* timestamp went backwards: hw timer wrapped, start over */
574 if (dfs_pd->last_event_ts > event.ts)
575 mt76x02_dfs_detector_reset(dev);
576 dfs_pd->last_event_ts = event.ts;
578 if (!mt76x02_dfs_check_event(dev, &event))
581 seq_len = mt76x02_dfs_add_event_to_sequence(dev, &event);
582 mt76x02_dfs_create_sequence(dev, &event, seq_len);
584 mt76x02_dfs_queue_event(dev, &event);
586 mt76x02_dfs_set_capture_mode_ctrl(dev, true);
/*
 * Age out stale entries from both event ring buffers: advance the
 * head past any event fetched longer than MT_DFS_EVENT_WINDOW ago,
 * stopping at the first still-fresh event.
 */
589 static void mt76x02_dfs_check_event_window(struct mt76x02_dev *dev)
591 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
592 struct mt76x02_dfs_event_rb *event_buff;
593 struct mt76x02_dfs_event *event;
596 for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) {
597 event_buff = &dfs_pd->event_rb[i];
599 while (event_buff->h_rb != event_buff->t_rb) {
600 event = &event_buff->data[event_buff->h_rb];
/* head event still inside the window: the rest are newer */
603 if (time_is_after_jiffies(event->fetch_ts +
604 MT_DFS_EVENT_WINDOW))
606 event_buff->h_rb = mt76_incr(event_buff->h_rb,
607 MT_DFS_EVENT_BUFLEN);
/*
 * GP-timer tasklet: main DFS polling loop. Skipped while scanning.
 * Runs the sw pattern detector (rate limited by MT_DFS_SW_TIMEOUT):
 * drains events, checks for a sequence match, expires the event
 * window. Then polls the hw engines flagged in BBP DFS register 1 and
 * validates each reported pulse. Any match is reported to mac80211
 * and resets the detectors; the GP timer irq is re-enabled on exit.
 */
612 static void mt76x02_dfs_tasklet(struct tasklet_struct *t)
614 struct mt76x02_dfs_pattern_detector *dfs_pd = from_tasklet(dfs_pd, t,
616 struct mt76x02_dev *dev = container_of(dfs_pd, typeof(*dev), dfs_pd);
620 if (test_bit(MT76_SCANNING, &dev->mphy.state))
/* sw detector pass, at most once per MT_DFS_SW_TIMEOUT */
623 if (time_is_before_jiffies(dfs_pd->last_sw_check +
624 MT_DFS_SW_TIMEOUT)) {
627 dfs_pd->last_sw_check = jiffies;
629 mt76x02_dfs_add_events(dev);
630 radar_detected = mt76x02_dfs_check_detection(dev);
631 if (radar_detected) {
632 /* sw detector rx radar pattern */
633 ieee80211_radar_detected(dev->mt76.hw);
634 mt76x02_dfs_detector_reset(dev);
638 mt76x02_dfs_check_event_window(dev);
/* hw detector pass: engine_mask low bits flag pending pulses */
641 engine_mask = mt76_rr(dev, MT_BBP(DFS, 1));
642 if (!(engine_mask & 0xf))
645 for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
646 struct mt76x02_dfs_hw_pulse pulse;
648 if (!(engine_mask & (1 << i)))
652 mt76x02_dfs_get_hw_pulse(dev, &pulse);
654 if (!mt76x02_dfs_check_hw_pulse(dev, &pulse)) {
655 dfs_pd->stats[i].hw_pulse_discarded++;
659 /* hw detector rx radar pattern */
660 dfs_pd->stats[i].hw_pattern++;
661 ieee80211_radar_detected(dev->mt76.hw);
662 mt76x02_dfs_detector_reset(dev);
667 /* reset hw detector */
668 mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
671 mt76x02_irq_enable(dev, MT_INT_GPTIMER);
/*
 * Program the sw pattern detector's PRI bounds and matching margin
 * for the active regulatory domain (ETSI uses a 4x wider margin).
 * NOTE(review): the JP case label sits outside the visible lines; the
 * MT_DFS_JP_* assignments belong to it.
 */
674 static void mt76x02_dfs_init_sw_detector(struct mt76x02_dev *dev)
676 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
678 switch (dev->mt76.region) {
679 case NL80211_DFS_FCC:
680 dfs_pd->sw_dpd_params.max_pri = MT_DFS_FCC_MAX_PRI;
681 dfs_pd->sw_dpd_params.min_pri = MT_DFS_FCC_MIN_PRI;
682 dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN;
684 case NL80211_DFS_ETSI:
685 dfs_pd->sw_dpd_params.max_pri = MT_DFS_ETSI_MAX_PRI;
686 dfs_pd->sw_dpd_params.min_pri = MT_DFS_ETSI_MIN_PRI;
687 dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN << 2;
690 dfs_pd->sw_dpd_params.max_pri = MT_DFS_JP_MAX_PRI;
691 dfs_pd->sw_dpd_params.min_pri = MT_DFS_JP_MIN_PRI;
692 dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN;
694 case NL80211_DFS_UNSET:
/*
 * Program the baseband DFS registers: pick the parameter table for
 * the active regulatory domain (JP additionally splits into W53/W56
 * by center frequency) and the row group for the channel width, then
 * configure each of the MT_DFS_NUM_ENGINES engines and enable
 * detection.
 */
700 static void mt76x02_dfs_set_bbp_params(struct mt76x02_dev *dev)
702 const struct mt76x02_radar_specs *radar_specs;
/* row-group offset into the spec tables by channel width */
706 switch (dev->mphy.chandef.width) {
707 case NL80211_CHAN_WIDTH_40:
708 shift = MT_DFS_NUM_ENGINES;
710 case NL80211_CHAN_WIDTH_80:
711 shift = 2 * MT_DFS_NUM_ENGINES;
718 switch (dev->mt76.region) {
719 case NL80211_DFS_FCC:
720 radar_specs = &fcc_radar_specs[shift];
722 case NL80211_DFS_ETSI:
723 radar_specs = &etsi_radar_specs[shift];
/* JP: W53 band (5250-5350 MHz) has its own table */
726 if (dev->mphy.chandef.chan->center_freq >= 5250 &&
727 dev->mphy.chandef.chan->center_freq <= 5350)
728 radar_specs = &jp_w53_radar_specs[shift];
730 radar_specs = &jp_w56_radar_specs[shift];
732 case NL80211_DFS_UNSET:
/* global detector tuning (VGA mask, gains, timing) */
737 data = (MT_DFS_VGA_MASK << 16) |
738 (MT_DFS_PWR_GAIN_OFFSET << 12) |
739 (MT_DFS_PWR_DOWN_TIME << 8) |
740 (MT_DFS_SYM_ROUND << 4) |
741 (MT_DFS_DELTA_DELAY & 0xf);
742 mt76_wr(dev, MT_BBP(DFS, 2), data);
744 data = (MT_DFS_RX_PE_MASK << 16) | MT_DFS_PKT_END_MASK;
745 mt76_wr(dev, MT_BBP(DFS, 3), data);
747 for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
748 /* configure engine */
749 mt76_wr(dev, MT_BBP(DFS, 0), i);
751 /* detection mode + avg_len */
752 data = ((radar_specs[i].avg_len & 0x1ff) << 16) |
753 (radar_specs[i].mode & 0xf);
754 mt76_wr(dev, MT_BBP(DFS, 4), data);
/* energy low/high thresholds */
757 data = ((radar_specs[i].e_high & 0x0fff) << 16) |
758 (radar_specs[i].e_low & 0x0fff);
759 mt76_wr(dev, MT_BBP(DFS, 5), data);
/* period (pri) low/high bounds */
762 mt76_wr(dev, MT_BBP(DFS, 7), radar_specs[i].t_low);
763 mt76_wr(dev, MT_BBP(DFS, 9), radar_specs[i].t_high);
/* burst low/high bounds */
766 mt76_wr(dev, MT_BBP(DFS, 11), radar_specs[i].b_low);
767 mt76_wr(dev, MT_BBP(DFS, 13), radar_specs[i].b_high);
/* width low/high bounds */
770 data = ((radar_specs[i].w_high & 0x0fff) << 16) |
771 (radar_specs[i].w_low & 0x0fff);
772 mt76_wr(dev, MT_BBP(DFS, 14), data);
/* width/period tolerances */
775 data = (radar_specs[i].w_margin << 16) |
776 radar_specs[i].t_margin;
777 mt76_wr(dev, MT_BBP(DFS, 15), data);
779 /* dfs event expiration */
780 mt76_wr(dev, MT_BBP(DFS, 17), radar_specs[i].event_expiration);
/* power jump threshold */
783 mt76_wr(dev, MT_BBP(DFS, 30), radar_specs[i].pwr_jmp);
/* arm all engines and set capture control */
787 mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
788 mt76_wr(dev, MT_BBP(DFS, 36), 0x3);
790 /* enable detection*/
791 mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
792 mt76_wr(dev, MT_BBP(IBI, 11), 0x0c350001);
/*
 * Track the current AGC gain in the DFS detector: derive an updated
 * LNA gain field for BBP AGC register 4 from AGC register 8, then
 * recompute the DFS gain reference (BBP DFS register 31) so the pulse
 * power thresholds follow the rx gain. On mt76x2 the hw detector is
 * additionally bounced (disable/enable) to latch the new setting.
 * NOTE(review): exact AGC bit-field meanings are taken from the masks
 * below and are not otherwise documented here — confirm against the
 * vendor register map.
 */
795 void mt76x02_phy_dfs_adjust_agc(struct mt76x02_dev *dev)
797 u32 agc_r8, agc_r4, val_r8, val_r4, dfs_r31;
799 agc_r8 = mt76_rr(dev, MT_BBP(AGC, 8));
800 agc_r4 = mt76_rr(dev, MT_BBP(AGC, 4));
/* AGC_R8[14:9] -> halved (+1 rounding) into AGC_R4[28:24] */
802 val_r8 = (agc_r8 & 0x00007e00) >> 9;
803 val_r4 = agc_r4 & ~0x1f000000;
804 val_r4 += (((val_r8 + 1) >> 1) << 24);
805 mt76_wr(dev, MT_BBP(AGC, 4), val_r4);
/* gain reference = LNA high gain minus AGC_R8[5:3] */
807 dfs_r31 = FIELD_GET(MT_BBP_AGC_LNA_HIGH_GAIN, val_r4);
809 dfs_r31 -= (agc_r8 & 0x00000038) >> 3;
810 dfs_r31 = (dfs_r31 << 16) | 0x00000307;
811 mt76_wr(dev, MT_BBP(DFS, 31), dfs_r31);
813 if (is_mt76x2(dev)) {
814 mt76_wr(dev, MT_BBP(DFS, 32), 0x00040071);
816 /* disable hw detector */
817 mt76_wr(dev, MT_BBP(DFS, 0), 0);
818 /* enable hw detector */
819 mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
822 EXPORT_SYMBOL_GPL(mt76x02_phy_dfs_adjust_agc);
/*
 * Enable or disable radar detection for the current channel. When the
 * channel requires DFS: set up the sw detector, program the baseband,
 * enable capture mode and the GP timer interrupt. Otherwise: shut the
 * hw detector down (with a chip-specific IBI register value for
 * 0x7610/0x7630) and mask the GP timer interrupt.
 */
824 void mt76x02_dfs_init_params(struct mt76x02_dev *dev)
826 if (mt76_phy_dfs_state(&dev->mphy) > MT_DFS_STATE_DISABLED) {
827 mt76x02_dfs_init_sw_detector(dev);
828 mt76x02_dfs_set_bbp_params(dev);
829 /* enable debug mode */
830 mt76x02_dfs_set_capture_mode_ctrl(dev, true);
832 mt76x02_irq_enable(dev, MT_INT_GPTIMER);
833 mt76_rmw_field(dev, MT_INT_TIMER_EN,
834 MT_INT_TIMER_EN_GP_TIMER_EN, 1);
836 /* disable hw detector */
837 mt76_wr(dev, MT_BBP(DFS, 0), 0);
838 /* clear detector status */
839 mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
840 if (mt76_chip(&dev->mt76) == 0x7610 ||
841 mt76_chip(&dev->mt76) == 0x7630)
842 mt76_wr(dev, MT_BBP(IBI, 11), 0xfde8081)
844 mt76_wr(dev, MT_BBP(IBI, 11), 0);
846 mt76x02_irq_disable(dev, MT_INT_GPTIMER);
847 mt76_rmw_field(dev, MT_INT_TIMER_EN,
848 MT_INT_TIMER_EN_GP_TIMER_EN, 0);
851 EXPORT_SYMBOL_GPL(mt76x02_dfs_init_params);
/*
 * One-time detector initialization at probe: set up the sequence and
 * pool lists, default the region to UNSET, seed the sw-check
 * timestamp and register the DFS tasklet.
 */
853 void mt76x02_dfs_init_detector(struct mt76x02_dev *dev)
855 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
857 INIT_LIST_HEAD(&dfs_pd->sequences);
858 INIT_LIST_HEAD(&dfs_pd->seq_pool);
859 dev->mt76.region = NL80211_DFS_UNSET;
860 dfs_pd->last_sw_check = jiffies;
861 tasklet_setup(&dfs_pd->dfs_tasklet, mt76x02_dfs_tasklet);
/*
 * Switch the DFS regulatory domain (under the mt76 mutex). On an
 * actual change the tasklet is quiesced while the energy-detect
 * monitor (ETSI only) and the detector parameters are reprogrammed.
 */
865 mt76x02_dfs_set_domain(struct mt76x02_dev *dev,
866 enum nl80211_dfs_regions region)
868 struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd;
870 mutex_lock(&dev->mt76.mutex);
871 if (dev->mt76.region != region) {
872 tasklet_disable(&dfs_pd->dfs_tasklet);
/* ed monitor is only meaningful under ETSI rules */
874 dev->ed_monitor = dev->ed_monitor_enabled &&
875 region == NL80211_DFS_ETSI;
876 mt76x02_edcca_init(dev);
878 dev->mt76.region = region;
879 mt76x02_dfs_init_params(dev);
880 tasklet_enable(&dfs_pd->dfs_tasklet);
882 mutex_unlock(&dev->mt76.mutex);
/*
 * cfg80211 regulatory notifier: propagate the new DFS region from the
 * regulatory request into the detector configuration.
 */
885 void mt76x02_regd_notifier(struct wiphy *wiphy,
886 struct regulatory_request *request)
888 struct ieee80211_hw *hw = wiphy_to_ieee80211_hw(wiphy);
889 struct mt76x02_dev *dev = hw->priv;
891 mt76x02_dfs_set_domain(dev, request->dfs_region);