1 | // SPDX-License-Identifier: ISC |
2 | /* |
3 | * Copyright (C) 2016 Lorenzo Bianconi <lorenzo.bianconi83@gmail.com> |
4 | */ |
5 | |
6 | #include "mt76x02.h" |
7 | |
/* Expand to a struct mt76x02_radar_specs initializer.
 * Fields (in argument order): detection mode, averaging length,
 * energy low/high thresholds, pulse width low/high bounds plus
 * tolerance, pulse period low/high bounds plus tolerance, burst
 * low/high bounds, event expiration time and power jump threshold.
 * These values are programmed into the per-engine hw radar detector
 * by mt76x02_dfs_set_bbp_params().
 */
#define RADAR_SPEC(m, len, el, eh, wl, wh,		\
		   w_tolerance, tl, th, t_tolerance,	\
		   bl, bh, event_exp, power_jmp)	\
{							\
	.mode = m,					\
	.avg_len = len,					\
	.e_low = el,					\
	.e_high = eh,					\
	.w_low = wl,					\
	.w_high = wh,					\
	.w_margin = w_tolerance,			\
	.t_low = tl,					\
	.t_high = th,					\
	.t_margin = t_tolerance,			\
	.b_low = bl,					\
	.b_high = bh,					\
	.event_expiration = event_exp,			\
	.pwr_jmp = power_jmp				\
}
27 | |
/* ETSI radar signatures: four detection-engine entries per channel
 * width (20/40/80 MHz); the bandwidth-dependent offset is computed in
 * mt76x02_dfs_set_bbp_params().
 */
static const struct mt76x02_radar_specs etsi_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 15, 106, 150, 10, 4900, 100096, 10, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(0, 40, 4, 59, 96, 380, 150, 4900, 100096, 40, 0,
		   0x7fffffff, 0x155cc0, 0x19cc),
	RADAR_SPEC(3, 60, 20, 46, 300, 640, 80, 4900, 10100, 80, 0,
		   0x7fffffff, 0x155cc0, 0x19dd),
	RADAR_SPEC(8, 8, 2, 9, 106, 150, 32, 4900, 296704, 32, 0,
		   0x7fffffff, 0x2191c0, 0x15cc)
};
57 | |
/* FCC radar signatures: four detection-engine entries per channel
 * width (20/40/80 MHz); see mt76x02_dfs_set_bbp_params() for indexing.
 */
static const struct mt76x02_radar_specs fcc_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0xfe808, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 12, 106, 150, 5, 2900, 80100, 5, 0,
		   0x7fffffff, 0xfe808, 0x13dc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289),
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 14, 106, 150, 15, 2900, 80100, 15, 0,
		   0x7fffffff, 0xfe808, 0x16cc),
	RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0,
		   0x7fffffff, 0xfe808, 0x19dd),
	RADAR_SPEC(0, 40, 4, 54, 96, 480, 150, 2900, 80100, 40, 0,
		   0x7fffffff, 0xfe808, 0x12cc),
	RADAR_SPEC(2, 60, 15, 63, 640, 2080, 32, 19600, 40200, 32, 0,
		   0x3938700, 0x57bcf00, 0x1289)
};
87 | |
88 | static const struct mt76x02_radar_specs jp_w56_radar_specs[] = { |
89 | /* 20MHz */ |
90 | RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0, |
91 | 0x7fffffff, 0x14c080, 0x13dc), |
92 | RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0, |
93 | 0x7fffffff, 0x14c080, 0x19dd), |
94 | RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0, |
95 | 0x7fffffff, 0x14c080, 0x12cc), |
96 | RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0, |
97 | 0x3938700, 0X57bcf00, 0x1289), |
98 | /* 40MHz */ |
99 | RADAR_SPEC(0, 8, 2, 7, 106, 150, 5, 2900, 80100, 5, 0, |
100 | 0x7fffffff, 0x14c080, 0x13dc), |
101 | RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0, |
102 | 0x7fffffff, 0x14c080, 0x19dd), |
103 | RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0, |
104 | 0x7fffffff, 0x14c080, 0x12cc), |
105 | RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0, |
106 | 0x3938700, 0X57bcf00, 0x1289), |
107 | /* 80MHz */ |
108 | RADAR_SPEC(0, 8, 2, 9, 106, 150, 15, 2900, 80100, 15, 0, |
109 | 0x7fffffff, 0x14c080, 0x16cc), |
110 | RADAR_SPEC(0, 8, 2, 7, 106, 140, 5, 27600, 27900, 5, 0, |
111 | 0x7fffffff, 0x14c080, 0x19dd), |
112 | RADAR_SPEC(0, 40, 4, 44, 96, 480, 150, 2900, 80100, 40, 0, |
113 | 0x7fffffff, 0x14c080, 0x12cc), |
114 | RADAR_SPEC(2, 60, 15, 48, 940, 2080, 32, 19600, 40200, 32, 0, |
115 | 0x3938700, 0X57bcf00, 0x1289) |
116 | }; |
117 | |
/* Japanese W53 band radar signatures: four detection-engine entries
 * per channel width (20/40/80 MHz). The all-zero entries presumably
 * leave the corresponding detection engine unused in this band —
 * NOTE(review): confirm against the hw programming in
 * mt76x02_dfs_set_bbp_params().
 */
static const struct mt76x02_radar_specs jp_w53_radar_specs[] = {
	/* 20MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	/* 40MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	/* 80MHz */
	RADAR_SPEC(0, 8, 2, 9, 106, 150, 20, 28400, 77000, 20, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 },
	RADAR_SPEC(0, 40, 4, 44, 96, 200, 150, 28400, 77000, 60, 0,
		   0x7fffffff, 0x14c080, 0x16cc),
	{ 0 }
};
141 | |
142 | static void |
143 | mt76x02_dfs_set_capture_mode_ctrl(struct mt76x02_dev *dev, u8 enable) |
144 | { |
145 | u32 data; |
146 | |
147 | data = (1 << 1) | enable; |
148 | mt76_wr(dev, MT_BBP(DFS, 36), data); |
149 | } |
150 | |
151 | static void mt76x02_dfs_seq_pool_put(struct mt76x02_dev *dev, |
152 | struct mt76x02_dfs_sequence *seq) |
153 | { |
154 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
155 | |
156 | list_add(new: &seq->head, head: &dfs_pd->seq_pool); |
157 | |
158 | dfs_pd->seq_stats.seq_pool_len++; |
159 | dfs_pd->seq_stats.seq_len--; |
160 | } |
161 | |
162 | static struct mt76x02_dfs_sequence * |
163 | mt76x02_dfs_seq_pool_get(struct mt76x02_dev *dev) |
164 | { |
165 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
166 | struct mt76x02_dfs_sequence *seq; |
167 | |
168 | if (list_empty(head: &dfs_pd->seq_pool)) { |
169 | seq = devm_kzalloc(dev: dev->mt76.dev, size: sizeof(*seq), GFP_ATOMIC); |
170 | } else { |
171 | seq = list_first_entry(&dfs_pd->seq_pool, |
172 | struct mt76x02_dfs_sequence, |
173 | head); |
174 | list_del(entry: &seq->head); |
175 | dfs_pd->seq_stats.seq_pool_len--; |
176 | } |
177 | if (seq) |
178 | dfs_pd->seq_stats.seq_len++; |
179 | |
180 | return seq; |
181 | } |
182 | |
/* Return how many times @frac fits into @val, allowing a deviation of
 * up to @margin; returns 0 when @frac is zero or @val is not close
 * enough to a multiple of @frac.
 */
static int mt76x02_dfs_get_multiple(int val, int frac, int margin)
{
	int quot, rem;

	if (!frac)
		return 0;

	if (abs(val - frac) <= margin)
		return 1;

	quot = val / frac;
	rem = val % frac;
	if (rem > margin) {
		if (frac - rem > margin)
			return 0;
		quot++;
	}

	return quot;
}
204 | |
205 | static void mt76x02_dfs_detector_reset(struct mt76x02_dev *dev) |
206 | { |
207 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
208 | struct mt76x02_dfs_sequence *seq, *tmp_seq; |
209 | int i; |
210 | |
211 | /* reset hw detector */ |
212 | mt76_wr(dev, MT_BBP(DFS, 1), 0xf); |
213 | |
214 | /* reset sw detector */ |
215 | for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) { |
216 | dfs_pd->event_rb[i].h_rb = 0; |
217 | dfs_pd->event_rb[i].t_rb = 0; |
218 | } |
219 | |
220 | list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) { |
221 | list_del_init(entry: &seq->head); |
222 | mt76x02_dfs_seq_pool_put(dev, seq); |
223 | } |
224 | } |
225 | |
226 | static bool mt76x02_dfs_check_chirp(struct mt76x02_dev *dev) |
227 | { |
228 | bool ret = false; |
229 | u32 current_ts, delta_ts; |
230 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
231 | |
232 | current_ts = mt76_rr(dev, MT_PBF_LIFE_TIMER); |
233 | delta_ts = current_ts - dfs_pd->chirp_pulse_ts; |
234 | dfs_pd->chirp_pulse_ts = current_ts; |
235 | |
236 | /* 12 sec */ |
237 | if (delta_ts <= (12 * (1 << 20))) { |
238 | if (++dfs_pd->chirp_pulse_cnt > 8) |
239 | ret = true; |
240 | } else { |
241 | dfs_pd->chirp_pulse_cnt = 1; |
242 | } |
243 | |
244 | return ret; |
245 | } |
246 | |
/* Read the parameters of a pulse reported by hw detection engine
 * pulse->engine from the BBP DFS registers into @pulse.
 */
static void mt76x02_dfs_get_hw_pulse(struct mt76x02_dev *dev,
				     struct mt76x02_dfs_hw_pulse *pulse)
{
	u32 data;

	/* select channel */
	data = (MT_DFS_CH_EN << 16) | pulse->engine;
	mt76_wr(dev, MT_BBP(DFS, 0), data);

	/* reported period */
	pulse->period = mt76_rr(dev, MT_BBP(DFS, 19));

	/* reported width */
	pulse->w1 = mt76_rr(dev, MT_BBP(DFS, 20));
	pulse->w2 = mt76_rr(dev, MT_BBP(DFS, 23));

	/* reported burst number */
	pulse->burst = mt76_rr(dev, MT_BBP(DFS, 22));
}
266 | |
/* Validate a hw-reported pulse against region-specific radar timing
 * constraints (period/width windows per regulatory domain). Returns
 * true if the pulse is plausibly part of a radar pattern. Engine 3 is
 * handled by the chirp counter instead of period matching.
 * NOTE(review): period/width units appear to be hw timer ticks —
 * confirm against the datasheet before changing any bound.
 */
static bool mt76x02_dfs_check_hw_pulse(struct mt76x02_dev *dev,
				       struct mt76x02_dfs_hw_pulse *pulse)
{
	bool ret = false;

	/* a pulse without period or width carries no usable info */
	if (!pulse->period || !pulse->w1)
		return false;

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		if (pulse->engine > 3)
			break;

		/* engine 3 detects chirp radars */
		if (pulse->engine == 3) {
			ret = mt76x02_dfs_check_chirp(dev);
			break;
		}

		/* check short pulse*/
		if (pulse->w1 < 120)
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 4700 ||
				pulse->period >= 6400) &&
			       (pulse->period <= 6800 ||
				pulse->period >= 10200) &&
			       pulse->period <= 61600);
		else if (pulse->w1 < 130) /* 120 - 130 */
			ret = (pulse->period >= 2900 &&
			       pulse->period <= 61600);
		else
			ret = (pulse->period >= 3500 &&
			       pulse->period <= 10100);
		break;
	case NL80211_DFS_ETSI:
		if (pulse->engine >= 3)
			break;

		ret = (pulse->period >= 4900 &&
		       (pulse->period <= 10200 ||
			pulse->period >= 12400) &&
		       pulse->period <= 100100);
		break;
	case NL80211_DFS_JP:
		/* W53 band (5250-5350 MHz) uses its own period windows */
		if (dev->mphy.chandef.chan->center_freq >= 5250 &&
		    dev->mphy.chandef.chan->center_freq <= 5350) {
			/* JPW53 */
			if (pulse->w1 <= 130)
				ret = (pulse->period >= 28360 &&
				       (pulse->period <= 28700 ||
					pulse->period >= 76900) &&
				       pulse->period <= 76940);
			break;
		}

		if (pulse->engine > 3)
			break;

		/* engine 3 detects chirp radars */
		if (pulse->engine == 3) {
			ret = mt76x02_dfs_check_chirp(dev);
			break;
		}

		/* check short pulse*/
		if (pulse->w1 < 120)
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 4700 ||
				pulse->period >= 6400) &&
			       (pulse->period <= 6800 ||
				pulse->period >= 27560) &&
			       (pulse->period <= 27960 ||
				pulse->period >= 28360) &&
			       (pulse->period <= 28700 ||
				pulse->period >= 79900) &&
			       pulse->period <= 80100);
		else if (pulse->w1 < 130) /* 120 - 130 */
			ret = (pulse->period >= 2900 &&
			       (pulse->period <= 10100 ||
				pulse->period >= 27560) &&
			       (pulse->period <= 27960 ||
				pulse->period >= 28360) &&
			       (pulse->period <= 28700 ||
				pulse->period >= 79900) &&
			       pulse->period <= 80100);
		else
			ret = (pulse->period >= 3900 &&
			       pulse->period <= 10100);
		break;
	case NL80211_DFS_UNSET:
	default:
		return false;
	}

	return ret;
}
361 | |
/* Pop one event from the hw event queue exposed through DFS_R37.
 * Consecutive reads of the same register return consecutive words of
 * the event record (layout below). Returns false when no event is
 * pending; note only the first, second and third words are consumed
 * here (engine, timestamp, width) — the power word is not read.
 */
static bool mt76x02_dfs_fetch_event(struct mt76x02_dev *dev,
				    struct mt76x02_dfs_event *event)
{
	u32 data;

	/* 1st: DFS_R37[31]: 0 (engine 0) - 1 (engine 2)
	 * 2nd: DFS_R37[21:0]: pulse time
	 * 3rd: DFS_R37[11:0]: pulse width
	 * 3rd: DFS_R37[25:16]: phase
	 * 4th: DFS_R37[12:0]: current pwr
	 * 4th: DFS_R37[21:16]: pwr stable counter
	 *
	 * 1st: DFS_R37[31:0] set to 0xffffffff means no event detected
	 */
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	if (!MT_DFS_CHECK_EVENT(data))
		return false;

	event->engine = MT_DFS_EVENT_ENGINE(data);
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	event->ts = MT_DFS_EVENT_TIMESTAMP(data);
	data = mt76_rr(dev, MT_BBP(DFS, 37));
	event->width = MT_DFS_EVENT_WIDTH(data);

	return true;
}
388 | |
389 | static bool mt76x02_dfs_check_event(struct mt76x02_dev *dev, |
390 | struct mt76x02_dfs_event *event) |
391 | { |
392 | if (event->engine == 2) { |
393 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
394 | struct mt76x02_dfs_event_rb *event_buff = &dfs_pd->event_rb[1]; |
395 | u16 last_event_idx; |
396 | u32 delta_ts; |
397 | |
398 | last_event_idx = mt76_decr(val: event_buff->t_rb, |
399 | MT_DFS_EVENT_BUFLEN); |
400 | delta_ts = event->ts - event_buff->data[last_event_idx].ts; |
401 | if (delta_ts < MT_DFS_EVENT_TIME_MARGIN && |
402 | event_buff->data[last_event_idx].width >= 200) |
403 | return false; |
404 | } |
405 | return true; |
406 | } |
407 | |
408 | static void mt76x02_dfs_queue_event(struct mt76x02_dev *dev, |
409 | struct mt76x02_dfs_event *event) |
410 | { |
411 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
412 | struct mt76x02_dfs_event_rb *event_buff; |
413 | |
414 | /* add radar event to ring buffer */ |
415 | event_buff = event->engine == 2 ? &dfs_pd->event_rb[1] |
416 | : &dfs_pd->event_rb[0]; |
417 | event_buff->data[event_buff->t_rb] = *event; |
418 | event_buff->data[event_buff->t_rb].fetch_ts = jiffies; |
419 | |
420 | event_buff->t_rb = mt76_incr(val: event_buff->t_rb, MT_DFS_EVENT_BUFLEN); |
421 | if (event_buff->t_rb == event_buff->h_rb) |
422 | event_buff->h_rb = mt76_incr(val: event_buff->h_rb, |
423 | MT_DFS_EVENT_BUFLEN); |
424 | } |
425 | |
426 | static int mt76x02_dfs_create_sequence(struct mt76x02_dev *dev, |
427 | struct mt76x02_dfs_event *event, |
428 | u16 cur_len) |
429 | { |
430 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
431 | struct mt76x02_dfs_sw_detector_params *sw_params; |
432 | u32 width_delta, with_sum; |
433 | struct mt76x02_dfs_sequence seq, *seq_p; |
434 | struct mt76x02_dfs_event_rb *event_rb; |
435 | struct mt76x02_dfs_event *cur_event; |
436 | int i, j, end, pri, factor, cur_pri; |
437 | |
438 | event_rb = event->engine == 2 ? &dfs_pd->event_rb[1] |
439 | : &dfs_pd->event_rb[0]; |
440 | |
441 | i = mt76_decr(val: event_rb->t_rb, MT_DFS_EVENT_BUFLEN); |
442 | end = mt76_decr(val: event_rb->h_rb, MT_DFS_EVENT_BUFLEN); |
443 | |
444 | while (i != end) { |
445 | cur_event = &event_rb->data[i]; |
446 | with_sum = event->width + cur_event->width; |
447 | |
448 | sw_params = &dfs_pd->sw_dpd_params; |
449 | switch (dev->mt76.region) { |
450 | case NL80211_DFS_FCC: |
451 | case NL80211_DFS_JP: |
452 | if (with_sum < 600) |
453 | width_delta = 8; |
454 | else |
455 | width_delta = with_sum >> 3; |
456 | break; |
457 | case NL80211_DFS_ETSI: |
458 | if (event->engine == 2) |
459 | width_delta = with_sum >> 6; |
460 | else if (with_sum < 620) |
461 | width_delta = 24; |
462 | else |
463 | width_delta = 8; |
464 | break; |
465 | case NL80211_DFS_UNSET: |
466 | default: |
467 | return -EINVAL; |
468 | } |
469 | |
470 | pri = event->ts - cur_event->ts; |
471 | if (abs(event->width - cur_event->width) > width_delta || |
472 | pri < sw_params->min_pri) |
473 | goto next; |
474 | |
475 | if (pri > sw_params->max_pri) |
476 | break; |
477 | |
478 | seq.pri = event->ts - cur_event->ts; |
479 | seq.first_ts = cur_event->ts; |
480 | seq.last_ts = event->ts; |
481 | seq.engine = event->engine; |
482 | seq.count = 2; |
483 | |
484 | j = mt76_decr(val: i, MT_DFS_EVENT_BUFLEN); |
485 | while (j != end) { |
486 | cur_event = &event_rb->data[j]; |
487 | cur_pri = event->ts - cur_event->ts; |
488 | factor = mt76x02_dfs_get_multiple(val: cur_pri, frac: seq.pri, |
489 | margin: sw_params->pri_margin); |
490 | if (factor > 0) { |
491 | seq.first_ts = cur_event->ts; |
492 | seq.count++; |
493 | } |
494 | |
495 | j = mt76_decr(val: j, MT_DFS_EVENT_BUFLEN); |
496 | } |
497 | if (seq.count <= cur_len) |
498 | goto next; |
499 | |
500 | seq_p = mt76x02_dfs_seq_pool_get(dev); |
501 | if (!seq_p) |
502 | return -ENOMEM; |
503 | |
504 | *seq_p = seq; |
505 | INIT_LIST_HEAD(list: &seq_p->head); |
506 | list_add(new: &seq_p->head, head: &dfs_pd->sequences); |
507 | next: |
508 | i = mt76_decr(val: i, MT_DFS_EVENT_BUFLEN); |
509 | } |
510 | return 0; |
511 | } |
512 | |
513 | static u16 mt76x02_dfs_add_event_to_sequence(struct mt76x02_dev *dev, |
514 | struct mt76x02_dfs_event *event) |
515 | { |
516 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
517 | struct mt76x02_dfs_sw_detector_params *sw_params; |
518 | struct mt76x02_dfs_sequence *seq, *tmp_seq; |
519 | u16 max_seq_len = 0; |
520 | int factor, pri; |
521 | |
522 | sw_params = &dfs_pd->sw_dpd_params; |
523 | list_for_each_entry_safe(seq, tmp_seq, &dfs_pd->sequences, head) { |
524 | if (event->ts > seq->first_ts + MT_DFS_SEQUENCE_WINDOW) { |
525 | list_del_init(entry: &seq->head); |
526 | mt76x02_dfs_seq_pool_put(dev, seq); |
527 | continue; |
528 | } |
529 | |
530 | if (event->engine != seq->engine) |
531 | continue; |
532 | |
533 | pri = event->ts - seq->last_ts; |
534 | factor = mt76x02_dfs_get_multiple(val: pri, frac: seq->pri, |
535 | margin: sw_params->pri_margin); |
536 | if (factor > 0) { |
537 | seq->last_ts = event->ts; |
538 | seq->count++; |
539 | max_seq_len = max_t(u16, max_seq_len, seq->count); |
540 | } |
541 | } |
542 | return max_seq_len; |
543 | } |
544 | |
545 | static bool mt76x02_dfs_check_detection(struct mt76x02_dev *dev) |
546 | { |
547 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
548 | struct mt76x02_dfs_sequence *seq; |
549 | |
550 | if (list_empty(head: &dfs_pd->sequences)) |
551 | return false; |
552 | |
553 | list_for_each_entry(seq, &dfs_pd->sequences, head) { |
554 | if (seq->count > MT_DFS_SEQUENCE_TH) { |
555 | dfs_pd->stats[seq->engine].sw_pattern++; |
556 | return true; |
557 | } |
558 | } |
559 | return false; |
560 | } |
561 | |
562 | static void mt76x02_dfs_add_events(struct mt76x02_dev *dev) |
563 | { |
564 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
565 | struct mt76x02_dfs_event event; |
566 | int i, seq_len; |
567 | |
568 | /* disable debug mode */ |
569 | mt76x02_dfs_set_capture_mode_ctrl(dev, enable: false); |
570 | for (i = 0; i < MT_DFS_EVENT_LOOP; i++) { |
571 | if (!mt76x02_dfs_fetch_event(dev, event: &event)) |
572 | break; |
573 | |
574 | if (dfs_pd->last_event_ts > event.ts) |
575 | mt76x02_dfs_detector_reset(dev); |
576 | dfs_pd->last_event_ts = event.ts; |
577 | |
578 | if (!mt76x02_dfs_check_event(dev, event: &event)) |
579 | continue; |
580 | |
581 | seq_len = mt76x02_dfs_add_event_to_sequence(dev, event: &event); |
582 | mt76x02_dfs_create_sequence(dev, event: &event, cur_len: seq_len); |
583 | |
584 | mt76x02_dfs_queue_event(dev, event: &event); |
585 | } |
586 | mt76x02_dfs_set_capture_mode_ctrl(dev, enable: true); |
587 | } |
588 | |
589 | static void mt76x02_dfs_check_event_window(struct mt76x02_dev *dev) |
590 | { |
591 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
592 | struct mt76x02_dfs_event_rb *event_buff; |
593 | struct mt76x02_dfs_event *event; |
594 | int i; |
595 | |
596 | for (i = 0; i < ARRAY_SIZE(dfs_pd->event_rb); i++) { |
597 | event_buff = &dfs_pd->event_rb[i]; |
598 | |
599 | while (event_buff->h_rb != event_buff->t_rb) { |
600 | event = &event_buff->data[event_buff->h_rb]; |
601 | |
602 | /* sorted list */ |
603 | if (time_is_after_jiffies(event->fetch_ts + |
604 | MT_DFS_EVENT_WINDOW)) |
605 | break; |
606 | event_buff->h_rb = mt76_incr(val: event_buff->h_rb, |
607 | MT_DFS_EVENT_BUFLEN); |
608 | } |
609 | } |
610 | } |
611 | |
612 | static void mt76x02_dfs_tasklet(struct tasklet_struct *t) |
613 | { |
614 | struct mt76x02_dfs_pattern_detector *dfs_pd = from_tasklet(dfs_pd, t, |
615 | dfs_tasklet); |
616 | struct mt76x02_dev *dev = container_of(dfs_pd, typeof(*dev), dfs_pd); |
617 | u32 engine_mask; |
618 | int i; |
619 | |
620 | if (test_bit(MT76_SCANNING, &dev->mphy.state)) |
621 | goto out; |
622 | |
623 | if (time_is_before_jiffies(dfs_pd->last_sw_check + |
624 | MT_DFS_SW_TIMEOUT)) { |
625 | bool radar_detected; |
626 | |
627 | dfs_pd->last_sw_check = jiffies; |
628 | |
629 | mt76x02_dfs_add_events(dev); |
630 | radar_detected = mt76x02_dfs_check_detection(dev); |
631 | if (radar_detected) { |
632 | /* sw detector rx radar pattern */ |
633 | ieee80211_radar_detected(hw: dev->mt76.hw); |
634 | mt76x02_dfs_detector_reset(dev); |
635 | |
636 | return; |
637 | } |
638 | mt76x02_dfs_check_event_window(dev); |
639 | } |
640 | |
641 | engine_mask = mt76_rr(dev, MT_BBP(DFS, 1)); |
642 | if (!(engine_mask & 0xf)) |
643 | goto out; |
644 | |
645 | for (i = 0; i < MT_DFS_NUM_ENGINES; i++) { |
646 | struct mt76x02_dfs_hw_pulse pulse; |
647 | |
648 | if (!(engine_mask & (1 << i))) |
649 | continue; |
650 | |
651 | pulse.engine = i; |
652 | mt76x02_dfs_get_hw_pulse(dev, pulse: &pulse); |
653 | |
654 | if (!mt76x02_dfs_check_hw_pulse(dev, pulse: &pulse)) { |
655 | dfs_pd->stats[i].hw_pulse_discarded++; |
656 | continue; |
657 | } |
658 | |
659 | /* hw detector rx radar pattern */ |
660 | dfs_pd->stats[i].hw_pattern++; |
661 | ieee80211_radar_detected(hw: dev->mt76.hw); |
662 | mt76x02_dfs_detector_reset(dev); |
663 | |
664 | return; |
665 | } |
666 | |
667 | /* reset hw detector */ |
668 | mt76_wr(dev, MT_BBP(DFS, 1), 0xf); |
669 | |
670 | out: |
671 | mt76x02_irq_enable(dev, MT_INT_GPTIMER); |
672 | } |
673 | |
674 | static void mt76x02_dfs_init_sw_detector(struct mt76x02_dev *dev) |
675 | { |
676 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
677 | |
678 | switch (dev->mt76.region) { |
679 | case NL80211_DFS_FCC: |
680 | dfs_pd->sw_dpd_params.max_pri = MT_DFS_FCC_MAX_PRI; |
681 | dfs_pd->sw_dpd_params.min_pri = MT_DFS_FCC_MIN_PRI; |
682 | dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN; |
683 | break; |
684 | case NL80211_DFS_ETSI: |
685 | dfs_pd->sw_dpd_params.max_pri = MT_DFS_ETSI_MAX_PRI; |
686 | dfs_pd->sw_dpd_params.min_pri = MT_DFS_ETSI_MIN_PRI; |
687 | dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN << 2; |
688 | break; |
689 | case NL80211_DFS_JP: |
690 | dfs_pd->sw_dpd_params.max_pri = MT_DFS_JP_MAX_PRI; |
691 | dfs_pd->sw_dpd_params.min_pri = MT_DFS_JP_MIN_PRI; |
692 | dfs_pd->sw_dpd_params.pri_margin = MT_DFS_PRI_MARGIN; |
693 | break; |
694 | case NL80211_DFS_UNSET: |
695 | default: |
696 | break; |
697 | } |
698 | } |
699 | |
/* Program the BBP DFS registers for the current regulatory region and
 * channel width: pick the matching 4-entry group from the radar spec
 * table (20/40/80 MHz groups of MT_DFS_NUM_ENGINES entries each),
 * write the common gain/timing registers, configure each detection
 * engine, then clear status and enable detection.
 */
static void mt76x02_dfs_set_bbp_params(struct mt76x02_dev *dev)
{
	const struct mt76x02_radar_specs *radar_specs;
	u8 i, shift;
	u32 data;

	/* offset into the spec table for the active channel width */
	switch (dev->mphy.chandef.width) {
	case NL80211_CHAN_WIDTH_40:
		shift = MT_DFS_NUM_ENGINES;
		break;
	case NL80211_CHAN_WIDTH_80:
		shift = 2 * MT_DFS_NUM_ENGINES;
		break;
	default:
		shift = 0;
		break;
	}

	switch (dev->mt76.region) {
	case NL80211_DFS_FCC:
		radar_specs = &fcc_radar_specs[shift];
		break;
	case NL80211_DFS_ETSI:
		radar_specs = &etsi_radar_specs[shift];
		break;
	case NL80211_DFS_JP:
		/* Japan uses distinct specs for the W53 band */
		if (dev->mphy.chandef.chan->center_freq >= 5250 &&
		    dev->mphy.chandef.chan->center_freq <= 5350)
			radar_specs = &jp_w53_radar_specs[shift];
		else
			radar_specs = &jp_w56_radar_specs[shift];
		break;
	case NL80211_DFS_UNSET:
	default:
		return;
	}

	data = (MT_DFS_VGA_MASK << 16) |
	       (MT_DFS_PWR_GAIN_OFFSET << 12) |
	       (MT_DFS_PWR_DOWN_TIME << 8) |
	       (MT_DFS_SYM_ROUND << 4) |
	       (MT_DFS_DELTA_DELAY & 0xf);
	mt76_wr(dev, MT_BBP(DFS, 2), data);

	data = (MT_DFS_RX_PE_MASK << 16) | MT_DFS_PKT_END_MASK;
	mt76_wr(dev, MT_BBP(DFS, 3), data);

	for (i = 0; i < MT_DFS_NUM_ENGINES; i++) {
		/* configure engine */
		mt76_wr(dev, MT_BBP(DFS, 0), i);

		/* detection mode + avg_len */
		data = ((radar_specs[i].avg_len & 0x1ff) << 16) |
		       (radar_specs[i].mode & 0xf);
		mt76_wr(dev, MT_BBP(DFS, 4), data);

		/* dfs energy */
		data = ((radar_specs[i].e_high & 0x0fff) << 16) |
		       (radar_specs[i].e_low & 0x0fff);
		mt76_wr(dev, MT_BBP(DFS, 5), data);

		/* dfs period */
		mt76_wr(dev, MT_BBP(DFS, 7), radar_specs[i].t_low);
		mt76_wr(dev, MT_BBP(DFS, 9), radar_specs[i].t_high);

		/* dfs burst */
		mt76_wr(dev, MT_BBP(DFS, 11), radar_specs[i].b_low);
		mt76_wr(dev, MT_BBP(DFS, 13), radar_specs[i].b_high);

		/* dfs width */
		data = ((radar_specs[i].w_high & 0x0fff) << 16) |
		       (radar_specs[i].w_low & 0x0fff);
		mt76_wr(dev, MT_BBP(DFS, 14), data);

		/* dfs margins */
		data = (radar_specs[i].w_margin << 16) |
		       radar_specs[i].t_margin;
		mt76_wr(dev, MT_BBP(DFS, 15), data);

		/* dfs event expiration */
		mt76_wr(dev, MT_BBP(DFS, 17), radar_specs[i].event_expiration);

		/* dfs pwr adj */
		mt76_wr(dev, MT_BBP(DFS, 30), radar_specs[i].pwr_jmp);
	}

	/* reset status */
	mt76_wr(dev, MT_BBP(DFS, 1), 0xf);
	mt76_wr(dev, MT_BBP(DFS, 36), 0x3);

	/* enable detection*/
	mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
	mt76_wr(dev, MT_BBP(IBI, 11), 0x0c350001);
}
794 | |
/* Derive the DFS detector gain register (DFS_R31) from the current
 * AGC settings so radar detection tracks the rx gain, then restart
 * the hw detector on non-mt76x2 chips to apply it.
 * NOTE(review): exact semantics of the AGC bit fields come from the
 * vendor register map — confirm before changing masks/shifts.
 */
void mt76x02_phy_dfs_adjust_agc(struct mt76x02_dev *dev)
{
	u32 agc_r8, agc_r4, val_r8, val_r4, dfs_r31;

	agc_r8 = mt76_rr(dev, MT_BBP(AGC, 8));
	agc_r4 = mt76_rr(dev, MT_BBP(AGC, 4));

	/* AGC_R8 bits [14:9] */
	val_r8 = (agc_r8 & 0x00007e00) >> 9;
	/* replace AGC_R4 bits [28:24] with (val_r8 + 1) / 2 */
	val_r4 = agc_r4 & ~0x1f000000;
	val_r4 += (((val_r8 + 1) >> 1) << 24);
	mt76_wr(dev, MT_BBP(AGC, 4), val_r4);

	dfs_r31 = FIELD_GET(MT_BBP_AGC_LNA_HIGH_GAIN, val_r4);
	dfs_r31 += val_r8;
	dfs_r31 -= (agc_r8 & 0x00000038) >> 3;	/* AGC_R8 bits [5:3] */
	dfs_r31 = (dfs_r31 << 16) | 0x00000307;
	mt76_wr(dev, MT_BBP(DFS, 31), dfs_r31);

	if (is_mt76x2(dev)) {
		mt76_wr(dev, MT_BBP(DFS, 32), 0x00040071);
	} else {
		/* disable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), 0);
		/* enable hw detector */
		mt76_wr(dev, MT_BBP(DFS, 0), MT_DFS_CH_EN << 16);
	}
}
EXPORT_SYMBOL_GPL(mt76x02_phy_dfs_adjust_agc);
823 | |
824 | void mt76x02_dfs_init_params(struct mt76x02_dev *dev) |
825 | { |
826 | if (mt76_phy_dfs_state(phy: &dev->mphy) > MT_DFS_STATE_DISABLED) { |
827 | mt76x02_dfs_init_sw_detector(dev); |
828 | mt76x02_dfs_set_bbp_params(dev); |
829 | /* enable debug mode */ |
830 | mt76x02_dfs_set_capture_mode_ctrl(dev, enable: true); |
831 | |
832 | mt76x02_irq_enable(dev, MT_INT_GPTIMER); |
833 | mt76_rmw_field(dev, MT_INT_TIMER_EN, |
834 | MT_INT_TIMER_EN_GP_TIMER_EN, 1); |
835 | } else { |
836 | /* disable hw detector */ |
837 | mt76_wr(dev, MT_BBP(DFS, 0), 0); |
838 | /* clear detector status */ |
839 | mt76_wr(dev, MT_BBP(DFS, 1), 0xf); |
840 | if (mt76_chip(dev: &dev->mt76) == 0x7610 || |
841 | mt76_chip(dev: &dev->mt76) == 0x7630) |
842 | mt76_wr(dev, MT_BBP(IBI, 11), 0xfde8081); |
843 | else |
844 | mt76_wr(dev, MT_BBP(IBI, 11), 0); |
845 | |
846 | mt76x02_irq_disable(dev, MT_INT_GPTIMER); |
847 | mt76_rmw_field(dev, MT_INT_TIMER_EN, |
848 | MT_INT_TIMER_EN_GP_TIMER_EN, 0); |
849 | } |
850 | } |
851 | EXPORT_SYMBOL_GPL(mt76x02_dfs_init_params); |
852 | |
853 | void mt76x02_dfs_init_detector(struct mt76x02_dev *dev) |
854 | { |
855 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
856 | |
857 | INIT_LIST_HEAD(list: &dfs_pd->sequences); |
858 | INIT_LIST_HEAD(list: &dfs_pd->seq_pool); |
859 | dev->mt76.region = NL80211_DFS_UNSET; |
860 | dfs_pd->last_sw_check = jiffies; |
861 | tasklet_setup(t: &dfs_pd->dfs_tasklet, callback: mt76x02_dfs_tasklet); |
862 | } |
863 | |
864 | static void |
865 | mt76x02_dfs_set_domain(struct mt76x02_dev *dev, |
866 | enum nl80211_dfs_regions region) |
867 | { |
868 | struct mt76x02_dfs_pattern_detector *dfs_pd = &dev->dfs_pd; |
869 | |
870 | mutex_lock(&dev->mt76.mutex); |
871 | if (dev->mt76.region != region) { |
872 | tasklet_disable(t: &dfs_pd->dfs_tasklet); |
873 | |
874 | dev->ed_monitor = dev->ed_monitor_enabled && |
875 | region == NL80211_DFS_ETSI; |
876 | mt76x02_edcca_init(dev); |
877 | |
878 | dev->mt76.region = region; |
879 | mt76x02_dfs_init_params(dev); |
880 | tasklet_enable(t: &dfs_pd->dfs_tasklet); |
881 | } |
882 | mutex_unlock(lock: &dev->mt76.mutex); |
883 | } |
884 | |
885 | void mt76x02_regd_notifier(struct wiphy *wiphy, |
886 | struct regulatory_request *request) |
887 | { |
888 | struct ieee80211_hw *hw = wiphy_to_ieee80211_hw(wiphy); |
889 | struct mt76x02_dev *dev = hw->priv; |
890 | |
891 | mt76x02_dfs_set_domain(dev, region: request->dfs_region); |
892 | } |
893 | |