// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
/* Copyright(c) 2023 Realtek Corporation
 */

#include "debug.h"
#include "mac.h"
#include "phy.h"
#include "reg.h"

static const struct rtw89_ccx_regs rtw89_ccx_regs_be = {
	.setting_addr = R_CCX,
	.edcca_opt_mask = B_CCX_EDCCA_OPT_MSK_V1,
	.measurement_trig_mask = B_MEASUREMENT_TRIG_MSK,
	.trig_opt_mask = B_CCX_TRIG_OPT_MSK,
	.en_mask = B_CCX_EN_MSK,
	.ifs_cnt_addr = R_IFS_COUNTER,
	.ifs_clm_period_mask = B_IFS_CLM_PERIOD_MSK,
	.ifs_clm_cnt_unit_mask = B_IFS_CLM_COUNTER_UNIT_MSK,
	.ifs_clm_cnt_clear_mask = B_IFS_COUNTER_CLR_MSK,
	.ifs_collect_en_mask = B_IFS_COLLECT_EN,
	.ifs_t1_addr = R_IFS_T1,
	.ifs_t1_th_h_mask = B_IFS_T1_TH_HIGH_MSK,
	.ifs_t1_en_mask = B_IFS_T1_EN_MSK,
	.ifs_t1_th_l_mask = B_IFS_T1_TH_LOW_MSK,
	.ifs_t2_addr = R_IFS_T2,
	.ifs_t2_th_h_mask = B_IFS_T2_TH_HIGH_MSK,
	.ifs_t2_en_mask = B_IFS_T2_EN_MSK,
	.ifs_t2_th_l_mask = B_IFS_T2_TH_LOW_MSK,
	.ifs_t3_addr = R_IFS_T3,
	.ifs_t3_th_h_mask = B_IFS_T3_TH_HIGH_MSK,
	.ifs_t3_en_mask = B_IFS_T3_EN_MSK,
	.ifs_t3_th_l_mask = B_IFS_T3_TH_LOW_MSK,
	.ifs_t4_addr = R_IFS_T4,
	.ifs_t4_th_h_mask = B_IFS_T4_TH_HIGH_MSK,
	.ifs_t4_en_mask = B_IFS_T4_EN_MSK,
	.ifs_t4_th_l_mask = B_IFS_T4_TH_LOW_MSK,
	.ifs_clm_tx_cnt_addr = R_IFS_CLM_TX_CNT_V1,
	.ifs_clm_edcca_excl_cca_fa_mask = B_IFS_CLM_EDCCA_EXCLUDE_CCA_FA_MSK,
	.ifs_clm_tx_cnt_msk = B_IFS_CLM_TX_CNT_MSK,
	.ifs_clm_cca_addr = R_IFS_CLM_CCA_V1,
	.ifs_clm_ofdmcca_excl_fa_mask = B_IFS_CLM_OFDMCCA_EXCLUDE_FA_MSK,
	.ifs_clm_cckcca_excl_fa_mask = B_IFS_CLM_CCKCCA_EXCLUDE_FA_MSK,
	.ifs_clm_fa_addr = R_IFS_CLM_FA_V1,
	.ifs_clm_ofdm_fa_mask = B_IFS_CLM_OFDM_FA_MSK,
	.ifs_clm_cck_fa_mask = B_IFS_CLM_CCK_FA_MSK,
	.ifs_his_addr = R_IFS_HIS_V1,
	.ifs_t4_his_mask = B_IFS_T4_HIS_MSK,
	.ifs_t3_his_mask = B_IFS_T3_HIS_MSK,
	.ifs_t2_his_mask = B_IFS_T2_HIS_MSK,
	.ifs_t1_his_mask = B_IFS_T1_HIS_MSK,
	.ifs_avg_l_addr = R_IFS_AVG_L_V1,
	.ifs_t2_avg_mask = B_IFS_T2_AVG_MSK,
	.ifs_t1_avg_mask = B_IFS_T1_AVG_MSK,
	.ifs_avg_h_addr = R_IFS_AVG_H_V1,
	.ifs_t4_avg_mask = B_IFS_T4_AVG_MSK,
	.ifs_t3_avg_mask = B_IFS_T3_AVG_MSK,
	.ifs_cca_l_addr = R_IFS_CCA_L_V1,
	.ifs_t2_cca_mask = B_IFS_T2_CCA_MSK,
	.ifs_t1_cca_mask = B_IFS_T1_CCA_MSK,
	.ifs_cca_h_addr = R_IFS_CCA_H_V1,
	.ifs_t4_cca_mask = B_IFS_T4_CCA_MSK,
	.ifs_t3_cca_mask = B_IFS_T3_CCA_MSK,
	.ifs_total_addr = R_IFSCNT_V1,
	.ifs_cnt_done_mask = B_IFSCNT_DONE_MSK,
	.ifs_total_mask = B_IFSCNT_TOTAL_CNT_MSK,
};

static const struct rtw89_physts_regs rtw89_physts_regs_be = {
	.setting_addr = R_PLCP_HISTOGRAM,
	.dis_trigger_fail_mask = B_STS_DIS_TRIG_BY_FAIL,
	.dis_trigger_brk_mask = B_STS_DIS_TRIG_BY_BRK,
};

static const struct rtw89_cfo_regs rtw89_cfo_regs_be = {
	.comp = R_DCFO_WEIGHT_V1,
	.weighting_mask = B_DCFO_WEIGHT_MSK_V1,
	.comp_seg0 = R_DCFO_OPT_V1,
	.valid_0_mask = B_DCFO_OPT_EN_V1,
};

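/* Return the address offset used to reach the PHY1 copy of a PHY0 BB
 * register. Pages in the ranges below are mirrored per PHY at +0x1000;
 * every other page uses offset 0.
 */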
static u32 rtw89_phy0_phy1_offset_be(struct rtw89_dev *rtwdev, u32 addr)
{
	u32 phy_page = addr >> 8;
	u32 ofst = 0;

	if ((phy_page >= 0x4 && phy_page <= 0xF) ||
	    (phy_page >= 0x20 && phy_page <= 0x2B) ||
	    (phy_page >= 0x40 && phy_page <= 0x4f) ||
	    (phy_page >= 0x60 && phy_page <= 0x6f) ||
	    (phy_page >= 0xE4 && phy_page <= 0xE5) ||
	    (phy_page >= 0xE8 && phy_page <= 0xED))
		ofst = 0x1000;
	else
		ofst = 0x0;

	return ofst;
}

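/* A BB gain table entry encodes its meaning in the register address itself:
 * byte 0 carries the (sub)type, byte 1 packs RF path (low nibble) and
 * bandwidth (high nibble), byte 2 is the gain band and byte 3 the cfg type.
 */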
union rtw89_phy_bb_gain_arg_be {
	u32 addr;
	struct {
		u8 type;
#define BB_GAIN_TYPE_SUB0_BE GENMASK(3, 0)
#define BB_GAIN_TYPE_SUB1_BE GENMASK(7, 4)
		u8 path_bw;
#define BB_GAIN_PATH_BE GENMASK(3, 0)
#define BB_GAIN_BW_BE GENMASK(7, 4)
		u8 gain_band;
		u8 cfg_type;
	} __packed;
} __packed;

static void
rtw89_phy_cfg_bb_gain_error_be(struct rtw89_dev *rtwdev,
			       union rtw89_phy_bb_gain_arg_be arg, u32 data)
{
	struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
	u8 bw_type = u8_get_bits(arg.path_bw, BB_GAIN_BW_BE);
	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);
	u8 gband = arg.gain_band;
	u8 type = arg.type;
	int i;

	switch (type) {
	case 0:
		for (i = 0; i < 4; i++, data >>= 8)
			gain->lna_gain[gband][bw_type][path][i] = data & 0xff;
		break;
	case 1:
		for (i = 4; i < 7; i++, data >>= 8)
			gain->lna_gain[gband][bw_type][path][i] = data & 0xff;
		break;
	case 2:
		for (i = 0; i < 2; i++, data >>= 8)
			gain->tia_gain[gband][bw_type][path][i] = data & 0xff;
		break;
	default:
		rtw89_warn(rtwdev,
			   "bb gain error {0x%x:0x%x} with unknown type: %d\n",
			   arg.addr, data, type);
		break;
	}
}

static void
rtw89_phy_cfg_bb_rpl_ofst_be(struct rtw89_dev *rtwdev,
			     union rtw89_phy_bb_gain_arg_be arg, u32 data)
{
	struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
	u8 type_sub0 = u8_get_bits(arg.type, BB_GAIN_TYPE_SUB0_BE);
	u8 type_sub1 = u8_get_bits(arg.type, BB_GAIN_TYPE_SUB1_BE);
	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);
	u8 gband = arg.gain_band;
	u8 ofst = 0;
	int i;

	switch (type_sub1) {
	case RTW89_CMAC_BW_20M:
		gain->rpl_ofst_20[gband][path][0] = (s8)data;
		break;
	case RTW89_CMAC_BW_40M:
		for (i = 0; i < RTW89_BW20_SC_40M; i++, data >>= 8)
			gain->rpl_ofst_40[gband][path][i] = data & 0xff;
		break;
	case RTW89_CMAC_BW_80M:
		for (i = 0; i < RTW89_BW20_SC_80M; i++, data >>= 8)
			gain->rpl_ofst_80[gband][path][i] = data & 0xff;
		break;
	case RTW89_CMAC_BW_160M:
		if (type_sub0 == 0)
			ofst = 0;
		else
			ofst = RTW89_BW20_SC_80M;

		for (i = 0; i < RTW89_BW20_SC_80M; i++, data >>= 8)
			gain->rpl_ofst_160[gband][path][i + ofst] = data & 0xff;
		break;
	default:
		rtw89_warn(rtwdev,
			   "bb rpl ofst {0x%x:0x%x} with unknown type_sub1: %d\n",
			   arg.addr, data, type_sub1);
		break;
	}
}

static void
rtw89_phy_cfg_bb_gain_op1db_be(struct rtw89_dev *rtwdev,
			       union rtw89_phy_bb_gain_arg_be arg, u32 data)
{
	struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
	u8 bw_type = u8_get_bits(arg.path_bw, BB_GAIN_BW_BE);
	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);
	u8 gband = arg.gain_band;
	u8 type = arg.type;
	int i;

	switch (type) {
	case 0:
		for (i = 0; i < 4; i++, data >>= 8)
			gain->lna_op1db[gband][bw_type][path][i] = data & 0xff;
		break;
	case 1:
		for (i = 4; i < 7; i++, data >>= 8)
			gain->lna_op1db[gband][bw_type][path][i] = data & 0xff;
		break;
	case 2:
		for (i = 0; i < 4; i++, data >>= 8)
			gain->tia_lna_op1db[gband][bw_type][path][i] = data & 0xff;
		break;
	case 3:
		for (i = 4; i < 8; i++, data >>= 8)
			gain->tia_lna_op1db[gband][bw_type][path][i] = data & 0xff;
		break;
	default:
		rtw89_warn(rtwdev,
			   "bb gain op1db {0x%x:0x%x} with unknown type: %d\n",
			   arg.addr, data, type);
		break;
	}
}

static void rtw89_phy_config_bb_gain_be(struct rtw89_dev *rtwdev,
					const struct rtw89_reg2_def *reg,
					enum rtw89_rf_path rf_path,
					void *extra_data)
{
	const struct rtw89_chip_info *chip = rtwdev->chip;
	union rtw89_phy_bb_gain_arg_be arg = { .addr = reg->addr };
	struct rtw89_efuse *efuse = &rtwdev->efuse;
	u8 bw_type = u8_get_bits(arg.path_bw, BB_GAIN_BW_BE);
	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);

	if (bw_type >= RTW89_BB_BW_NR_BE)
		return;

	if (arg.gain_band >= RTW89_BB_GAIN_BAND_NR_BE)
		return;

	if (path >= chip->rf_path_num)
		return;

	if (arg.addr >= 0xf9 && arg.addr <= 0xfe) {
		rtw89_warn(rtwdev, "bb gain table with flow ctrl\n");
		return;
	}

	switch (arg.cfg_type) {
	case 0:
		rtw89_phy_cfg_bb_gain_error_be(rtwdev, arg, reg->data);
		break;
	case 1:
		rtw89_phy_cfg_bb_rpl_ofst_be(rtwdev, arg, reg->data);
		break;
	case 2:
		/* ignore BB gain bypass */
		break;
	case 3:
		rtw89_phy_cfg_bb_gain_op1db_be(rtwdev, arg, reg->data);
		break;
	case 4:
		/* This cfg_type is only used by rfe_type >= 50 with eFEM */
		if (efuse->rfe_type < 50)
			break;
		fallthrough;
	default:
		rtw89_warn(rtwdev,
			   "bb gain {0x%x:0x%x} with unknown cfg type: %d\n",
			   arg.addr, reg->data, arg.cfg_type);
		break;
	}
}

static void rtw89_phy_preinit_rf_nctl_be(struct rtw89_dev *rtwdev)
{
	rtw89_phy_write32_mask(rtwdev, R_GOTX_IQKDPK_C0, B_GOTX_IQKDPK, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_GOTX_IQKDPK_C1, B_GOTX_IQKDPK, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_IQKDPK_HC, B_IQKDPK_HC, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_CLK_GCK, B_CLK_GCK, 0x00fffff);
	rtw89_phy_write32_mask(rtwdev, R_IOQ_IQK_DPK, B_IOQ_IQK_DPK_CLKEN, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_RST, B_IQK_DPK_RST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_PRST, B_IQK_DPK_PRST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_PRST_C1, B_IQK_DPK_PRST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_TXRFC, B_TXRFC_RST, 0x1);

	if (rtwdev->dbcc_en) {
		rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_RST_C1, B_IQK_DPK_RST, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_TXRFC_C1, B_TXRFC_RST, 0x1);
	}
}

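/* Program the default per-MACID TX power limit value (0x03007F7F) into each
 * of the 32 MACID limit registers, laid out 4 bytes apart starting at
 * R_BE_PWR_MACID_LMT_BASE.
 */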
static
void rtw89_phy_bb_wrap_pwr_by_macid_init(struct rtw89_dev *rtwdev)
{
	u32 macid_idx, cr, base_macid_lmt, max_macid = 32;

	base_macid_lmt = R_BE_PWR_MACID_LMT_BASE;

	for (macid_idx = 0; macid_idx < 4 * max_macid; macid_idx += 4) {
		cr = base_macid_lmt + macid_idx;
		rtw89_write32(rtwdev, cr, 0x03007F7F);
	}
}

static
void rtw89_phy_bb_wrap_tx_path_by_macid_init(struct rtw89_dev *rtwdev)
{
	int i, max_macid = 32;
	u32 cr = R_BE_PWR_MACID_PATH_BASE;

	for (i = 0; i < max_macid; i++, cr += 4)
		rtw89_write32(rtwdev, cr, 0x03C86000);
}

static void rtw89_phy_bb_wrap_tpu_set_all(struct rtw89_dev *rtwdev,
					  enum rtw89_mac_idx mac_idx)
{
	u32 addr;

	for (addr = R_BE_PWR_BY_RATE; addr <= R_BE_PWR_BY_RATE_END; addr += 4)
		rtw89_write32(rtwdev, addr, 0);
	for (addr = R_BE_PWR_RULMT_START; addr <= R_BE_PWR_RULMT_END; addr += 4)
		rtw89_write32(rtwdev, addr, 0);
	for (addr = R_BE_PWR_RATE_OFST_CTRL; addr <= R_BE_PWR_RATE_OFST_END; addr += 4)
		rtw89_write32(rtwdev, addr, 0);

	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_REF_CTRL, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_LMT_DB, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_LMTBF, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_LMTBF_DB, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_RATE_CTRL, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_BYRATE_DB, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_RULMT, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_RULMT_DB, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_SW, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_SW_DB, 0);
}

static
void rtw89_phy_bb_wrap_listen_path_en_init(struct rtw89_dev *rtwdev)
{
	u32 addr;
	int ret;

	ret = rtw89_mac_check_mac_en(rtwdev, RTW89_MAC_1, RTW89_CMAC_SEL);
	if (ret)
		return;

	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_LISTEN_PATH, RTW89_MAC_1);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_LISTEN_PATH_EN, 0x2);
}

static void rtw89_phy_bb_wrap_force_cr_init(struct rtw89_dev *rtwdev,
					    enum rtw89_mac_idx mac_idx)
{
	u32 addr;

	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FORCE_LMT, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_LMT_ON, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_BOOST, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_RATE_ON, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_RULMT, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_RU_ENON, 0);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_RU_ON, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FORCE_MACID, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_MACID_ON, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_COEX_CTRL, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_COEX_ON, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_RATE_CTRL, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_FORCE_PWR_BY_RATE_EN, 0);
}

static void rtw89_phy_bb_wrap_ftm_init(struct rtw89_dev *rtwdev,
				       enum rtw89_mac_idx mac_idx)
{
	u32 addr;

	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FTM, mac_idx);
	rtw89_write32(rtwdev, addr, 0xE4E431);

	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FTM_SS, mac_idx);
	rtw89_write32_mask(rtwdev, addr, 0x7, 0);
}

static void rtw89_phy_bb_wrap_init_be(struct rtw89_dev *rtwdev)
{
	enum rtw89_mac_idx mac_idx = RTW89_MAC_0;

	rtw89_phy_bb_wrap_pwr_by_macid_init(rtwdev);
	rtw89_phy_bb_wrap_tx_path_by_macid_init(rtwdev);
	rtw89_phy_bb_wrap_listen_path_en_init(rtwdev);
	rtw89_phy_bb_wrap_force_cr_init(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_ftm_init(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_tpu_set_all(rtwdev, mac_idx);
}

static void rtw89_phy_ch_info_init_be(struct rtw89_dev *rtwdev)
{
	rtw89_phy_write32_mask(rtwdev, R_CHINFO_SEG, B_CHINFO_SEG_LEN, 0x0);
	rtw89_phy_write32_mask(rtwdev, R_CHINFO_SEG, B_CHINFO_SEG, 0xf);
	rtw89_phy_write32_mask(rtwdev, R_CHINFO_DATA, B_CHINFO_DATA_BITMAP, 0x1);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_ELM_SRC, B_CHINFO_ELM_BITMAP, 0x40303);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_ELM_SRC, B_CHINFO_SRC, 0x0);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_TYPE_SCAL, B_CHINFO_TYPE, 0x3);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_TYPE_SCAL, B_CHINFO_SCAL, 0x0);
}

struct rtw89_byr_spec_ent_be {
	struct rtw89_rate_desc init;
	u8 num_of_idx;
	bool no_over_bw40;
	bool no_multi_nss;
};

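/* Entries are consumed in order by __phy_set_txpwr_byrate_be(), which packs
 * four s8 power values into each 32-bit register starting at
 * R_BE_PWR_BY_RATE.
 */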
static const struct rtw89_byr_spec_ent_be rtw89_byr_spec_be[] = {
	{
		.init = { .rs = RTW89_RS_CCK },
		.num_of_idx = RTW89_RATE_CCK_NUM,
		.no_over_bw40 = true,
		.no_multi_nss = true,
	},
	{
		.init = { .rs = RTW89_RS_OFDM },
		.num_of_idx = RTW89_RATE_OFDM_NUM,
		.no_multi_nss = true,
	},
	{
		.init = { .rs = RTW89_RS_MCS, .idx = 14, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = 2,
		.no_multi_nss = true,
	},
	{
		.init = { .rs = RTW89_RS_MCS, .idx = 14, .ofdma = RTW89_OFDMA },
		.num_of_idx = 2,
		.no_multi_nss = true,
	},
	{
		.init = { .rs = RTW89_RS_MCS, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = 14,
	},
	{
		.init = { .rs = RTW89_RS_HEDCM, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = RTW89_RATE_HEDCM_NUM,
	},
	{
		.init = { .rs = RTW89_RS_MCS, .ofdma = RTW89_OFDMA },
		.num_of_idx = 14,
	},
	{
		.init = { .rs = RTW89_RS_HEDCM, .ofdma = RTW89_OFDMA },
		.num_of_idx = RTW89_RATE_HEDCM_NUM,
	},
};

static
void __phy_set_txpwr_byrate_be(struct rtw89_dev *rtwdev, u8 band, u8 bw,
			       u8 nss, u32 *addr, enum rtw89_phy_idx phy_idx)
{
	const struct rtw89_byr_spec_ent_be *ent;
	struct rtw89_rate_desc desc;
	int pos = 0;
	int i, j;
	u32 val;
	s8 v[4];

	for (i = 0; i < ARRAY_SIZE(rtw89_byr_spec_be); i++) {
		ent = &rtw89_byr_spec_be[i];

		if (bw > RTW89_CHANNEL_WIDTH_40 && ent->no_over_bw40)
			continue;
		if (nss > RTW89_NSS_1 && ent->no_multi_nss)
			continue;

		desc = ent->init;
		desc.nss = nss;
		for (j = 0; j < ent->num_of_idx; j++, desc.idx++) {
			v[pos] = rtw89_phy_read_txpwr_byrate(rtwdev, band, bw,
							     &desc);
			pos = (pos + 1) % 4;
			if (pos)
				continue;

			val = u32_encode_bits(v[0], GENMASK(7, 0)) |
			      u32_encode_bits(v[1], GENMASK(15, 8)) |
			      u32_encode_bits(v[2], GENMASK(23, 16)) |
			      u32_encode_bits(v[3], GENMASK(31, 24));

			rtw89_mac_txpwr_write32(rtwdev, phy_idx, *addr, val);
			*addr += 4;
		}
	}
}

static void rtw89_phy_set_txpwr_byrate_be(struct rtw89_dev *rtwdev,
					  const struct rtw89_chan *chan,
					  enum rtw89_phy_idx phy_idx)
{
	u32 addr = R_BE_PWR_BY_RATE;
	u8 band = chan->band_type;
	u8 bw, nss;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr byrate on band %d\n", band);

	for (bw = 0; bw <= RTW89_CHANNEL_WIDTH_320; bw++)
		for (nss = 0; nss <= RTW89_NSS_2; nss++)
			__phy_set_txpwr_byrate_be(rtwdev, band, bw, nss,
						  &addr, phy_idx);
}

static void rtw89_phy_set_txpwr_offset_be(struct rtw89_dev *rtwdev,
					  const struct rtw89_chan *chan,
					  enum rtw89_phy_idx phy_idx)
{
	struct rtw89_rate_desc desc = {
		.nss = RTW89_NSS_1,
		.rs = RTW89_RS_OFFSET,
	};
	u8 band = chan->band_type;
	s8 v[RTW89_RATE_OFFSET_NUM_BE] = {};
	u32 val;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr offset on band %d\n", band);

	for (desc.idx = 0; desc.idx < RTW89_RATE_OFFSET_NUM_BE; desc.idx++)
		v[desc.idx] = rtw89_phy_read_txpwr_byrate(rtwdev, band, 0, &desc);

	val = u32_encode_bits(v[RTW89_RATE_OFFSET_CCK], GENMASK(3, 0)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_OFDM], GENMASK(7, 4)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_HT], GENMASK(11, 8)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_VHT], GENMASK(15, 12)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_HE], GENMASK(19, 16)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_EHT], GENMASK(23, 20)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_DLRU_HE], GENMASK(27, 24)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_DLRU_EHT], GENMASK(31, 28));

	rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_BE_PWR_RATE_OFST_CTRL, val);
}

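/* Helpers below gather regulatory TX power limits: fill_limit_nonbf_bf()
 * reads one limit per beamforming state (non-BF and BF), and the _min
 * variant keeps the smaller of the limits read at two channel positions.
 */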
static void
fill_limit_nonbf_bf(struct rtw89_dev *rtwdev, s8 (*ptr)[RTW89_BF_NUM],
		    u8 band, u8 bw, u8 ntx, u8 rs, u8 ch)
{
	int bf;

	for (bf = 0; bf < RTW89_BF_NUM; bf++)
		(*ptr)[bf] = rtw89_phy_read_txpwr_limit(rtwdev, band, bw, ntx,
							rs, bf, ch);
}

static void
fill_limit_nonbf_bf_min(struct rtw89_dev *rtwdev, s8 (*ptr)[RTW89_BF_NUM],
			u8 band, u8 bw, u8 ntx, u8 rs, u8 ch1, u8 ch2)
{
	s8 v1[RTW89_BF_NUM];
	s8 v2[RTW89_BF_NUM];
	int bf;

	fill_limit_nonbf_bf(rtwdev, &v1, band, bw, ntx, rs, ch1);
	fill_limit_nonbf_bf(rtwdev, &v2, band, bw, ntx, rs, ch2);

	for (bf = 0; bf < RTW89_BF_NUM; bf++)
		(*ptr)[bf] = min(v1[bf], v2[bf]);
}

static void phy_fill_limit_20m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_20m, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_CCK, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_40m, band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_CCK, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch);
}

static void phy_fill_limit_40m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_20m, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_CCK, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_40m, band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_CCK, ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch);
}

static void phy_fill_limit_80m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch);

	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 4, ch + 4);
}

static void phy_fill_limit_160m_be(struct rtw89_dev *rtwdev,
				   struct rtw89_txpwr_limit_be *lmt,
				   u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 14);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[4], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[5], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[6], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[7], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 14);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 12);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[2], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[3], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 12);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 8);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[1], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 8);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[0], band,
			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch);

	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 12, ch - 4);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_2p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch + 4, ch + 12);
}

static void phy_fill_limit_320m_be(struct rtw89_dev *rtwdev,
				   struct rtw89_txpwr_limit_be *lmt,
				   u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 30);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 26);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 22);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 18);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[4], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 14);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[5], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[6], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[7], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[8], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[9], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[10], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[11], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 14);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[12], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 18);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[13], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 22);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[14], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 26);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[15], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 30);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 28);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 20);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[2], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 12);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[3], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[4], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[5], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 12);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[6], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 20);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[7], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 28);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 24);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[1], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 8);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[2], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 8);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[3], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 24);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[0], band,
			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch - 16);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[1], band,
			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch + 16);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_320m, band,
			    RTW89_CHANNEL_WIDTH_320, ntx, RTW89_RS_MCS, ch);

	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 28, ch - 20);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_2p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 12, ch - 4);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_4p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch + 4, ch + 12);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_6p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch + 20, ch + 28);
}

static void rtw89_phy_fill_limit_be(struct rtw89_dev *rtwdev,
				    const struct rtw89_chan *chan,
				    struct rtw89_txpwr_limit_be *lmt,
				    u8 ntx)
{
	u8 band = chan->band_type;
	u8 pri_ch = chan->primary_channel;
	u8 ch = chan->channel;
	u8 bw = chan->band_width;

	memset(lmt, 0, sizeof(*lmt));

	switch (bw) {
	case RTW89_CHANNEL_WIDTH_20:
		phy_fill_limit_20m_be(rtwdev, lmt, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_40:
		phy_fill_limit_40m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	case RTW89_CHANNEL_WIDTH_80:
		phy_fill_limit_80m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	case RTW89_CHANNEL_WIDTH_160:
		phy_fill_limit_160m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	case RTW89_CHANNEL_WIDTH_320:
		phy_fill_limit_320m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	}
}

static void rtw89_phy_set_txpwr_limit_be(struct rtw89_dev *rtwdev,
					 const struct rtw89_chan *chan,
					 enum rtw89_phy_idx phy_idx)
{
	struct rtw89_txpwr_limit_be lmt;
	const s8 *ptr;
	u32 addr, val;
	u8 i, j;

	BUILD_BUG_ON(sizeof(struct rtw89_txpwr_limit_be) !=
		     RTW89_TXPWR_LMT_PAGE_SIZE_BE);

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr limit on band %d bw %d\n",
		    chan->band_type, chan->band_width);

	addr = R_BE_PWR_LMT;
	for (i = 0; i <= RTW89_NSS_2; i++) {
		rtw89_phy_fill_limit_be(rtwdev, chan, &lmt, i);

		ptr = (s8 *)&lmt;
		for (j = 0; j < RTW89_TXPWR_LMT_PAGE_SIZE_BE;
		     j += 4, addr += 4, ptr += 4) {
			val = u32_encode_bits(ptr[0], GENMASK(7, 0)) |
			      u32_encode_bits(ptr[1], GENMASK(15, 8)) |
			      u32_encode_bits(ptr[2], GENMASK(23, 16)) |
			      u32_encode_bits(ptr[3], GENMASK(31, 24));

			rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
		}
	}
}

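/* Fill RU (OFDMA) TX power limits: for each 20 MHz sub-channel position,
 * read the RU26/RU52/RU106 limits plus the RU52_26 and RU106_26 variants.
 */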
static void fill_limit_ru_each(struct rtw89_dev *rtwdev, u8 index,
			       struct rtw89_txpwr_limit_ru_be *lmt_ru,
			       u8 band, u8 ntx, u8 ch)
{
	lmt_ru->ru26[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU26, ntx, ch);
	lmt_ru->ru52[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU52, ntx, ch);
	lmt_ru->ru106[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU106, ntx, ch);
	lmt_ru->ru52_26[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU52_26, ntx, ch);
	lmt_ru->ru106_26[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU106_26, ntx, ch);
}

static void phy_fill_limit_ru_20m_be(struct rtw89_dev *rtwdev,
				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
				     u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch);
}

static void phy_fill_limit_ru_40m_be(struct rtw89_dev *rtwdev,
				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
				     u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 2);
	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch + 2);
}

static void phy_fill_limit_ru_80m_be(struct rtw89_dev *rtwdev,
				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
				     u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 6);
	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 2);
	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch + 2);
	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch + 6);
}

static void phy_fill_limit_ru_160m_be(struct rtw89_dev *rtwdev,
				      struct rtw89_txpwr_limit_ru_be *lmt_ru,
				      u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 14);
	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 10);
	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch - 6);
	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch - 2);
	fill_limit_ru_each(rtwdev, 4, lmt_ru, band, ntx, ch + 2);
	fill_limit_ru_each(rtwdev, 5, lmt_ru, band, ntx, ch + 6);
	fill_limit_ru_each(rtwdev, 6, lmt_ru, band, ntx, ch + 10);
	fill_limit_ru_each(rtwdev, 7, lmt_ru, band, ntx, ch + 14);
}

static void phy_fill_limit_ru_320m_be(struct rtw89_dev *rtwdev,
				      struct rtw89_txpwr_limit_ru_be *lmt_ru,
				      u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 30);
	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 26);
	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch - 22);
	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch - 18);
	fill_limit_ru_each(rtwdev, 4, lmt_ru, band, ntx, ch - 14);
	fill_limit_ru_each(rtwdev, 5, lmt_ru, band, ntx, ch - 10);
	fill_limit_ru_each(rtwdev, 6, lmt_ru, band, ntx, ch - 6);
	fill_limit_ru_each(rtwdev, 7, lmt_ru, band, ntx, ch - 2);
	fill_limit_ru_each(rtwdev, 8, lmt_ru, band, ntx, ch + 2);
	fill_limit_ru_each(rtwdev, 9, lmt_ru, band, ntx, ch + 6);
	fill_limit_ru_each(rtwdev, 10, lmt_ru, band, ntx, ch + 10);
	fill_limit_ru_each(rtwdev, 11, lmt_ru, band, ntx, ch + 14);
	fill_limit_ru_each(rtwdev, 12, lmt_ru, band, ntx, ch + 18);
	fill_limit_ru_each(rtwdev, 13, lmt_ru, band, ntx, ch + 22);
	fill_limit_ru_each(rtwdev, 14, lmt_ru, band, ntx, ch + 26);
	fill_limit_ru_each(rtwdev, 15, lmt_ru, band, ntx, ch + 30);
}

static void rtw89_phy_fill_limit_ru_be(struct rtw89_dev *rtwdev,
				       const struct rtw89_chan *chan,
				       struct rtw89_txpwr_limit_ru_be *lmt_ru,
				       u8 ntx)
{
	u8 band = chan->band_type;
	u8 ch = chan->channel;
	u8 bw = chan->band_width;

	memset(lmt_ru, 0, sizeof(*lmt_ru));

	switch (bw) {
	case RTW89_CHANNEL_WIDTH_20:
		phy_fill_limit_ru_20m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_40:
		phy_fill_limit_ru_40m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_80:
		phy_fill_limit_ru_80m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_160:
		phy_fill_limit_ru_160m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_320:
		phy_fill_limit_ru_320m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	}
}

static void rtw89_phy_set_txpwr_limit_ru_be(struct rtw89_dev *rtwdev,
					    const struct rtw89_chan *chan,
					    enum rtw89_phy_idx phy_idx)
{
	struct rtw89_txpwr_limit_ru_be lmt_ru;
	const s8 *ptr;
	u32 addr, val;
	u8 i, j;

	BUILD_BUG_ON(sizeof(struct rtw89_txpwr_limit_ru_be) !=
		     RTW89_TXPWR_LMT_RU_PAGE_SIZE_BE);

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr limit ru on band %d bw %d\n",
		    chan->band_type, chan->band_width);

	addr = R_BE_PWR_RU_LMT;
	for (i = 0; i <= RTW89_NSS_2; i++) {
		rtw89_phy_fill_limit_ru_be(rtwdev, chan, &lmt_ru, i);

		ptr = (s8 *)&lmt_ru;
		for (j = 0; j < RTW89_TXPWR_LMT_RU_PAGE_SIZE_BE;
		     j += 4, addr += 4, ptr += 4) {
			val = u32_encode_bits(ptr[0], GENMASK(7, 0)) |
			      u32_encode_bits(ptr[1], GENMASK(15, 8)) |
			      u32_encode_bits(ptr[2], GENMASK(23, 16)) |
			      u32_encode_bits(ptr[3], GENMASK(31, 24));

			rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
		}
	}
}

const struct rtw89_phy_gen_def rtw89_phy_gen_be = {
	.cr_base = 0x20000,
	.ccx = &rtw89_ccx_regs_be,
	.physts = &rtw89_physts_regs_be,
	.cfo = &rtw89_cfo_regs_be,
	.phy0_phy1_offset = rtw89_phy0_phy1_offset_be,
	.config_bb_gain = rtw89_phy_config_bb_gain_be,
	.preinit_rf_nctl = rtw89_phy_preinit_rf_nctl_be,
	.bb_wrap_init = rtw89_phy_bb_wrap_init_be,
	.ch_info_init = rtw89_phy_ch_info_init_be,

	.set_txpwr_byrate = rtw89_phy_set_txpwr_byrate_be,
	.set_txpwr_offset = rtw89_phy_set_txpwr_offset_be,
	.set_txpwr_limit = rtw89_phy_set_txpwr_limit_be,
	.set_txpwr_limit_ru = rtw89_phy_set_txpwr_limit_ru_be,
};
EXPORT_SYMBOL(rtw89_phy_gen_be);