1// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2/* Copyright(c) 2023 Realtek Corporation
3 */
4
5#include "coex.h"
6#include "debug.h"
7#include "efuse.h"
8#include "fw.h"
9#include "mac.h"
10#include "phy.h"
11#include "reg.h"
12#include "rtw8922a.h"
13#include "rtw8922a_rfk.h"
14#include "util.h"
15
16#define RTW8922A_FW_FORMAT_MAX 0
17#define RTW8922A_FW_BASENAME "rtw89/rtw8922a_fw"
18#define RTW8922A_MODULE_FIRMWARE \
19 RTW8922A_FW_BASENAME ".bin"
20
21#define HE_N_USER_MAX_8922A 4
22
/* HCI flow-control per-channel page config for PCIe:
 * {min reserved pages, max pages, page group} for each DMA channel.
 * FWCMDQ/BMC/H2D are exempt from page-based flow control (all zero).
 */
static const struct rtw89_hfc_ch_cfg rtw8922a_hfc_chcfg_pcie[] = {
	{2, 1641, grp_0}, /* ACH 0 */
	{2, 1641, grp_0}, /* ACH 1 */
	{2, 1641, grp_0}, /* ACH 2 */
	{2, 1641, grp_0}, /* ACH 3 */
	{2, 1641, grp_1}, /* ACH 4 */
	{2, 1641, grp_1}, /* ACH 5 */
	{2, 1641, grp_1}, /* ACH 6 */
	{2, 1641, grp_1}, /* ACH 7 */
	{2, 1641, grp_0}, /* B0MGQ */
	{2, 1641, grp_0}, /* B0HIQ */
	{2, 1641, grp_1}, /* B1MGQ */
	{2, 1641, grp_1}, /* B1HIQ */
	{0, 0, 0}, /* FWCMDQ */
	{0, 0, 0}, /* BMC */
	{0, 0, 0}, /* H2D */
};
40
/* HCI flow-control public page pool config for PCIe.
 * Public max (3302) is the sum of the two group budgets (1651 + 1651).
 */
static const struct rtw89_hfc_pub_cfg rtw8922a_hfc_pubcfg_pcie = {
	1651, /* Group 0 */
	1651, /* Group 1 */
	3302, /* Public Max */
	0, /* WP threshold */
};
47
48static const struct rtw89_hfc_param_ini rtw8922a_hfc_param_ini_pcie[] = {
49 [RTW89_QTA_SCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie,
50 &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH},
51 [RTW89_QTA_DBCC] = {.ch_cfg: rtw8922a_hfc_chcfg_pcie, .pub_cfg: &rtw8922a_hfc_pubcfg_pcie,
52 .prec_cfg: &rtw89_mac_size.hfc_prec_cfg_c0, .mode: RTW89_HCIFC_POH},
53 [RTW89_QTA_DLFW] = {NULL, NULL, .prec_cfg: &rtw89_mac_size.hfc_prec_cfg_c2,
54 .mode: RTW89_HCIFC_POH},
55 [RTW89_QTA_INVALID] = {NULL},
56};
57
58static const struct rtw89_dle_mem rtw8922a_dle_mem_pcie[] = {
59 [RTW89_QTA_SCC] = {.mode: RTW89_QTA_SCC, .wde_size: &rtw89_mac_size.wde_size0_v1,
60 .ple_size: &rtw89_mac_size.ple_size0_v1, .wde_min_qt: &rtw89_mac_size.wde_qt0_v1,
61 .wde_max_qt: &rtw89_mac_size.wde_qt0_v1, .ple_min_qt: &rtw89_mac_size.ple_qt0,
62 .ple_max_qt: &rtw89_mac_size.ple_qt1, .rsvd_qt: &rtw89_mac_size.ple_rsvd_qt0,
63 .rsvd0_size: &rtw89_mac_size.rsvd0_size0, .rsvd1_size: &rtw89_mac_size.rsvd1_size0},
64 [RTW89_QTA_DBCC] = {.mode: RTW89_QTA_DBCC, .wde_size: &rtw89_mac_size.wde_size0_v1,
65 .ple_size: &rtw89_mac_size.ple_size0_v1, .wde_min_qt: &rtw89_mac_size.wde_qt0_v1,
66 .wde_max_qt: &rtw89_mac_size.wde_qt0_v1, .ple_min_qt: &rtw89_mac_size.ple_qt0,
67 .ple_max_qt: &rtw89_mac_size.ple_qt1, .rsvd_qt: &rtw89_mac_size.ple_rsvd_qt0,
68 .rsvd0_size: &rtw89_mac_size.rsvd0_size0, .rsvd1_size: &rtw89_mac_size.rsvd1_size0},
69 [RTW89_QTA_DLFW] = {.mode: RTW89_QTA_DLFW, .wde_size: &rtw89_mac_size.wde_size4_v1,
70 .ple_size: &rtw89_mac_size.ple_size3_v1, .wde_min_qt: &rtw89_mac_size.wde_qt4,
71 .wde_max_qt: &rtw89_mac_size.wde_qt4, .ple_min_qt: &rtw89_mac_size.ple_qt9,
72 .ple_max_qt: &rtw89_mac_size.ple_qt9, .rsvd_qt: &rtw89_mac_size.ple_rsvd_qt1,
73 .rsvd0_size: &rtw89_mac_size.rsvd0_size0, .rsvd1_size: &rtw89_mac_size.rsvd1_size0},
74 [RTW89_QTA_INVALID] = {.mode: RTW89_QTA_INVALID, NULL, NULL, NULL, NULL, NULL,
75 NULL},
76};
77
/* Host-to-CPU (H2C) mailbox data registers, BE generation. */
static const u32 rtw8922a_h2c_regs[RTW89_H2CREG_MAX] = {
	R_BE_H2CREG_DATA0, R_BE_H2CREG_DATA1, R_BE_H2CREG_DATA2,
	R_BE_H2CREG_DATA3
};
82
83static const u32 rtw8922a_c2h_regs[RTW89_H2CREG_MAX] = {
84 R_BE_C2HREG_DATA0, R_BE_C2HREG_DATA1, R_BE_C2HREG_DATA2,
85 R_BE_C2HREG_DATA3
86};
87
/* HCI flow-control page accounting register map (BE generation). */
static const struct rtw89_page_regs rtw8922a_page_regs = {
	.hci_fc_ctrl = R_BE_HCI_FC_CTRL,
	.ch_page_ctrl = R_BE_CH_PAGE_CTRL,
	.ach_page_ctrl = R_BE_CH0_PAGE_CTRL,
	.ach_page_info = R_BE_CH0_PAGE_INFO,
	.pub_page_info3 = R_BE_PUB_PAGE_INFO3,
	.pub_page_ctrl1 = R_BE_PUB_PAGE_CTRL1,
	.pub_page_ctrl2 = R_BE_PUB_PAGE_CTRL2,
	.pub_page_info1 = R_BE_PUB_PAGE_INFO1,
	.pub_page_info2 = R_BE_PUB_PAGE_INFO2,
	.wp_page_ctrl1 = R_BE_WP_PAGE_CTRL1,
	.wp_page_ctrl2 = R_BE_WP_PAGE_CTRL2,
	.wp_page_info1 = R_BE_WP_PAGE_INFO1,
};
102
/* DMAC interrupt mask registers: {register, bits to clear, bits to set}
 * applied in order when (re)programming the DMAC IMR.
 */
static const struct rtw89_reg_imr rtw8922a_imr_dmac_regs[] = {
	{R_BE_DISP_HOST_IMR, B_BE_DISP_HOST_IMR_CLR, B_BE_DISP_HOST_IMR_SET},
	{R_BE_DISP_CPU_IMR, B_BE_DISP_CPU_IMR_CLR, B_BE_DISP_CPU_IMR_SET},
	{R_BE_DISP_OTHER_IMR, B_BE_DISP_OTHER_IMR_CLR, B_BE_DISP_OTHER_IMR_SET},
	{R_BE_PKTIN_ERR_IMR, B_BE_PKTIN_ERR_IMR_CLR, B_BE_PKTIN_ERR_IMR_SET},
	{R_BE_INTERRUPT_MASK_REG, B_BE_INTERRUPT_MASK_REG_CLR, B_BE_INTERRUPT_MASK_REG_SET},
	{R_BE_MLO_ERR_IDCT_IMR, B_BE_MLO_ERR_IDCT_IMR_CLR, B_BE_MLO_ERR_IDCT_IMR_SET},
	{R_BE_MPDU_TX_ERR_IMR, B_BE_MPDU_TX_ERR_IMR_CLR, B_BE_MPDU_TX_ERR_IMR_SET},
	{R_BE_MPDU_RX_ERR_IMR, B_BE_MPDU_RX_ERR_IMR_CLR, B_BE_MPDU_RX_ERR_IMR_SET},
	{R_BE_SEC_ERROR_IMR, B_BE_SEC_ERROR_IMR_CLR, B_BE_SEC_ERROR_IMR_SET},
	{R_BE_CPUIO_ERR_IMR, B_BE_CPUIO_ERR_IMR_CLR, B_BE_CPUIO_ERR_IMR_SET},
	{R_BE_WDE_ERR_IMR, B_BE_WDE_ERR_IMR_CLR, B_BE_WDE_ERR_IMR_SET},
	{R_BE_WDE_ERR1_IMR, B_BE_WDE_ERR1_IMR_CLR, B_BE_WDE_ERR1_IMR_SET},
	{R_BE_PLE_ERR_IMR, B_BE_PLE_ERR_IMR_CLR, B_BE_PLE_ERR_IMR_SET},
	{R_BE_PLE_ERRFLAG1_IMR, B_BE_PLE_ERRFLAG1_IMR_CLR, B_BE_PLE_ERRFLAG1_IMR_SET},
	{R_BE_WDRLS_ERR_IMR, B_BE_WDRLS_ERR_IMR_CLR, B_BE_WDRLS_ERR_IMR_SET},
	{R_BE_TXPKTCTL_B0_ERRFLAG_IMR, B_BE_TXPKTCTL_B0_ERRFLAG_IMR_CLR,
	 B_BE_TXPKTCTL_B0_ERRFLAG_IMR_SET},
	{R_BE_TXPKTCTL_B1_ERRFLAG_IMR, B_BE_TXPKTCTL_B1_ERRFLAG_IMR_CLR,
	 B_BE_TXPKTCTL_B1_ERRFLAG_IMR_SET},
	{R_BE_BBRPT_COM_ERR_IMR, B_BE_BBRPT_COM_ERR_IMR_CLR, B_BE_BBRPT_COM_ERR_IMR_SET},
	{R_BE_BBRPT_CHINFO_ERR_IMR, B_BE_BBRPT_CHINFO_ERR_IMR_CLR,
	 B_BE_BBRPT_CHINFO_ERR_IMR_SET},
	{R_BE_BBRPT_DFS_ERR_IMR, B_BE_BBRPT_DFS_ERR_IMR_CLR, B_BE_BBRPT_DFS_ERR_IMR_SET},
	{R_BE_LA_ERRFLAG_IMR, B_BE_LA_ERRFLAG_IMR_CLR, B_BE_LA_ERRFLAG_IMR_SET},
	{R_BE_CH_INFO_DBGFLAG_IMR, B_BE_CH_INFO_DBGFLAG_IMR_CLR, B_BE_CH_INFO_DBGFLAG_IMR_SET},
	{R_BE_PLRLS_ERR_IMR, B_BE_PLRLS_ERR_IMR_CLR, B_BE_PLRLS_ERR_IMR_SET},
	{R_BE_HAXI_IDCT_MSK, B_BE_HAXI_IDCT_MSK_CLR, B_BE_HAXI_IDCT_MSK_SET},
};
132
/* Wraps the DMAC IMR list with its element count for the core IMR setup. */
static const struct rtw89_imr_table rtw8922a_imr_dmac_table = {
	.regs = rtw8922a_imr_dmac_regs,
	.n_regs = ARRAY_SIZE(rtw8922a_imr_dmac_regs),
};
137
/* CMAC interrupt mask registers: {register, bits to clear, bits to set}. */
static const struct rtw89_reg_imr rtw8922a_imr_cmac_regs[] = {
	{R_BE_RESP_IMR, B_BE_RESP_IMR_CLR, B_BE_RESP_IMR_SET},
	{R_BE_RX_ERROR_FLAG_IMR, B_BE_RX_ERROR_FLAG_IMR_CLR, B_BE_RX_ERROR_FLAG_IMR_SET},
	{R_BE_TX_ERROR_FLAG_IMR, B_BE_TX_ERROR_FLAG_IMR_CLR, B_BE_TX_ERROR_FLAG_IMR_SET},
	/* NOTE(review): RX register paired with TX_ERROR_FLAG_IMR_1 masks -
	 * matches the vendor table as written; confirm against datasheet.
	 */
	{R_BE_RX_ERROR_FLAG_IMR_1, B_BE_TX_ERROR_FLAG_IMR_1_CLR, B_BE_TX_ERROR_FLAG_IMR_1_SET},
	{R_BE_PTCL_IMR1, B_BE_PTCL_IMR1_CLR, B_BE_PTCL_IMR1_SET},
	{R_BE_PTCL_IMR0, B_BE_PTCL_IMR0_CLR, B_BE_PTCL_IMR0_SET},
	{R_BE_PTCL_IMR_2, B_BE_PTCL_IMR_2_CLR, B_BE_PTCL_IMR_2_SET},
	{R_BE_SCHEDULE_ERR_IMR, B_BE_SCHEDULE_ERR_IMR_CLR, B_BE_SCHEDULE_ERR_IMR_SET},
	{R_BE_C0_TXPWR_IMR, B_BE_C0_TXPWR_IMR_CLR, B_BE_C0_TXPWR_IMR_SET},
	{R_BE_TRXPTCL_ERROR_INDICA_MASK, B_BE_TRXPTCL_ERROR_INDICA_MASK_CLR,
	 B_BE_TRXPTCL_ERROR_INDICA_MASK_SET},
	{R_BE_RX_ERR_IMR, B_BE_RX_ERR_IMR_CLR, B_BE_RX_ERR_IMR_SET},
	{R_BE_PHYINFO_ERR_IMR_V1, B_BE_PHYINFO_ERR_IMR_V1_CLR, B_BE_PHYINFO_ERR_IMR_V1_SET},
};
153
/* Wraps the CMAC IMR list with its element count for the core IMR setup. */
static const struct rtw89_imr_table rtw8922a_imr_cmac_table = {
	.regs = rtw8922a_imr_cmac_regs,
	.n_regs = ARRAY_SIZE(rtw8922a_imr_cmac_regs),
};
158
/* RRSR (response rate set) field locations: {register, mask, offset}. */
static const struct rtw89_rrsr_cfgs rtw8922a_rrsr_cfgs = {
	.ref_rate = {R_BE_TRXPTCL_RESP_1, B_BE_WMAC_RESP_REF_RATE_SEL, 0},
	.rsc = {R_BE_PTCL_RRSR1, B_BE_RSC_MASK, 2},
};
163
/* DIG (dynamic initial gain) register/bitfield map: packet-detect
 * thresholds, per-path LNA/TIA/RXB initial gain indexes, and the
 * follow-by-PAGC/UGC enables for primary/secondary 20 MHz segments.
 */
static const struct rtw89_dig_regs rtw8922a_dig_regs = {
	.seg0_pd_reg = R_SEG0R_PD_V2,
	.pd_lower_bound_mask = B_SEG0R_PD_LOWER_BOUND_MSK,
	.pd_spatial_reuse_en = B_SEG0R_PD_SPATIAL_REUSE_EN_MSK_V1,
	.bmode_pd_reg = R_BMODE_PDTH_EN_V2,
	.bmode_cca_rssi_limit_en = B_BMODE_PDTH_LIMIT_EN_MSK_V1,
	.bmode_pd_lower_bound_reg = R_BMODE_PDTH_V2,
	.bmode_rssi_nocca_low_th_mask = B_BMODE_PDTH_LOWER_BOUND_MSK_V1,
	.p0_lna_init = {R_PATH0_LNA_INIT_V1, B_PATH0_LNA_INIT_IDX_MSK},
	.p1_lna_init = {R_PATH1_LNA_INIT_V1, B_PATH1_LNA_INIT_IDX_MSK},
	.p0_tia_init = {R_PATH0_TIA_INIT_V1, B_PATH0_TIA_INIT_IDX_MSK_V1},
	.p1_tia_init = {R_PATH1_TIA_INIT_V1, B_PATH1_TIA_INIT_IDX_MSK_V1},
	.p0_rxb_init = {R_PATH0_RXB_INIT_V1, B_PATH0_RXB_INIT_IDX_MSK_V1},
	.p1_rxb_init = {R_PATH1_RXB_INIT_V1, B_PATH1_RXB_INIT_IDX_MSK_V1},
	.p0_p20_pagcugc_en = {R_PATH0_P20_FOLLOW_BY_PAGCUGC_V3,
			      B_PATH0_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
	.p0_s20_pagcugc_en = {R_PATH0_S20_FOLLOW_BY_PAGCUGC_V3,
			      B_PATH0_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
	.p1_p20_pagcugc_en = {R_PATH1_P20_FOLLOW_BY_PAGCUGC_V3,
			      B_PATH1_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
	.p1_s20_pagcugc_en = {R_PATH1_S20_FOLLOW_BY_PAGCUGC_V3,
			      B_PATH1_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
};
187
/* EDCCA (energy-detect CCA) level/report register map, BE generation. */
static const struct rtw89_edcca_regs rtw8922a_edcca_regs = {
	.edcca_level = R_SEG0R_EDCCA_LVL_BE,
	.edcca_mask = B_EDCCA_LVL_MSK0,
	.edcca_p_mask = B_EDCCA_LVL_MSK1,
	.ppdu_level = R_SEG0R_PPDU_LVL_BE,
	.ppdu_mask = B_EDCCA_LVL_MSK1,
	.rpt_a = R_EDCCA_RPT_A_BE,
	.rpt_b = R_EDCCA_RPT_B_BE,
	.rpt_sel = R_EDCCA_RPT_SEL_BE,
	.rpt_sel_mask = B_EDCCA_RPT_SEL_MSK,
	.rpt_sel_be = R_EDCCA_RPTREG_SEL_BE,
	.rpt_sel_be_mask = B_EDCCA_RPTREG_SEL_BE_MSK,
	.tx_collision_t2r_st = R_TX_COLLISION_T2R_ST_BE,
	.tx_collision_t2r_st_mask = B_TX_COLLISION_T2R_ST_BE_M,
};
203
/* Logical efuse block layout: offset encodes the block index in the high
 * nibble (0x10000 apart); size is the usable bytes within each block.
 */
static const struct rtw89_efuse_block_cfg rtw8922a_efuse_blocks[] = {
	[RTW89_EFUSE_BLOCK_SYS] = {.offset = 0x00000, .size = 0x310},
	[RTW89_EFUSE_BLOCK_RF] = {.offset = 0x10000, .size = 0x240},
	[RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO] = {.offset = 0x20000, .size = 0x4800},
	[RTW89_EFUSE_BLOCK_HCI_DIG_USB] = {.offset = 0x30000, .size = 0x890},
	[RTW89_EFUSE_BLOCK_HCI_PHY_PCIE] = {.offset = 0x40000, .size = 0x200},
	[RTW89_EFUSE_BLOCK_HCI_PHY_USB3] = {.offset = 0x50000, .size = 0x80},
	[RTW89_EFUSE_BLOCK_HCI_PHY_USB2] = {.offset = 0x60000, .size = 0x0},
	[RTW89_EFUSE_BLOCK_ADIE] = {.offset = 0x70000, .size = 0x10},
};
214
215static void rtw8922a_ctrl_btg_bt_rx(struct rtw89_dev *rtwdev, bool en,
216 enum rtw89_phy_idx phy_idx)
217{
218 if (en) {
219 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, data: 0x1, phy_idx);
220 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, data: 0x0, phy_idx);
221 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, data: 0x1, phy_idx);
222 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, data: 0x1, phy_idx);
223 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, data: 0x20, phy_idx);
224 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, data: 0x30, phy_idx);
225 rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, data: 0x0, phy_idx);
226 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, data: 0x1, phy_idx);
227 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, data: 0x2, phy_idx);
228 rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
229 data: 0x1, phy_idx);
230 } else {
231 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, data: 0x0, phy_idx);
232 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, data: 0x0, phy_idx);
233 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, data: 0x0, phy_idx);
234 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, data: 0x0, phy_idx);
235 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, data: 0x1a, phy_idx);
236 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, data: 0x2a, phy_idx);
237 rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, data: 0xc, phy_idx);
238 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, data: 0x0, phy_idx);
239 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, data: 0x0, phy_idx);
240 rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
241 data: 0x0, phy_idx);
242 }
243}
244
245static int rtw8922a_pwr_on_func(struct rtw89_dev *rtwdev)
246{
247 struct rtw89_hal *hal = &rtwdev->hal;
248 u32 val32;
249 int ret;
250
251 rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_AFSM_WLSUS_EN |
252 B_BE_AFSM_PCIE_SUS_EN);
253 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_DIS_WLBT_PDNSUSEN_SOPC);
254 rtw89_write32_set(rtwdev, R_BE_WLLPS_CTRL, B_BE_DIS_WLBT_LPSEN_LOPC);
255 rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APDM_HPDN);
256 rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS);
257
258 ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_RDY_SYSPWR,
259 1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
260 if (ret)
261 return ret;
262
263 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON);
264 rtw89_write32_set(rtwdev, R_BE_WLRESUME_CTRL, B_BE_LPSROP_CMAC0 |
265 B_BE_LPSROP_CMAC1);
266 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFN_ONMAC);
267
268 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFN_ONMAC),
269 1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
270 if (ret)
271 return ret;
272
273 rtw89_write32_clr(rtwdev, R_BE_AFE_ON_CTRL1, B_BE_REG_CK_MON_CK960M_EN);
274 rtw89_write8_set(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 |
275 B_BE_POW_PC_LDO_PORT1);
276 rtw89_write32_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP |
277 B_BE_R_SYM_ISO_ADDA_P12PP);
278 rtw89_write8_set(rtwdev, R_BE_PLATFORM_ENABLE, B_BE_PLATFORM_EN);
279 rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN);
280
281 ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HAXIDMA_IO_ST,
282 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
283 if (ret)
284 return ret;
285
286 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST),
287 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
288 if (ret)
289 return ret;
290
291 rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN);
292
293 ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HCI_WLAN_IO_ST,
294 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
295 if (ret)
296 return ret;
297
298 rtw89_write32_clr(rtwdev, R_BE_SYS_SDIO_CTRL, B_BE_PCIE_FORCE_IBX_EN);
299
300 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_PLL, val: 0x02, mask: 0x02);
301 if (ret)
302 return ret;
303 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_PLL, val: 0x01, mask: 0x01);
304 if (ret)
305 return ret;
306
307 rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3);
308
309 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0x40, mask: 0x40);
310 if (ret)
311 return ret;
312
313 rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3);
314
315 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0x20, mask: 0x20);
316 if (ret)
317 return ret;
318 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0x04, mask: 0x04);
319 if (ret)
320 return ret;
321 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0x08, mask: 0x08);
322 if (ret)
323 return ret;
324 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0, mask: 0x10);
325 if (ret)
326 return ret;
327 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_WL_RFC_S0, val: 0xEB, mask: 0xFF);
328 if (ret)
329 return ret;
330 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_WL_RFC_S1, val: 0xEB, mask: 0xFF);
331 if (ret)
332 return ret;
333 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0x01, mask: 0x01);
334 if (ret)
335 return ret;
336 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0x02, mask: 0x02);
337 if (ret)
338 return ret;
339 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0, mask: 0x80);
340 if (ret)
341 return ret;
342 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_XREF_RF1, val: 0, mask: 0x40);
343 if (ret)
344 return ret;
345 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_XREF_RF2, val: 0, mask: 0x40);
346 if (ret)
347 return ret;
348 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_PLL_1, val: 0x40, mask: 0x60);
349 if (ret)
350 return ret;
351
352 if (hal->cv != CHIP_CAV) {
353 rtw89_write32_set(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK);
354 rtw89_write32_set(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_ISO_EB2CORE);
355 rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_B);
356
357 mdelay(1);
358
359 rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_S);
360 rtw89_write32_clr(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK);
361 }
362
363 rtw89_write32_set(rtwdev, R_BE_DMAC_FUNC_EN,
364 B_BE_MAC_FUNC_EN | B_BE_DMAC_FUNC_EN | B_BE_MPDU_PROC_EN |
365 B_BE_WD_RLS_EN | B_BE_DLE_WDE_EN | B_BE_TXPKT_CTRL_EN |
366 B_BE_STA_SCH_EN | B_BE_DLE_PLE_EN | B_BE_PKT_BUF_EN |
367 B_BE_DMAC_TBL_EN | B_BE_PKT_IN_EN | B_BE_DLE_CPUIO_EN |
368 B_BE_DISPATCHER_EN | B_BE_BBRPT_EN | B_BE_MAC_SEC_EN |
369 B_BE_H_AXIDMA_EN | B_BE_DMAC_MLO_EN | B_BE_PLRLS_EN |
370 B_BE_P_AXIDMA_EN | B_BE_DLE_DATACPUIO_EN | B_BE_LTR_CTL_EN);
371
372 set_bit(nr: RTW89_FLAG_DMAC_FUNC, addr: rtwdev->flags);
373
374 rtw89_write32_set(rtwdev, R_BE_CMAC_SHARE_FUNC_EN,
375 B_BE_CMAC_SHARE_EN | B_BE_RESPBA_EN | B_BE_ADDRSRCH_EN |
376 B_BE_BTCOEX_EN);
377 rtw89_write32_set(rtwdev, R_BE_CMAC_FUNC_EN,
378 B_BE_CMAC_EN | B_BE_CMAC_TXEN | B_BE_CMAC_RXEN |
379 B_BE_SIGB_EN | B_BE_PHYINTF_EN | B_BE_CMAC_DMA_EN |
380 B_BE_PTCLTOP_EN | B_BE_SCHEDULER_EN | B_BE_TMAC_EN |
381 B_BE_RMAC_EN | B_BE_TXTIME_EN | B_BE_RESP_PKTCTL_EN);
382
383 set_bit(nr: RTW89_FLAG_CMAC0_FUNC, addr: rtwdev->flags);
384
385 rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN |
386 B_BE_FEN_BBPLAT_RSTB);
387
388 if (!test_bit(RTW89_FLAG_PROBE_DONE, rtwdev->flags))
389 rtw89_efuse_read_fw_secure_be(rtwdev);
390
391 return 0;
392}
393
394static int rtw8922a_pwr_off_func(struct rtw89_dev *rtwdev)
395{
396 u32 val32;
397 int ret;
398
399 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0x10, mask: 0x10);
400 if (ret)
401 return ret;
402 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0, mask: 0x08);
403 if (ret)
404 return ret;
405 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0, mask: 0x04);
406 if (ret)
407 return ret;
408 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_WL_RFC_S0, val: 0xC6, mask: 0xFF);
409 if (ret)
410 return ret;
411 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_WL_RFC_S1, val: 0xC6, mask: 0xFF);
412 if (ret)
413 return ret;
414 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0x80, mask: 0x80);
415 if (ret)
416 return ret;
417 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0, mask: 0x02);
418 if (ret)
419 return ret;
420 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0, mask: 0x01);
421 if (ret)
422 return ret;
423 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_PLL, val: 0x02, mask: 0xFF);
424 if (ret)
425 return ret;
426 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_PLL, val: 0x00, mask: 0xFF);
427 if (ret)
428 return ret;
429
430 rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP |
431 B_BE_R_SYM_ISO_ADDA_P12PP);
432 rtw89_write8_clr(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 |
433 B_BE_POW_PC_LDO_PORT1);
434 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON);
435 rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN |
436 B_BE_FEN_BBPLAT_RSTB);
437 rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3);
438
439 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0, mask: 0x20);
440 if (ret)
441 return ret;
442
443 rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3);
444
445 ret = rtw89_mac_write_xtal_si(rtwdev, offset: XTAL_SI_ANAPAR_WL, val: 0, mask: 0x40);
446 if (ret)
447 return ret;
448
449 rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN);
450
451 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_IO_ST),
452 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
453 if (ret)
454 return ret;
455
456 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST),
457 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
458 if (ret)
459 return ret;
460
461 rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN);
462
463 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HCI_WLAN_IO_ST),
464 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
465 if (ret)
466 return ret;
467
468 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_OFFMAC);
469
470 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFM_OFFMAC),
471 1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
472 if (ret)
473 return ret;
474
475 rtw89_write32(rtwdev, R_BE_WLLPS_CTRL, data: 0x0000A1B2);
476 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_XTAL_OFF_A_DIE);
477 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS);
478 rtw89_write32(rtwdev, R_BE_UDM1, data: 0);
479
480 return 0;
481}
482
483static void rtw8922a_efuse_parsing_tssi(struct rtw89_dev *rtwdev,
484 struct rtw8922a_efuse *map)
485{
486 struct rtw8922a_tssi_offset *ofst[] = {&map->path_a_tssi, &map->path_b_tssi};
487 u8 *bw40_1s_tssi_6g_ofst[] = {map->bw40_1s_tssi_6g_a, map->bw40_1s_tssi_6g_b};
488 struct rtw89_tssi_info *tssi = &rtwdev->tssi;
489 u8 i, j;
490
491 tssi->thermal[RF_PATH_A] = map->path_a_therm;
492 tssi->thermal[RF_PATH_B] = map->path_b_therm;
493
494 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
495 memcpy(tssi->tssi_cck[i], ofst[i]->cck_tssi,
496 sizeof(ofst[i]->cck_tssi));
497
498 for (j = 0; j < TSSI_CCK_CH_GROUP_NUM; j++)
499 rtw89_debug(rtwdev, mask: RTW89_DBG_TSSI,
500 fmt: "[TSSI][EFUSE] path=%d cck[%d]=0x%x\n",
501 i, j, tssi->tssi_cck[i][j]);
502
503 memcpy(tssi->tssi_mcs[i], ofst[i]->bw40_tssi,
504 sizeof(ofst[i]->bw40_tssi));
505 memcpy(tssi->tssi_mcs[i] + TSSI_MCS_2G_CH_GROUP_NUM,
506 ofst[i]->bw40_1s_tssi_5g, sizeof(ofst[i]->bw40_1s_tssi_5g));
507 memcpy(tssi->tssi_6g_mcs[i], bw40_1s_tssi_6g_ofst[i],
508 sizeof(tssi->tssi_6g_mcs[i]));
509
510 for (j = 0; j < TSSI_MCS_CH_GROUP_NUM; j++)
511 rtw89_debug(rtwdev, mask: RTW89_DBG_TSSI,
512 fmt: "[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n",
513 i, j, tssi->tssi_mcs[i][j]);
514 }
515}
516
/* Copy per-path/per-band RX gain offsets from the RF efuse map, convert
 * each from sign-bit + U(7,2) format to two's-complement S(8,2), and mark
 * the set valid only when the efuse was actually programmed (neither all
 * 0xff nor all 0x00).
 */
static void rtw8922a_efuse_parsing_gain_offset(struct rtw89_dev *rtwdev,
					       struct rtw8922a_efuse *map)
{
	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
	bool all_0xff = true, all_0x00 = true;
	int i, j;
	u8 t;

	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_a._2g_cck;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_b._2g_cck;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_a._2g_ofdm;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_b._2g_ofdm;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_a._5g_low;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_b._5g_low;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_a._5g_mid;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_b._5g_mid;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_a._5g_high;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_b._5g_high;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_a._6g_l0;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_b._6g_l0;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_a._6g_l1;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_b._6g_l1;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_a._6g_m0;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_b._6g_m0;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_a._6g_m1;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_b._6g_m1;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_a._6g_h0;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_b._6g_h0;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_a._6g_h1;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_b._6g_h1;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_a._6g_uh0;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_b._6g_uh0;
	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_a._6g_uh1;
	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_b._6g_uh1;

	for (i = RF_PATH_A; i <= RF_PATH_B; i++)
		for (j = 0; j < RTW89_GAIN_OFFSET_NR; j++) {
			t = gain->offset[i][j];
			if (t != 0xff)
				all_0xff = false;
			if (t != 0x0)
				all_0x00 = false;

			/* transform: sign-bit + U(7,2) to S(8,2) */
			if (t & 0x80)
				gain->offset[i][j] = (t ^ 0x7f) + 1;
		}

	gain->offset_valid = !all_0xff && !all_0x00;
}
567
568static void rtw8922a_read_efuse_mac_addr(struct rtw89_dev *rtwdev, u32 addr)
569{
570 struct rtw89_efuse *efuse = &rtwdev->efuse;
571 u16 val;
572 int i;
573
574 for (i = 0; i < ETH_ALEN; i += 2, addr += 2) {
575 val = rtw89_read16(rtwdev, addr);
576 efuse->addr[i] = val & 0xff;
577 efuse->addr[i + 1] = val >> 8;
578 }
579}
580
581static int rtw8922a_read_efuse_pci_sdio(struct rtw89_dev *rtwdev, u8 *log_map)
582{
583 struct rtw89_efuse *efuse = &rtwdev->efuse;
584
585 if (rtwdev->hci.type == RTW89_HCI_TYPE_PCIE)
586 rtw8922a_read_efuse_mac_addr(rtwdev, addr: 0x3104);
587 else
588 ether_addr_copy(dst: efuse->addr, src: log_map + 0x001A);
589
590 return 0;
591}
592
593static int rtw8922a_read_efuse_usb(struct rtw89_dev *rtwdev, u8 *log_map)
594{
595 rtw8922a_read_efuse_mac_addr(rtwdev, addr: 0x4078);
596
597 return 0;
598}
599
600static int rtw8922a_read_efuse_rf(struct rtw89_dev *rtwdev, u8 *log_map)
601{
602 struct rtw8922a_efuse *map = (struct rtw8922a_efuse *)log_map;
603 struct rtw89_efuse *efuse = &rtwdev->efuse;
604
605 efuse->rfe_type = map->rfe_type;
606 efuse->xtal_cap = map->xtal_k;
607 efuse->country_code[0] = map->country_code[0];
608 efuse->country_code[1] = map->country_code[1];
609 rtw8922a_efuse_parsing_tssi(rtwdev, map);
610 rtw8922a_efuse_parsing_gain_offset(rtwdev, map);
611
612 rtw89_info(rtwdev, "chip rfe_type is %d\n", efuse->rfe_type);
613
614 return 0;
615}
616
617static int rtw8922a_read_efuse(struct rtw89_dev *rtwdev, u8 *log_map,
618 enum rtw89_efuse_block block)
619{
620 switch (block) {
621 case RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO:
622 return rtw8922a_read_efuse_pci_sdio(rtwdev, log_map);
623 case RTW89_EFUSE_BLOCK_HCI_DIG_USB:
624 return rtw8922a_read_efuse_usb(rtwdev, log_map);
625 case RTW89_EFUSE_BLOCK_RF:
626 return rtw8922a_read_efuse_rf(rtwdev, log_map);
627 default:
628 return 0;
629 }
630}
631
632#define THM_TRIM_POSITIVE_MASK BIT(6)
633#define THM_TRIM_MAGNITUDE_MASK GENMASK(5, 0)
634
635static void rtw8922a_phycap_parsing_thermal_trim(struct rtw89_dev *rtwdev,
636 u8 *phycap_map)
637{
638 static const u32 thm_trim_addr[RF_PATH_NUM_8922A] = {0x1706, 0x1733};
639 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
640 u32 addr = rtwdev->chip->phycap_addr;
641 bool pg = true;
642 u8 pg_th;
643 s8 val;
644 u8 i;
645
646 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
647 pg_th = phycap_map[thm_trim_addr[i] - addr];
648 if (pg_th == 0xff) {
649 info->thermal_trim[i] = 0;
650 pg = false;
651 break;
652 }
653
654 val = u8_get_bits(v: pg_th, THM_TRIM_MAGNITUDE_MASK);
655
656 if (!(pg_th & THM_TRIM_POSITIVE_MASK))
657 val *= -1;
658
659 info->thermal_trim[i] = val;
660
661 rtw89_debug(rtwdev, mask: RTW89_DBG_RFK,
662 fmt: "[THERMAL][TRIM] path=%d thermal_trim=0x%x (%d)\n",
663 i, pg_th, val);
664 }
665
666 info->pg_thermal_trim = pg;
667}
668
669static void rtw8922a_phycap_parsing_pa_bias_trim(struct rtw89_dev *rtwdev,
670 u8 *phycap_map)
671{
672 static const u32 pabias_trim_addr[RF_PATH_NUM_8922A] = {0x1707, 0x1734};
673 static const u32 check_pa_pad_trim_addr = 0x1700;
674 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
675 u32 addr = rtwdev->chip->phycap_addr;
676 u8 val;
677 u8 i;
678
679 val = phycap_map[check_pa_pad_trim_addr - addr];
680 if (val != 0xff)
681 info->pg_pa_bias_trim = true;
682
683 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
684 info->pa_bias_trim[i] = phycap_map[pabias_trim_addr[i] - addr];
685
686 rtw89_debug(rtwdev, mask: RTW89_DBG_RFK,
687 fmt: "[PA_BIAS][TRIM] path=%d pa_bias_trim=0x%x\n",
688 i, info->pa_bias_trim[i]);
689 }
690}
691
692static void rtw8922a_pa_bias_trim(struct rtw89_dev *rtwdev)
693{
694 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
695 u8 pabias_2g, pabias_5g;
696 u8 i;
697
698 if (!info->pg_pa_bias_trim) {
699 rtw89_debug(rtwdev, mask: RTW89_DBG_RFK,
700 fmt: "[PA_BIAS][TRIM] no PG, do nothing\n");
701
702 return;
703 }
704
705 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
706 pabias_2g = FIELD_GET(GENMASK(3, 0), info->pa_bias_trim[i]);
707 pabias_5g = FIELD_GET(GENMASK(7, 4), info->pa_bias_trim[i]);
708
709 rtw89_debug(rtwdev, mask: RTW89_DBG_RFK,
710 fmt: "[PA_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
711 i, pabias_2g, pabias_5g);
712
713 rtw89_write_rf(rtwdev, rf_path: i, RR_BIASA, RR_BIASA_TXG_V1, data: pabias_2g);
714 rtw89_write_rf(rtwdev, rf_path: i, RR_BIASA, RR_BIASA_TXA_V1, data: pabias_5g);
715 }
716}
717
718static void rtw8922a_phycap_parsing_pad_bias_trim(struct rtw89_dev *rtwdev,
719 u8 *phycap_map)
720{
721 static const u32 pad_bias_trim_addr[RF_PATH_NUM_8922A] = {0x1708, 0x1735};
722 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
723 u32 addr = rtwdev->chip->phycap_addr;
724 u8 i;
725
726 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
727 info->pad_bias_trim[i] = phycap_map[pad_bias_trim_addr[i] - addr];
728
729 rtw89_debug(rtwdev, mask: RTW89_DBG_RFK,
730 fmt: "[PAD_BIAS][TRIM] path=%d pad_bias_trim=0x%x\n",
731 i, info->pad_bias_trim[i]);
732 }
733}
734
735static void rtw8922a_pad_bias_trim(struct rtw89_dev *rtwdev)
736{
737 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
738 u8 pad_bias_2g, pad_bias_5g;
739 u8 i;
740
741 if (!info->pg_pa_bias_trim) {
742 rtw89_debug(rtwdev, mask: RTW89_DBG_RFK,
743 fmt: "[PAD_BIAS][TRIM] no PG, do nothing\n");
744 return;
745 }
746
747 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
748 pad_bias_2g = u8_get_bits(v: info->pad_bias_trim[i], GENMASK(3, 0));
749 pad_bias_5g = u8_get_bits(v: info->pad_bias_trim[i], GENMASK(7, 4));
750
751 rtw89_debug(rtwdev, mask: RTW89_DBG_RFK,
752 fmt: "[PAD_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
753 i, pad_bias_2g, pad_bias_5g);
754
755 rtw89_write_rf(rtwdev, rf_path: i, RR_BIASA, RR_BIASD_TXG_V1, data: pad_bias_2g);
756 rtw89_write_rf(rtwdev, rf_path: i, RR_BIASA, RR_BIASD_TXA_V1, data: pad_bias_5g);
757 }
758}
759
/* Parse all PHY-capability (trim) sections from the phycap map.
 * Always returns 0; individual parsers handle unprogrammed fields.
 */
static int rtw8922a_read_phycap(struct rtw89_dev *rtwdev, u8 *phycap_map)
{
	rtw8922a_phycap_parsing_thermal_trim(rtwdev, phycap_map);
	rtw8922a_phycap_parsing_pa_bias_trim(rtwdev, phycap_map);
	rtw8922a_phycap_parsing_pad_bias_trim(rtwdev, phycap_map);

	return 0;
}
768
/* Write the parsed PA and PAD bias trims to the RF registers. */
static void rtw8922a_power_trim(struct rtw89_dev *rtwdev)
{
	rtw8922a_pa_bias_trim(rtwdev);
	rtw8922a_pad_bias_trim(rtwdev);
}
774
/* Program the MAC-side channel configuration for one MAC block:
 * TX sub-band selection, RF bandwidth mode, per-band rate checks and
 * SIFS MAC-TX-enable timings.
 */
static void rtw8922a_set_channel_mac(struct rtw89_dev *rtwdev,
				     const struct rtw89_chan *chan,
				     u8 mac_idx)
{
	u32 sub_carr = rtw89_mac_reg_by_idx(rtwdev, R_BE_TX_SUB_BAND_VALUE, band: mac_idx);
	u32 chk_rate = rtw89_mac_reg_by_idx(rtwdev, R_BE_TXRATE_CHK, band: mac_idx);
	u32 rf_mod = rtw89_mac_reg_by_idx(rtwdev, R_BE_WMAC_RFMOD, band: mac_idx);
	u8 txsb20 = 0, txsb40 = 0, txsb80 = 0;
	u8 rf_mod_val, chk_rate_mask;
	u32 txsb;
	u32 reg;

	/* Collect the TX sub-band index for every duplicate bandwidth below
	 * the channel bandwidth; wider cases fall through to the narrower ones.
	 */
	switch (chan->band_width) {
	case RTW89_CHANNEL_WIDTH_160:
		txsb80 = rtw89_phy_get_txsb(rtwdev, chan, dbw: RTW89_CHANNEL_WIDTH_80);
		fallthrough;
	case RTW89_CHANNEL_WIDTH_80:
		txsb40 = rtw89_phy_get_txsb(rtwdev, chan, dbw: RTW89_CHANNEL_WIDTH_40);
		fallthrough;
	case RTW89_CHANNEL_WIDTH_40:
		txsb20 = rtw89_phy_get_txsb(rtwdev, chan, dbw: RTW89_CHANNEL_WIDTH_20);
		break;
	default:
		break;
	}

	/* Encode RF bandwidth mode and the collected sub-band fields. */
	switch (chan->band_width) {
	case RTW89_CHANNEL_WIDTH_160:
		rf_mod_val = BE_WMAC_RFMOD_160M;
		txsb = u32_encode_bits(v: txsb20, B_BE_TXSB_20M_MASK) |
		       u32_encode_bits(v: txsb40, B_BE_TXSB_40M_MASK) |
		       u32_encode_bits(v: txsb80, B_BE_TXSB_80M_MASK);
		break;
	case RTW89_CHANNEL_WIDTH_80:
		rf_mod_val = BE_WMAC_RFMOD_80M;
		txsb = u32_encode_bits(v: txsb20, B_BE_TXSB_20M_MASK) |
		       u32_encode_bits(v: txsb40, B_BE_TXSB_40M_MASK);
		break;
	case RTW89_CHANNEL_WIDTH_40:
		rf_mod_val = BE_WMAC_RFMOD_40M;
		txsb = u32_encode_bits(v: txsb20, B_BE_TXSB_20M_MASK);
		break;
	case RTW89_CHANNEL_WIDTH_20:
	default:
		rf_mod_val = BE_WMAC_RFMOD_20M;
		txsb = 0;
		break;
	}

	/* Mark the primary 20 MHz position as a one-hot bitmap. */
	if (txsb20 <= BE_PRI20_BITMAP_MAX)
		txsb |= u32_encode_bits(BIT(txsb20), B_BE_PRI20_BITMAP_MASK);

	rtw89_write8_mask(rtwdev, addr: rf_mod, B_BE_WMAC_RFMOD_MASK, data: rf_mod_val);
	rtw89_write32(rtwdev, addr: sub_carr, data: txsb);

	/* Band-dependent TX rate checking: 2 GHz enables CCK band mode,
	 * 5/6 GHz forbids CCK and limits RTS to OFDM 6M.
	 */
	switch (chan->band_type) {
	case RTW89_BAND_2G:
		chk_rate_mask = B_BE_BAND_MODE;
		break;
	case RTW89_BAND_5G:
	case RTW89_BAND_6G:
		chk_rate_mask = B_BE_CHECK_CCK_EN | B_BE_RTS_LIMIT_IN_OFDM6;
		break;
	default:
		rtw89_warn(rtwdev, "Invalid band_type:%d\n", chan->band_type);
		return;
	}

	/* Clear all three bits first, then set only the ones for this band. */
	rtw89_write8_clr(rtwdev, addr: chk_rate, B_BE_BAND_MODE | B_BE_CHECK_CCK_EN |
				 B_BE_RTS_LIMIT_IN_OFDM6);
	rtw89_write8_set(rtwdev, addr: chk_rate, bit: chk_rate_mask);

	/* SIFS MAC-TX-enable timing differs for >=40 MHz vs narrower. */
	switch (chan->band_width) {
	case RTW89_CHANNEL_WIDTH_320:
	case RTW89_CHANNEL_WIDTH_160:
	case RTW89_CHANNEL_WIDTH_80:
	case RTW89_CHANNEL_WIDTH_40:
		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, band: mac_idx);
		rtw89_write32_mask(rtwdev, addr: reg, B_BE_SIFS_MACTXEN_T1_MASK, data: 0x41);
		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, band: mac_idx);
		rtw89_write32_mask(rtwdev, addr: reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, data: 0x41);
		break;
	default:
		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, band: mac_idx);
		rtw89_write32_mask(rtwdev, addr: reg, B_BE_SIFS_MACTXEN_T1_MASK, data: 0x3f);
		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, band: mac_idx);
		rtw89_write32_mask(rtwdev, addr: reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, data: 0x3e);
		break;
	}
}
865
/* Per-channel SCO (sample clock offset) compensation thresholds for the
 * 2.4 GHz CCK channels 1-14; index is (channel - 1). Barker and CCK
 * demodulators use separate tables.
 */
static const u32 rtw8922a_sco_barker_threshold[14] = {
	0x1fe4f, 0x1ff5e, 0x2006c, 0x2017b, 0x2028a, 0x20399, 0x204a8, 0x205b6,
	0x206c5, 0x207d4, 0x208e3, 0x209f2, 0x20b00, 0x20d8a
};

static const u32 rtw8922a_sco_cck_threshold[14] = {
	0x2bdac, 0x2bf21, 0x2c095, 0x2c209, 0x2c37e, 0x2c4f2, 0x2c666, 0x2c7db,
	0x2c94f, 0x2cac3, 0x2cc38, 0x2cdac, 0x2cf21, 0x2d29e
};
875
876static int rtw8922a_ctrl_sco_cck(struct rtw89_dev *rtwdev,
877 u8 primary_ch, enum rtw89_bandwidth bw,
878 enum rtw89_phy_idx phy_idx)
879{
880 u8 ch_element;
881
882 if (primary_ch >= 14)
883 return -EINVAL;
884
885 ch_element = primary_ch - 1;
886
887 rtw89_phy_write32_idx(rtwdev, R_BK_FC0INV, B_BK_FC0INV,
888 data: rtw8922a_sco_barker_threshold[ch_element],
889 phy_idx);
890 rtw89_phy_write32_idx(rtwdev, R_CCK_FC0INV, B_CCK_FC0INV,
891 data: rtw8922a_sco_cck_threshold[ch_element],
892 phy_idx);
893
894 return 0;
895}
896
/* One BB gain register slot: per-path register addresses and field masks,
 * split between 2 GHz (gain_g) and 5/6 GHz (gain_a) variants.
 */
struct rtw8922a_bb_gain {
	u32 gain_g[BB_PATH_NUM_8922A];	/* 2 GHz register per RF path */
	u32 gain_a[BB_PATH_NUM_8922A];	/* 5/6 GHz register per RF path */
	u32 gain_g_mask;		/* field mask within gain_g regs */
	u32 gain_a_mask;		/* field mask within gain_a regs */
};
903
/* RX power-level (RPL) compensation register slots, one byte-wide field
 * per 20 MHz sub-channel position of each bandwidth (path A addresses;
 * path B adds a 0x400 offset at write time).
 */
static const struct rtw89_reg_def rpl_comp_bw160[RTW89_BW20_SC_160M] = {
	{ .addr = 0x41E8, .mask = 0xFF00},
	{ .addr = 0x41E8, .mask = 0xFF0000},
	{ .addr = 0x41E8, .mask = 0xFF000000},
	{ .addr = 0x41EC, .mask = 0xFF},
	{ .addr = 0x41EC, .mask = 0xFF00},
	{ .addr = 0x41EC, .mask = 0xFF0000},
	{ .addr = 0x41EC, .mask = 0xFF000000},
	{ .addr = 0x41F0, .mask = 0xFF}
};

static const struct rtw89_reg_def rpl_comp_bw80[RTW89_BW20_SC_80M] = {
	{ .addr = 0x41F4, .mask = 0xFF},
	{ .addr = 0x41F4, .mask = 0xFF00},
	{ .addr = 0x41F4, .mask = 0xFF0000},
	{ .addr = 0x41F4, .mask = 0xFF000000}
};

static const struct rtw89_reg_def rpl_comp_bw40[RTW89_BW20_SC_40M] = {
	{ .addr = 0x41F0, .mask = 0xFF0000},
	{ .addr = 0x41F0, .mask = 0xFF000000}
};

static const struct rtw89_reg_def rpl_comp_bw20[RTW89_BW20_SC_20M] = {
	{ .addr = 0x41F0, .mask = 0xFF00}
};
930
/* LNA gain table register slots: two consecutive entries share a register,
 * alternating between the low (0xFF00/0xFF) and high (0xFF000000/0xFF0000)
 * byte fields. Second column of each address pair is RF path B.
 */
static const struct rtw8922a_bb_gain bb_gain_lna[LNA_GAIN_NUM] = {
	{ .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C},
	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
	{ .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C},
	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
	{ .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470},
	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
	{ .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470},
	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
	{ .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474},
	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
	{ .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474},
	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
	{ .gain_g = {0x40a8, 0x44a8}, .gain_a = {0x4078, 0x4478},
	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
};

/* TIA gain table register slots; fields are 9 bits wide (0x1FF-based masks). */
static const struct rtw8922a_bb_gain bb_gain_tia[TIA_GAIN_NUM] = {
	{ .gain_g = {0x4054, 0x4454}, .gain_a = {0x4054, 0x4454},
	  .gain_g_mask = 0x7FC0000, .gain_a_mask = 0x1FF},
	{ .gain_g = {0x4058, 0x4458}, .gain_a = {0x4054, 0x4454},
	  .gain_g_mask = 0x1FF, .gain_a_mask = 0x3FE00 },
};
954
/* Register/mask layout for gain-bypass entries, mirroring
 * struct rtw8922a_bb_gain (2 GHz vs 5/6 GHz, per RF path).
 * Not referenced in this chunk — presumably used by code elsewhere
 * in the file.
 */
struct rtw8922a_bb_gain_bypass {
	u32 gain_g[BB_PATH_NUM_8922A];
	u32 gain_a[BB_PATH_NUM_8922A];
	u32 gain_mask_g;
	u32 gain_mask_a;
};
961
962static void rtw8922a_set_rpl_gain(struct rtw89_dev *rtwdev,
963 const struct rtw89_chan *chan,
964 enum rtw89_rf_path path,
965 enum rtw89_phy_idx phy_idx)
966{
967 const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
968 u8 gain_band = rtw89_subband_to_gain_band_be(subband: chan->subband_type);
969 u32 reg_path_ofst = 0;
970 u32 mask;
971 s32 val;
972 u32 reg;
973 int i;
974
975 if (path == RF_PATH_B)
976 reg_path_ofst = 0x400;
977
978 for (i = 0; i < RTW89_BW20_SC_160M; i++) {
979 reg = rpl_comp_bw160[i].addr | reg_path_ofst;
980 mask = rpl_comp_bw160[i].mask;
981 val = gain->rpl_ofst_160[gain_band][path][i];
982 rtw89_phy_write32_idx(rtwdev, addr: reg, mask, data: val, phy_idx);
983 }
984
985 for (i = 0; i < RTW89_BW20_SC_80M; i++) {
986 reg = rpl_comp_bw80[i].addr | reg_path_ofst;
987 mask = rpl_comp_bw80[i].mask;
988 val = gain->rpl_ofst_80[gain_band][path][i];
989 rtw89_phy_write32_idx(rtwdev, addr: reg, mask, data: val, phy_idx);
990 }
991
992 for (i = 0; i < RTW89_BW20_SC_40M; i++) {
993 reg = rpl_comp_bw40[i].addr | reg_path_ofst;
994 mask = rpl_comp_bw40[i].mask;
995 val = gain->rpl_ofst_40[gain_band][path][i];
996 rtw89_phy_write32_idx(rtwdev, addr: reg, mask, data: val, phy_idx);
997 }
998
999 for (i = 0; i < RTW89_BW20_SC_20M; i++) {
1000 reg = rpl_comp_bw20[i].addr | reg_path_ofst;
1001 mask = rpl_comp_bw20[i].mask;
1002 val = gain->rpl_ofst_20[gain_band][path][i];
1003 rtw89_phy_write32_idx(rtwdev, addr: reg, mask, data: val, phy_idx);
1004 }
1005}
1006
1007static void rtw8922a_set_lna_tia_gain(struct rtw89_dev *rtwdev,
1008 const struct rtw89_chan *chan,
1009 enum rtw89_rf_path path,
1010 enum rtw89_phy_idx phy_idx)
1011{
1012 const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
1013 u8 gain_band = rtw89_subband_to_gain_band_be(subband: chan->subband_type);
1014 enum rtw89_phy_bb_bw_be bw_type;
1015 s32 val;
1016 u32 reg;
1017 u32 mask;
1018 int i;
1019
1020 bw_type = chan->band_width <= RTW89_CHANNEL_WIDTH_40 ?
1021 RTW89_BB_BW_20_40 : RTW89_BB_BW_80_160_320;
1022
1023 for (i = 0; i < LNA_GAIN_NUM; i++) {
1024 if (chan->band_type == RTW89_BAND_2G) {
1025 reg = bb_gain_lna[i].gain_g[path];
1026 mask = bb_gain_lna[i].gain_g_mask;
1027 } else {
1028 reg = bb_gain_lna[i].gain_a[path];
1029 mask = bb_gain_lna[i].gain_a_mask;
1030 }
1031 val = gain->lna_gain[gain_band][bw_type][path][i];
1032 rtw89_phy_write32_idx(rtwdev, addr: reg, mask, data: val, phy_idx);
1033 }
1034
1035 for (i = 0; i < TIA_GAIN_NUM; i++) {
1036 if (chan->band_type == RTW89_BAND_2G) {
1037 reg = bb_gain_tia[i].gain_g[path];
1038 mask = bb_gain_tia[i].gain_g_mask;
1039 } else {
1040 reg = bb_gain_tia[i].gain_a[path];
1041 mask = bb_gain_tia[i].gain_a_mask;
1042 }
1043 val = gain->tia_gain[gain_band][bw_type][path][i];
1044 rtw89_phy_write32_idx(rtwdev, addr: reg, mask, data: val, phy_idx);
1045 }
1046}
1047
/* Apply all gain tables for one RF path: LNA/TIA first, then RPL offsets. */
static void rtw8922a_set_gain(struct rtw89_dev *rtwdev,
			      const struct rtw89_chan *chan,
			      enum rtw89_rf_path path,
			      enum rtw89_phy_idx phy_idx)
{
	rtw8922a_set_lna_tia_gain(rtwdev, chan, path, phy_idx);
	rtw8922a_set_rpl_gain(rtwdev, chan, path, phy_idx);
}
1056
1057static void rtw8922a_set_rx_gain_normal_cck(struct rtw89_dev *rtwdev,
1058 const struct rtw89_chan *chan,
1059 enum rtw89_rf_path path)
1060{
1061 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1062 s8 value = -gain->offset[path][RTW89_GAIN_OFFSET_2G_CCK]; /* S(8,2) */
1063 u8 fraction = value & 0x3;
1064
1065 if (fraction) {
1066 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20,
1067 data: (0x4 - fraction) << 1);
1068 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40,
1069 data: (0x4 - fraction) << 1);
1070
1071 value >>= 2;
1072 rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST,
1073 data: value + 1 + 0xdc);
1074 } else {
1075 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20, data: 0);
1076 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40, data: 0);
1077
1078 value >>= 2;
1079 rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST,
1080 data: value + 0xdc);
1081 }
1082}
1083
/* Apply the efuse OFDM RX gain offset for one path.
 *
 * The raw offset feeds the RSSI offset field directly (biased by 0xF8);
 * the negated, x4-scaled value is then split across up to three signed
 * 8-bit compensation fields (v1..v3), each saturated to the s8 range,
 * and mirrored into both the RPL and the TB-RSSI compensation registers.
 */
static void rtw8922a_set_rx_gain_normal_ofdm(struct rtw89_dev *rtwdev,
					     const struct rtw89_chan *chan,
					     enum rtw89_rf_path path)
{
	static const u32 rssi_tb_bias_comp[2] = {0x41f8, 0x45f8};
	static const u32 rssi_tb_ext_comp[2] = {0x4208, 0x4608};
	static const u32 rssi_ofst_addr[2] = {0x40c8, 0x44c8};
	static const u32 rpl_bias_comp[2] = {0x41e8, 0x45e8};
	static const u32 rpl_ext_comp[2] = {0x41f8, 0x45f8};
	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
	enum rtw89_gain_offset gain_band;
	s8 v1, v2, v3;
	s32 value;

	gain_band = rtw89_subband_to_gain_offset_band_of_ofdm(subband: chan->subband_type);
	value = gain->offset[path][gain_band];
	rtw89_phy_write32_mask(rtwdev, addr: rssi_ofst_addr[path], mask: 0xff000000, data: value + 0xF8);

	/* Split -4 * value into up to three s8 chunks, each clamped. */
	value *= -4;
	v1 = clamp_t(s32, value, S8_MIN, S8_MAX);
	value -= v1;
	v2 = clamp_t(s32, value, S8_MIN, S8_MAX);
	value -= v2;
	v3 = clamp_t(s32, value, S8_MIN, S8_MAX);

	rtw89_phy_write32_mask(rtwdev, addr: rpl_bias_comp[path], mask: 0xff, data: v1);
	rtw89_phy_write32_mask(rtwdev, addr: rpl_ext_comp[path], mask: 0xff, data: v2);
	rtw89_phy_write32_mask(rtwdev, addr: rpl_ext_comp[path], mask: 0xff00, data: v3);

	rtw89_phy_write32_mask(rtwdev, addr: rssi_tb_bias_comp[path], mask: 0xff0000, data: v1);
	rtw89_phy_write32_mask(rtwdev, addr: rssi_tb_ext_comp[path], mask: 0xff0000, data: v2);
	rtw89_phy_write32_mask(rtwdev, addr: rssi_tb_ext_comp[path], mask: 0xff000000, data: v3);
}
1117
/* Apply efuse RX gain offsets for one path; no-op when the efuse gain
 * block was not programmed. CCK compensation only applies on 2 GHz.
 */
static void rtw8922a_set_rx_gain_normal(struct rtw89_dev *rtwdev,
					const struct rtw89_chan *chan,
					enum rtw89_rf_path path)
{
	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;

	if (!gain->offset_valid)
		return;

	if (chan->band_type == RTW89_BAND_2G)
		rtw8922a_set_rx_gain_normal_cck(rtwdev, chan, path);

	rtw8922a_set_rx_gain_normal_ofdm(rtwdev, chan, path);
}
1132
1133static void rtw8922a_set_cck_parameters(struct rtw89_dev *rtwdev, u8 central_ch,
1134 enum rtw89_phy_idx phy_idx)
1135{
1136 if (central_ch == 14) {
1137 rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, data: 0x3b13ff, phy_idx);
1138 rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, data: 0x1c42de, phy_idx);
1139 rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, data: 0xfdb0ad, phy_idx);
1140 rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, data: 0xf60f6e, phy_idx);
1141 rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, data: 0xfd8f92, phy_idx);
1142 rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, data: 0x02d011, phy_idx);
1143 rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, data: 0x01c02c, phy_idx);
1144 rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, data: 0xfff00a, phy_idx);
1145 } else {
1146 rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, data: 0x3a63ca, phy_idx);
1147 rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, data: 0x2a833f, phy_idx);
1148 rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, data: 0x1491f8, phy_idx);
1149 rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, data: 0x03c0b0, phy_idx);
1150 rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, data: 0xfccff1, phy_idx);
1151 rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, data: 0xfccfc3, phy_idx);
1152 rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, data: 0xfebfdc, phy_idx);
1153 rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, data: 0xffdff7, phy_idx);
1154 }
1155}
1156
/* PHY-side channel switch: per-path gains, band select, carrier frequency
 * and its inverse, CCK coefficients (2 GHz), and the channel index field.
 */
static void rtw8922a_ctrl_ch(struct rtw89_dev *rtwdev,
			     const struct rtw89_chan *chan,
			     enum rtw89_phy_idx phy_idx)
{
	static const u32 band_sel[2] = {0x4160, 0x4560};
	u16 central_freq = chan->freq;
	u8 central_ch = chan->channel;
	u8 band = chan->band_type;
	bool is_2g = band == RTW89_BAND_2G;
	u8 chan_idx;
	u8 path;
	u8 sco;

	if (!central_freq) {
		rtw89_warn(rtwdev, "Invalid central_freq\n");
		return;
	}

	rtw8922a_set_gain(rtwdev, chan, path: RF_PATH_A, phy_idx);
	rtw8922a_set_gain(rtwdev, chan, path: RF_PATH_B, phy_idx);

	/* Bit 26 of the per-path band-select register flags 2 GHz. */
	for (path = RF_PATH_A; path < BB_PATH_NUM_8922A; path++)
		rtw89_phy_write32_idx(rtwdev, addr: band_sel[path], BIT((26)), data: is_2g, phy_idx);

	rtw8922a_set_rx_gain_normal(rtwdev, chan, path: RF_PATH_A);
	rtw8922a_set_rx_gain_normal(rtwdev, chan, path: RF_PATH_B);

	/* sco = round(2^18 / freq), the scaled reciprocal of the carrier. */
	rtw89_phy_write32_idx(rtwdev, R_FC0, B_FC0, data: central_freq, phy_idx);
	sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
	rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_FC0_INV, data: sco, phy_idx);

	if (band == RTW89_BAND_2G)
		rtw8922a_set_cck_parameters(rtwdev, central_ch, phy_idx);

	chan_idx = rtw89_encode_chan_idx(rtwdev, central_ch: chan->primary_channel, band);
	rtw89_phy_write32_idx(rtwdev, R_MAC_PIN_SEL, B_CH_IDX_SEG0, data: chan_idx, phy_idx);
}
1194
1195static void
1196rtw8922a_ctrl_bw(struct rtw89_dev *rtwdev, u8 pri_sb, u8 bw,
1197 enum rtw89_phy_idx phy_idx)
1198{
1199 switch (bw) {
1200 case RTW89_CHANNEL_WIDTH_5:
1201 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, data: 0x0, phy_idx);
1202 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, data: 0x1, phy_idx);
1203 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, data: 0x0, phy_idx);
1204 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, data: 0x1, phy_idx);
1205 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, data: 0x0, phy_idx);
1206 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, data: 0x0, phy_idx);
1207 break;
1208 case RTW89_CHANNEL_WIDTH_10:
1209 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, data: 0x0, phy_idx);
1210 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, data: 0x2, phy_idx);
1211 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, data: 0x0, phy_idx);
1212 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, data: 0x1, phy_idx);
1213 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, data: 0x0, phy_idx);
1214 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, data: 0x0, phy_idx);
1215 break;
1216 case RTW89_CHANNEL_WIDTH_20:
1217 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, data: 0x0, phy_idx);
1218 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, data: 0x0, phy_idx);
1219 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, data: 0x0, phy_idx);
1220 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, data: 0x1, phy_idx);
1221 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, data: 0x0, phy_idx);
1222 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, data: 0x0, phy_idx);
1223 break;
1224 case RTW89_CHANNEL_WIDTH_40:
1225 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, data: 0x1, phy_idx);
1226 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, data: 0x0, phy_idx);
1227 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, data: pri_sb, phy_idx);
1228 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, data: 0x1, phy_idx);
1229 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, data: 0x0, phy_idx);
1230 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, data: 0x0, phy_idx);
1231 break;
1232 case RTW89_CHANNEL_WIDTH_80:
1233 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, data: 0x2, phy_idx);
1234 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, data: 0x0, phy_idx);
1235 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, data: pri_sb, phy_idx);
1236 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, data: 0x1, phy_idx);
1237 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, data: 0x1, phy_idx);
1238 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, data: 0x1, phy_idx);
1239 break;
1240 case RTW89_CHANNEL_WIDTH_160:
1241 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, data: 0x3, phy_idx);
1242 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, data: 0x0, phy_idx);
1243 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, data: pri_sb, phy_idx);
1244 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, data: 0x1, phy_idx);
1245 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, data: 0x1, phy_idx);
1246 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, data: 0x1, phy_idx);
1247 break;
1248 default:
1249 rtw89_warn(rtwdev, "Fail to switch bw (bw:%d, pri_sb:%d)\n", bw,
1250 pri_sb);
1251 break;
1252 }
1253
1254 if (bw == RTW89_CHANNEL_WIDTH_40)
1255 rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, data: 1, phy_idx);
1256 else
1257 rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, data: 0, phy_idx);
1258}
1259
/* Return the known spur frequency (MHz) for this channel, or 0 for none.
 * Currently a stub: no spur entries are defined for 8922A, so callers'
 * spur-elimination paths always take their "disable" branch.
 */
static u32 rtw8922a_spur_freq(struct rtw89_dev *rtwdev,
			      const struct rtw89_chan *chan)
{
	return 0;
}
1265
#define CARRIER_SPACING_312_5 312500 /* 312.5 kHz */
#define CARRIER_SPACING_78_125 78125 /* 78.125 kHz */
#define MAX_TONE_NUM 2048

/* Configure the CSI spur-suppression tone index for a known spur, or
 * disable CSI weighting when no spur applies to this channel.
 * Note: rtw8922a_spur_freq() currently always returns 0, so only the
 * disable path runs today.
 */
static void rtw8922a_set_csi_tone_idx(struct rtw89_dev *rtwdev,
				      const struct rtw89_chan *chan,
				      enum rtw89_phy_idx phy_idx)
{
	s32 freq_diff, csi_idx, csi_tone_idx;
	u32 spur_freq;

	spur_freq = rtw8922a_spur_freq(rtwdev, chan);
	if (spur_freq == 0) {
		rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN,
				      data: 0, phy_idx);
		return;
	}

	/* Distance from the carrier in Hz, quantized to 78.125 kHz tones,
	 * then wrapped into the 2048-tone index space.
	 */
	freq_diff = (spur_freq - chan->freq) * 1000000;
	csi_idx = s32_div_u32_round_closest(dividend: freq_diff, CARRIER_SPACING_78_125);
	s32_div_u32_round_down(dividend: csi_idx, MAX_TONE_NUM, remainder: &csi_tone_idx);

	rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_TONE_IDX,
			      data: csi_tone_idx, phy_idx);
	rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN, data: 1, phy_idx);
}
1292
/* Narrow-band interference (NBI) notch filter register layout per RF path:
 * two notches, each with an integer tone index, a fractional index and an
 * enable bit. Path B registers sit 0x400 above path A.
 */
static const struct rtw89_nbi_reg_def rtw8922a_nbi_reg_def[] = {
	[RF_PATH_A] = {
		.notch1_idx = {0x41a0, 0xFF},
		.notch1_frac_idx = {0x41a0, 0xC00},
		.notch1_en = {0x41a0, 0x1000},
		.notch2_idx = {0x41ac, 0xFF},
		.notch2_frac_idx = {0x41ac, 0xC00},
		.notch2_en = {0x41ac, 0x1000},
	},
	[RF_PATH_B] = {
		.notch1_idx = {0x45a0, 0xFF},
		.notch1_frac_idx = {0x45a0, 0xC00},
		.notch1_en = {0x45a0, 0x1000},
		.notch2_idx = {0x45ac, 0xFF},
		.notch2_frac_idx = {0x45ac, 0xC00},
		.notch2_en = {0x45ac, 0x1000},
	},
};
1311
/* Configure one RF path's NBI notch filter against a known spur, or
 * disable both notches when no spur applies. For 160 MHz channels the
 * spur may fall in the secondary 80 MHz half, in which case notch 2 is
 * used instead of notch 1.
 * Note: rtw8922a_spur_freq() currently always returns 0, so only the
 * disable path runs today.
 */
static void rtw8922a_set_nbi_tone_idx(struct rtw89_dev *rtwdev,
				      const struct rtw89_chan *chan,
				      enum rtw89_rf_path path,
				      enum rtw89_phy_idx phy_idx)
{
	const struct rtw89_nbi_reg_def *nbi = &rtw8922a_nbi_reg_def[path];
	s32 nbi_frac_idx, nbi_frac_tone_idx;
	s32 nbi_idx, nbi_tone_idx;
	bool notch2_chk = false;
	u32 spur_freq, fc;
	s32 freq_diff;

	spur_freq = rtw8922a_spur_freq(rtwdev, chan);
	if (spur_freq == 0) {
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch1_en.addr,
				      mask: nbi->notch1_en.mask, data: 0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch2_en.addr,
				      mask: nbi->notch2_en.mask, data: 0, phy_idx);
		return;
	}

	/* For 160 MHz, re-center on the 80 MHz half nearest the spur and
	 * decide whether the spur lands in the non-primary half (notch 2).
	 */
	fc = chan->freq;
	if (chan->band_width == RTW89_CHANNEL_WIDTH_160) {
		fc = (spur_freq > fc) ? fc + 40 : fc - 40;
		if ((fc > spur_freq &&
		     chan->channel < chan->primary_channel) ||
		    (fc < spur_freq &&
		     chan->channel > chan->primary_channel))
			notch2_chk = true;
	}

	/* Integer part in 312.5 kHz tones; remainder in 78.125 kHz steps. */
	freq_diff = (spur_freq - fc) * 1000000;
	nbi_idx = s32_div_u32_round_down(dividend: freq_diff, CARRIER_SPACING_312_5,
					 remainder: &nbi_frac_idx);

	if (chan->band_width == RTW89_CHANNEL_WIDTH_20) {
		s32_div_u32_round_down(dividend: nbi_idx + 32, divisor: 64, remainder: &nbi_tone_idx);
	} else {
		u16 tone_para = (chan->band_width == RTW89_CHANNEL_WIDTH_40) ?
				128 : 256;

		s32_div_u32_round_down(dividend: nbi_idx, divisor: tone_para, remainder: &nbi_tone_idx);
	}
	nbi_frac_tone_idx =
		s32_div_u32_round_closest(dividend: nbi_frac_idx, CARRIER_SPACING_78_125);

	/* Toggle the chosen notch enable 0->1 and keep the other disabled. */
	if (chan->band_width == RTW89_CHANNEL_WIDTH_160 && notch2_chk) {
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch2_idx.addr,
				      mask: nbi->notch2_idx.mask, data: nbi_tone_idx, phy_idx);
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch2_frac_idx.addr,
				      mask: nbi->notch2_frac_idx.mask, data: nbi_frac_tone_idx,
				      phy_idx);
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch2_en.addr,
				      mask: nbi->notch2_en.mask, data: 0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch2_en.addr,
				      mask: nbi->notch2_en.mask, data: 1, phy_idx);
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch1_en.addr,
				      mask: nbi->notch1_en.mask, data: 0, phy_idx);
	} else {
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch1_idx.addr,
				      mask: nbi->notch1_idx.mask, data: nbi_tone_idx, phy_idx);
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch1_frac_idx.addr,
				      mask: nbi->notch1_frac_idx.mask, data: nbi_frac_tone_idx,
				      phy_idx);
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch1_en.addr,
				      mask: nbi->notch1_en.mask, data: 0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch1_en.addr,
				      mask: nbi->notch1_en.mask, data: 1, phy_idx);
		rtw89_phy_write32_idx(rtwdev, addr: nbi->notch2_en.addr,
				      mask: nbi->notch2_en.mask, data: 0, phy_idx);
	}
}
1384
/* Configure spur elimination: CSI tone weighting plus the NBI notch
 * filters on both RF paths.
 */
static void rtw8922a_spur_elimination(struct rtw89_dev *rtwdev,
				      const struct rtw89_chan *chan,
				      enum rtw89_phy_idx phy_idx)
{
	rtw8922a_set_csi_tone_idx(rtwdev, chan, phy_idx);
	rtw8922a_set_nbi_tone_idx(rtwdev, chan, path: RF_PATH_A, phy_idx);
	rtw8922a_set_nbi_tone_idx(rtwdev, chan, path: RF_PATH_B, phy_idx);
}
1393
1394static void rtw8922a_ctrl_afe_dac(struct rtw89_dev *rtwdev, enum rtw89_bandwidth bw,
1395 enum rtw89_rf_path path)
1396{
1397 u32 cr_ofst = 0x0;
1398
1399 if (path == RF_PATH_B)
1400 cr_ofst = 0x100;
1401
1402 switch (bw) {
1403 case RTW89_CHANNEL_WIDTH_5:
1404 case RTW89_CHANNEL_WIDTH_10:
1405 case RTW89_CHANNEL_WIDTH_20:
1406 case RTW89_CHANNEL_WIDTH_40:
1407 case RTW89_CHANNEL_WIDTH_80:
1408 rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, data: 0xE);
1409 rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, data: 0x7);
1410 break;
1411 case RTW89_CHANNEL_WIDTH_160:
1412 rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, data: 0xD);
1413 rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, data: 0x6);
1414 break;
1415 default:
1416 break;
1417 }
1418}
1419
/* BB MCU boot register sequences, replayed in order by
 * rtw8922a_bbmcu_cr_init(). The two PHYs currently use identical
 * sequences but are kept as separate tables.
 */
static const struct rtw89_reg2_def bb_mcu0_init_reg[] = {
	{0x6990, 0x00000000},
	{0x6994, 0x00000000},
	{0x6998, 0x00000000},
	{0x6820, 0xFFFFFFFE},
	{0x6800, 0xC0000FFE},
	{0x6808, 0x76543210},
	{0x6814, 0xBFBFB000},
	{0x6818, 0x0478C009},
	{0x6800, 0xC0000FFF},
	{0x6820, 0xFFFFFFFF},
};

static const struct rtw89_reg2_def bb_mcu1_init_reg[] = {
	{0x6990, 0x00000000},
	{0x6994, 0x00000000},
	{0x6998, 0x00000000},
	{0x6820, 0xFFFFFFFE},
	{0x6800, 0xC0000FFE},
	{0x6808, 0x76543210},
	{0x6814, 0xBFBFB000},
	{0x6818, 0x0478C009},
	{0x6800, 0xC0000FFF},
	{0x6820, 0xFFFFFFFF},
};
1445
1446static void rtw8922a_bbmcu_cr_init(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1447{
1448 const struct rtw89_reg2_def *reg;
1449 int size;
1450 int i;
1451
1452 if (phy_idx == RTW89_PHY_0) {
1453 reg = bb_mcu0_init_reg;
1454 size = ARRAY_SIZE(bb_mcu0_init_reg);
1455 } else {
1456 reg = bb_mcu1_init_reg;
1457 size = ARRAY_SIZE(bb_mcu1_init_reg);
1458 }
1459
1460 for (i = 0; i < size; i++, reg++)
1461 rtw89_bbmcu_write32(rtwdev, addr: reg->addr, data: reg->data, phy_idx);
1462}
1463
/* Per-PHY register field masks, indexed by enum rtw89_phy_idx. */
static const u32 dmac_sys_mask[2] = {B_BE_DMAC_BB_PHY0_MASK, B_BE_DMAC_BB_PHY1_MASK};
static const u32 bbrst_mask[2] = {B_BE_FEN_BBPLAT_RSTB, B_BE_FEN_BB1PLAT_RSTB};
static const u32 glbrst_mask[2] = {B_BE_FEN_BB_IP_RSTN, B_BE_FEN_BB1_IP_RSTN};
static const u32 mcu_bootrdy_mask[2] = {B_BE_BOOT_RDY0, B_BE_BOOT_RDY1};
1468
/* Pre-initialize one BB/PHY: pulse the global and platform resets,
 * set the MCU boot-ready bit (PHY1 only at this stage), power up the
 * BB MCU memory, then replay the MCU boot register sequence.
 * The write order here is a hardware bring-up sequence; do not reorder.
 */
static void rtw8922a_bb_preinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
{
	u32 rdy = 0;

	if (phy_idx == RTW89_PHY_1)
		rdy = 1;

	rtw89_write32_mask(rtwdev, R_BE_DMAC_SYS_CR32B, mask: dmac_sys_mask[phy_idx], data: 0x7FF9);
	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, mask: glbrst_mask[phy_idx], data: 0x0);
	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, mask: bbrst_mask[phy_idx], data: 0x0);
	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, mask: glbrst_mask[phy_idx], data: 0x1);
	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, mask: mcu_bootrdy_mask[phy_idx], data: rdy);
	rtw89_write32_mask(rtwdev, R_BE_MEM_PWR_CTRL, B_BE_MEM_BBMCU0_DS_V1, data: 0);

	/* Let the reset/power changes settle before touching MCU CRs. */
	fsleep(usecs: 1);
	rtw8922a_bbmcu_cr_init(rtwdev, phy_idx);
}
1486
/* Post-initialize one BB/PHY after firmware load: release platform reset
 * (and boot-ready for PHY0), then apply a batch of baseband defaults:
 * clocking, TX factor thresholds, rate thresholds, RX feature enables,
 * slope/magnitude calibration values, and a final UDP coefficient toggle.
 */
static void rtw8922a_bb_postinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
{
	if (phy_idx == RTW89_PHY_0)
		rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, bit: mcu_bootrdy_mask[phy_idx]);
	rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, bit: bbrst_mask[phy_idx]);

	rtw89_phy_write32_set(rtwdev, R_BBCLK, B_CLK_640M);
	rtw89_phy_write32_clr(rtwdev, R_TXSCALE, B_TXFCTR_EN);
	rtw89_phy_set_phy_regs(rtwdev, R_TXFCTR, B_TXFCTR_THD, val: 0x200);
	rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_EHT_RATE_TH, val: 0xA);
	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_HE_RATE_TH, val: 0xA);
	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_HT_VHT_TH, val: 0xAAA);
	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_EHT_MCS14, val: 0x1);
	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_EHT_MCS15, val: 0x1);
	/* EHT-TB/HE-ERSU/HE-MU/TB RX paths start disabled here. */
	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_EHTTB_EN, val: 0x0);
	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEERSU_EN, val: 0x0);
	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEMU_EN, val: 0x0);
	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_TB_EN, val: 0x0);
	rtw89_phy_set_phy_regs(rtwdev, R_SU_PUNC, B_SU_PUNC_EN, val: 0x1);
	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_HWGEN_EN, val: 0x1);
	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_PWROFST_COMP, val: 0x1);
	rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_BY_SLOPE, val: 0x1);
	rtw89_phy_set_phy_regs(rtwdev, R_MAG_A, B_MGA_AEND, val: 0xe0);
	rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_MAG_AB, val: 0xe0c000);
	rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_A, val: 0x3FE0);
	rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_B, val: 0x3FE0);
	rtw89_phy_set_phy_regs(rtwdev, R_SC_CORNER, B_SC_CORNER, val: 0x200);
	/* Toggle the UDP coefficient bit 0 -> 1 to latch it. */
	rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, data: 0x0, phy_idx);
	rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, data: 0x1, phy_idx);
}
1517
/* Enable or disable the baseband for one PHY.
 * Enable: release the async reset, re-enable 2 GHz CCK CCA when on
 * 2 GHz, and re-enable PD hit. Disable: the reverse order, with a short
 * settle delay before asserting the reset.
 */
static void rtw8922a_bb_reset_en(struct rtw89_dev *rtwdev, enum rtw89_band band,
				 bool en, enum rtw89_phy_idx phy_idx)
{
	if (en) {
		rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, data: 1, phy_idx);
		if (band == RTW89_BAND_2G)
			rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1,
					      B_RXCCA_BE1_DIS, data: 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, data: 0x0, phy_idx);
	} else {
		rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, data: 0x1, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, data: 0x1, phy_idx);
		fsleep(usecs: 1);
		rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, data: 0, phy_idx);
	}
}
1534
/* Select the TMAC TX path(s) by programming the path-common control
 * registers (0x11A00..0x11A18) for the given PHY.
 *
 * Returns 0 on success, -EINVAL for an unknown tx_path. NOTE(review):
 * on -EINVAL the initializer's RF_PATH_AB defaults are still written to
 * hardware before returning — confirm whether that fallback is intended.
 */
static int rtw8922a_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev,
				      enum rtw89_rf_path tx_path,
				      enum rtw89_phy_idx phy_idx)
{
	/* Defaults below equal the RF_PATH_AB configuration. */
	struct rtw89_reg2_def path_com_cr[] = {
		{0x11A00, 0x21C86900},
		{0x11A04, 0x00E4E433},
		{0x11A08, 0x39390CC9},
		{0x11A0C, 0x4E433240},
		{0x11A10, 0x90CC900E},
		{0x11A14, 0x00240393},
		{0x11A18, 0x201C8600},
	};
	int ret = 0;
	u32 reg;
	int i;

	rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL, data: 0x0, phy_idx);

	/* PHY1 only takes effect in DBCC mode. */
	if (phy_idx == RTW89_PHY_1 && !rtwdev->dbcc_en)
		return 0;

	if (tx_path == RF_PATH_A) {
		path_com_cr[0].data = 0x21C82900;
		path_com_cr[1].data = 0x00E4E431;
		path_com_cr[2].data = 0x39390C49;
		path_com_cr[3].data = 0x4E431240;
		path_com_cr[4].data = 0x90C4900E;
		path_com_cr[6].data = 0x201C8200;
	} else if (tx_path == RF_PATH_B) {
		path_com_cr[0].data = 0x21C04900;
		path_com_cr[1].data = 0x00E4E032;
		path_com_cr[2].data = 0x39380C89;
		path_com_cr[3].data = 0x4E032240;
		path_com_cr[4].data = 0x80C8900E;
		path_com_cr[6].data = 0x201C0400;
	} else if (tx_path == RF_PATH_AB) {
		path_com_cr[0].data = 0x21C86900;
		path_com_cr[1].data = 0x00E4E433;
		path_com_cr[2].data = 0x39390CC9;
		path_com_cr[3].data = 0x4E433240;
		path_com_cr[4].data = 0x90CC900E;
		path_com_cr[6].data = 0x201C8600;
	} else {
		ret = -EINVAL;
	}

	for (i = 0; i < ARRAY_SIZE(path_com_cr); i++) {
		reg = rtw89_mac_reg_by_idx(rtwdev, reg_base: path_com_cr[i].addr, band: phy_idx);
		rtw89_write32(rtwdev, addr: reg, data: path_com_cr[i].data);
	}

	return ret;
}
1589
/* Intentionally empty: 8922A needs no explicit BB reset in this hook. */
static void rtw8922a_bb_reset(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
{
}
1593
1594static int rtw8922a_cfg_rx_nss_limit(struct rtw89_dev *rtwdev, u8 rx_nss,
1595 enum rtw89_phy_idx phy_idx)
1596{
1597 if (rx_nss == 1) {
1598 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, data: 0, phy_idx);
1599 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, data: 0, phy_idx);
1600 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX,
1601 HE_N_USER_MAX_8922A, phy_idx);
1602 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, data: 0, phy_idx);
1603 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, data: 0, phy_idx);
1604 rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, data: 0, phy_idx);
1605 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, data: 0,
1606 phy_idx);
1607 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX,
1608 HE_N_USER_MAX_8922A, phy_idx);
1609 } else if (rx_nss == 2) {
1610 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, data: 1, phy_idx);
1611 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, data: 1, phy_idx);
1612 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX,
1613 HE_N_USER_MAX_8922A, phy_idx);
1614 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, data: 1, phy_idx);
1615 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, data: 1, phy_idx);
1616 rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, data: 1, phy_idx);
1617 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, data: 1,
1618 phy_idx);
1619 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX,
1620 HE_N_USER_MAX_8922A, phy_idx);
1621 } else {
1622 return -EINVAL;
1623 }
1624
1625 return 0;
1626}
1627
1628static void rtw8922a_tssi_reset(struct rtw89_dev *rtwdev,
1629 enum rtw89_rf_path path,
1630 enum rtw89_phy_idx phy_idx)
1631{
1632 if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
1633 if (phy_idx == RTW89_PHY_0) {
1634 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, data: 0x0);
1635 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, data: 0x1);
1636 } else {
1637 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, data: 0x0);
1638 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, data: 0x1);
1639 }
1640 } else {
1641 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, data: 0x0);
1642 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, data: 0x1);
1643 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, data: 0x0);
1644 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, data: 0x1);
1645 }
1646}
1647
1648static int rtw8922a_ctrl_rx_path_tmac(struct rtw89_dev *rtwdev,
1649 enum rtw89_rf_path rx_path,
1650 enum rtw89_phy_idx phy_idx)
1651{
1652 u8 rx_nss = (rx_path == RF_PATH_AB) ? 2 : 1;
1653
1654 /* Set to 0 first to avoid abnormal EDCCA report */
1655 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, data: 0x0, phy_idx);
1656
1657 if (rx_path == RF_PATH_A) {
1658 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, data: 0x1, phy_idx);
1659 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, data: 1, phy_idx);
1660 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1661 rtw8922a_tssi_reset(rtwdev, path: rx_path, phy_idx);
1662 } else if (rx_path == RF_PATH_B) {
1663 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, data: 0x2, phy_idx);
1664 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, data: 2, phy_idx);
1665 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1666 rtw8922a_tssi_reset(rtwdev, path: rx_path, phy_idx);
1667 } else if (rx_path == RF_PATH_AB) {
1668 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, data: 0x3, phy_idx);
1669 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, data: 3, phy_idx);
1670 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1671 rtw8922a_tssi_reset(rtwdev, path: rx_path, phy_idx);
1672 } else {
1673 return -EINVAL;
1674 }
1675
1676 return 0;
1677}
1678
/* Program the BB for the requested MLO/DBCC operating mode: the DBCC
 * enable/FA bits, an optional AFE DAC reconfiguration, and a stepped
 * sequence of writes to the EMLSR parameter field.
 *
 * Returns 0 on success, -EOPNOTSUPP for a mode not handled here.
 */
static int rtw8922a_ctrl_mlo(struct rtw89_dev *rtwdev, enum rtw89_mlo_dbcc_mode mode)
{
	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);

	if (mode == MLO_1_PLUS_1_1RF || mode == DBCC_LEGACY) {
		/* both PHYs active -> DBCC on, FA sharing off */
		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x0);
	} else if (mode == MLO_2_PLUS_0_1RF || mode == MLO_0_PLUS_2_1RF ||
		   mode == MLO_DBCC_NOT_SUPPORT) {
		/* single logical PHY -> DBCC off */
		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x1);
	} else {
		return -EOPNOTSUPP;
	}

	if (mode == MLO_2_PLUS_0_1RF) {
		/* both DACs must follow the current channel bandwidth */
		rtw8922a_ctrl_afe_dac(rtwdev, chan->band_width, RF_PATH_A);
		rtw8922a_ctrl_afe_dac(rtwdev, chan->band_width, RF_PATH_B);
	} else {
		/* NOTE(review): this also warns for modes accepted above
		 * (MLO_1_PLUS_1_1RF, DBCC_LEGACY, ...) - confirm whether the
		 * warning is meant only for truly unsupported modes.
		 */
		rtw89_warn(rtwdev, "unsupported MLO mode %d\n", mode);
	}

	rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180);

	/* Mode-specific EMLSR parameter values, written as a sequence of
	 * consecutive updates to the same field - presumably a required
	 * hardware stepping; do not reorder or collapse.
	 */
	if (mode == MLO_2_PLUS_0_1RF) {
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9);
	} else if (mode == MLO_0_PLUS_2_1RF) {
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF);
	} else if ((mode == MLO_1_PLUS_1_1RF) || (mode == DBCC_LEGACY)) {
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x7BAB);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3BAB);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3AAB);
	} else {
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x180);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x0);
	}

	return 0;
}
1724
/* One-time BB hardware setup: disable "sounding without NDP" on both
 * PHYs, clear B_BE_PWR_CTRL_SEL on MAC0 (and MAC1 when DBCC is active),
 * then apply the currently selected MLO/DBCC mode.
 */
static void rtw8922a_bb_sethw(struct rtw89_dev *rtwdev)
{
	u32 reg;

	rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP, B_EN_SND_WO_NDP);
	rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP_C1, B_EN_SND_WO_NDP);

	rtw89_write32_mask(rtwdev, R_BE_PWR_BOOST, B_BE_PWR_CTRL_SEL, 0);
	if (rtwdev->dbcc_en) {
		/* mirror the setting on the second MAC block */
		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_BOOST, RTW89_MAC_1);
		rtw89_write32_mask(rtwdev, reg, B_BE_PWR_CTRL_SEL, 0);
	}

	rtw8922a_ctrl_mlo(rtwdev, rtwdev->mlo_dbcc_mode);
}
1740
1741static void rtw8922a_ctrl_cck_en(struct rtw89_dev *rtwdev, bool cck_en,
1742 enum rtw89_phy_idx phy_idx)
1743{
1744 if (cck_en) {
1745 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, data: 0, phy_idx);
1746 rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, data: 1, phy_idx);
1747 rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF,
1748 data: 0, phy_idx);
1749 } else {
1750 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, data: 1, phy_idx);
1751 rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, data: 0, phy_idx);
1752 rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF,
1753 data: 1, phy_idx);
1754 }
1755}
1756
/* Program the BB for @chan: CCK SCO compensation (2 GHz only), channel,
 * bandwidth, CCK enable, spur elimination, then release the async reset
 * and re-pulse TSSI for both paths.
 */
static void rtw8922a_set_channel_bb(struct rtw89_dev *rtwdev,
				    const struct rtw89_chan *chan,
				    enum rtw89_phy_idx phy_idx)
{
	/* CCK exists only on the 2.4 GHz band */
	bool cck_en = chan->band_type == RTW89_BAND_2G;
	u8 pri_sb = chan->pri_sb_idx;

	if (cck_en)
		rtw8922a_ctrl_sco_cck(rtwdev, chan->primary_channel,
				      chan->band_width, phy_idx);

	rtw8922a_ctrl_ch(rtwdev, chan, phy_idx);
	rtw8922a_ctrl_bw(rtwdev, pri_sb, chan->band_width, phy_idx);
	rtw8922a_ctrl_cck_en(rtwdev, cck_en, phy_idx);
	rtw8922a_spur_elimination(rtwdev, chan, phy_idx);

	rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
	rtw8922a_tssi_reset(rtwdev, RF_PATH_AB, phy_idx);
}
1776
/* Prepare the BB for a channel switch while DBCC is active: disable
 * DBCC and walk the EMLSR parameter field through a per-PHY stepping
 * sequence (values mirror rtw8922a_ctrl_mlo()'s 2+0 / 0+2 sequences).
 * No-op when DBCC is not enabled.
 */
static void rtw8922a_pre_set_channel_bb(struct rtw89_dev *rtwdev,
					enum rtw89_phy_idx phy_idx)
{
	if (!rtwdev->dbcc_en)
		return;

	if (phy_idx == RTW89_PHY_0) {
		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9);
	} else {
		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF);
		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF);
	}
}
1798
1799static void rtw8922a_post_set_channel_bb(struct rtw89_dev *rtwdev,
1800 enum rtw89_mlo_dbcc_mode mode)
1801{
1802 if (!rtwdev->dbcc_en)
1803 return;
1804
1805 rtw8922a_ctrl_mlo(rtwdev, mode);
1806}
1807
/* Top-level channel switch: configure the MAC, then BB, then RF, in
 * that order.
 */
static void rtw8922a_set_channel(struct rtw89_dev *rtwdev,
				 const struct rtw89_chan *chan,
				 enum rtw89_mac_idx mac_idx,
				 enum rtw89_phy_idx phy_idx)
{
	rtw8922a_set_channel_mac(rtwdev, chan, mac_idx);
	rtw8922a_set_channel_bb(rtwdev, chan, phy_idx);
	rtw8922a_set_channel_rf(rtwdev, chan, phy_idx);
}
1817
1818static void rtw8922a_dfs_en_idx(struct rtw89_dev *rtwdev,
1819 enum rtw89_phy_idx phy_idx, enum rtw89_rf_path path,
1820 bool en)
1821{
1822 u32 path_ofst = (path == RF_PATH_B) ? 0x100 : 0x0;
1823
1824 if (en)
1825 rtw89_phy_write32_idx(rtwdev, addr: 0x2800 + path_ofst, BIT(1), data: 1,
1826 phy_idx);
1827 else
1828 rtw89_phy_write32_idx(rtwdev, addr: 0x2800 + path_ofst, BIT(1), data: 0,
1829 phy_idx);
1830}
1831
/* Enable/disable DFS on both RF paths of @phy_idx. */
static void rtw8922a_dfs_en(struct rtw89_dev *rtwdev, bool en,
			    enum rtw89_phy_idx phy_idx)
{
	rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_A, en);
	rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_B, en);
}
1838
1839static void rtw8922a_adc_en_path(struct rtw89_dev *rtwdev,
1840 enum rtw89_rf_path path, bool en)
1841{
1842 u32 val;
1843
1844 val = rtw89_phy_read32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1);
1845
1846 if (en) {
1847 if (path == RF_PATH_A)
1848 val &= ~0x1;
1849 else
1850 val &= ~0x2;
1851 } else {
1852 if (path == RF_PATH_A)
1853 val |= 0x1;
1854 else
1855 val |= 0x2;
1856 }
1857
1858 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1, data: val);
1859}
1860
1861static void rtw8922a_adc_en(struct rtw89_dev *rtwdev, bool en, u8 phy_idx)
1862{
1863 if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
1864 if (phy_idx == RTW89_PHY_0)
1865 rtw8922a_adc_en_path(rtwdev, path: RF_PATH_A, en);
1866 else
1867 rtw8922a_adc_en_path(rtwdev, path: RF_PATH_B, en);
1868 } else {
1869 rtw8922a_adc_en_path(rtwdev, path: RF_PATH_A, en);
1870 rtw8922a_adc_en_path(rtwdev, path: RF_PATH_B, en);
1871 }
1872}
1873
/* Quiesce (enter=true) or restore (enter=false) one MAC/PHY pair
 * around a disruptive reconfiguration.
 *
 * Enter: stop scheduler TX (saving its state in *tx_en), disable PPDU
 * status reports, DFS, continuous TSSI and the ADCs, wait 40 us for
 * in-flight samples to drain, then hold BB reset.
 * Exit: the reverse, ending with scheduler TX resumed from *tx_en.
 * The ordering within each branch is deliberate; do not reorder.
 */
static
void rtw8922a_hal_reset(struct rtw89_dev *rtwdev,
			enum rtw89_phy_idx phy_idx, enum rtw89_mac_idx mac_idx,
			enum rtw89_band band, u32 *tx_en, bool enter)
{
	if (enter) {
		rtw89_chip_stop_sch_tx(rtwdev, mac_idx, tx_en, RTW89_SCH_TX_SEL_ALL);
		rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, false);
		rtw8922a_dfs_en(rtwdev, false, phy_idx);
		rtw8922a_tssi_cont_en_phyidx(rtwdev, false, phy_idx);
		rtw8922a_adc_en(rtwdev, false, phy_idx);
		fsleep(40);
		rtw8922a_bb_reset_en(rtwdev, band, false, phy_idx);
	} else {
		rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, true);
		rtw8922a_adc_en(rtwdev, true, phy_idx);
		rtw8922a_dfs_en(rtwdev, true, phy_idx);
		rtw8922a_tssi_cont_en_phyidx(rtwdev, true, phy_idx);
		rtw8922a_bb_reset_en(rtwdev, band, true, phy_idx);
		rtw89_chip_resume_sch_tx(rtwdev, mac_idx, *tx_en);
	}
}
1896
/* Channel-switch helper chip-op: bracket the switch with pre/post BB+RF
 * hooks and the hal reset. @p->tx_en carries the saved scheduler TX
 * state between the enter and leave invocations.
 */
static void rtw8922a_set_channel_help(struct rtw89_dev *rtwdev, bool enter,
				      struct rtw89_channel_help_params *p,
				      const struct rtw89_chan *chan,
				      enum rtw89_mac_idx mac_idx,
				      enum rtw89_phy_idx phy_idx)
{
	if (enter) {
		rtw8922a_pre_set_channel_bb(rtwdev, phy_idx);
		rtw8922a_pre_set_channel_rf(rtwdev, phy_idx);
	}

	rtw8922a_hal_reset(rtwdev, phy_idx, mac_idx, chan->band_type, &p->tx_en, enter);

	if (!enter) {
		rtw8922a_post_set_channel_bb(rtwdev, rtwdev->mlo_dbcc_mode);
		rtw8922a_post_set_channel_rf(rtwdev, phy_idx);
	}
}
1915
1916static void rtw8922a_rfk_init(struct rtw89_dev *rtwdev)
1917{
1918 struct rtw89_rfk_mcc_info *rfk_mcc = &rtwdev->rfk_mcc;
1919
1920 rtwdev->is_tssi_mode[RF_PATH_A] = false;
1921 rtwdev->is_tssi_mode[RF_PATH_B] = false;
1922 memset(rfk_mcc, 0, sizeof(*rfk_mcc));
1923}
1924
/* Late RF-calibration init on PHY 0: notify the firmware, then run DACK
 * and RX DCK. The millisecond arguments are per-step wait budgets.
 */
static void rtw8922a_rfk_init_late(struct rtw89_dev *rtwdev)
{
	rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, RTW89_PHY_0, 5);

	rtw89_phy_rfk_dack_and_wait(rtwdev, RTW89_PHY_0, 58);
	rtw89_phy_rfk_rxdck_and_wait(rtwdev, RTW89_PHY_0, 32);
}
1932
/* Poll each path in @kpath (bitmask) until its RF mode register leaves
 * value 2, i.e. until the path has settled into RX mode. Polls every
 * 2 us with a 5 ms timeout; a timeout is only logged, not propagated.
 */
static void _wait_rx_mode(struct rtw89_dev *rtwdev, u8 kpath)
{
	u32 rf_mode;
	u8 path;
	int ret;

	for (path = 0; path < RF_PATH_NUM_8922A; path++) {
		if (!(kpath & BIT(path)))
			continue;

		ret = read_poll_timeout_atomic(rtw89_read_rf, rf_mode, rf_mode != 2,
					       2, 5000, false, rtwdev, path, 0x00,
					       RR_MOD_MASK);
		rtw89_debug(rtwdev, RTW89_DBG_RFK,
			    "[RFK] Wait S%d to Rx mode!! (ret = %d)\n",
			    path, ret);
	}
}
1951
/* Run the full per-channel RF calibration chain on PHY 0, bracketed by
 * BTC RFK notifications and a scheduler-TX stop/resume. The calibration
 * order (pre-notify, TXGAPK, IQK, TSSI, DPK, RX DCK) and the per-step
 * wait budgets follow the vendor sequence; do not reorder.
 */
static void rtw8922a_rfk_channel(struct rtw89_dev *rtwdev)
{
	enum rtw89_phy_idx phy_idx = RTW89_PHY_0;
	u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, RF_AB);
	u32 tx_en;

	rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_START);
	rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
	/* calibration requires both paths to be in RX mode first */
	_wait_rx_mode(rtwdev, RF_AB);

	rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, phy_idx, 5);
	rtw89_phy_rfk_txgapk_and_wait(rtwdev, phy_idx, 54);
	rtw89_phy_rfk_iqk_and_wait(rtwdev, phy_idx, 84);
	rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, RTW89_TSSI_NORMAL, 6);
	rtw89_phy_rfk_dpk_and_wait(rtwdev, phy_idx, 34);
	rtw89_phy_rfk_rxdck_and_wait(rtwdev, RTW89_PHY_0, 32);

	rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
	rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_STOP);
}
1972
/* After a band change only TSSI needs recalibrating (scan flavor). */
static void rtw8922a_rfk_band_changed(struct rtw89_dev *rtwdev,
				      enum rtw89_phy_idx phy_idx)
{
	rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, RTW89_TSSI_SCAN, 6);
}
1978
/* Scan start/stop RFK hook; no per-scan calibration needed on 8922A. */
static void rtw8922a_rfk_scan(struct rtw89_dev *rtwdev, bool start)
{
}
1982
/* Periodic RFK tracking hook; intentionally empty on 8922A. */
static void rtw8922a_rfk_track(struct rtw89_dev *rtwdev)
{
}
1986
/* Program the TX power reference for OFDM and CCK. Both references are
 * fixed at 0 here; the values are signed (s16) per the register fields.
 */
static void rtw8922a_set_txpwr_ref(struct rtw89_dev *rtwdev,
				   enum rtw89_phy_idx phy_idx)
{
	s16 ref_ofdm = 0;
	s16 ref_cck = 0;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr reference\n");

	rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL,
				     B_BE_PWR_REF_CTRL_OFDM, ref_ofdm);
	rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL,
				     B_BE_PWR_REF_CTRL_CCK, ref_cck);
}
2000
2001static void rtw8922a_bb_tx_triangular(struct rtw89_dev *rtwdev, bool en,
2002 enum rtw89_phy_idx phy_idx)
2003{
2004 u8 ctrl = en ? 0x1 : 0x0;
2005
2006 rtw89_phy_write32_idx(rtwdev, R_BEDGE3, B_BEDGE_CFG, data: ctrl, phy_idx);
2007}
2008
2009static void rtw8922a_set_tx_shape(struct rtw89_dev *rtwdev,
2010 const struct rtw89_chan *chan,
2011 enum rtw89_phy_idx phy_idx)
2012{
2013 const struct rtw89_rfe_parms *rfe_parms = rtwdev->rfe_parms;
2014 const struct rtw89_tx_shape *tx_shape = &rfe_parms->tx_shape;
2015 u8 tx_shape_idx;
2016 u8 band, regd;
2017
2018 band = chan->band_type;
2019 regd = rtw89_regd_get(rtwdev, band);
2020 tx_shape_idx = (*tx_shape->lmt)[band][RTW89_RS_OFDM][regd];
2021
2022 if (tx_shape_idx == 0)
2023 rtw8922a_bb_tx_triangular(rtwdev, en: false, phy_idx);
2024 else
2025 rtw8922a_bb_tx_triangular(rtwdev, en: true, phy_idx);
2026}
2027
/* Apply the full TX power configuration for @chan: by-rate tables,
 * offsets, spectrum shape, then the regulatory limit and RU limit
 * tables.
 */
static void rtw8922a_set_txpwr(struct rtw89_dev *rtwdev,
			       const struct rtw89_chan *chan,
			       enum rtw89_phy_idx phy_idx)
{
	rtw89_phy_set_txpwr_byrate(rtwdev, chan, phy_idx);
	rtw89_phy_set_txpwr_offset(rtwdev, chan, phy_idx);
	rtw8922a_set_tx_shape(rtwdev, chan, phy_idx);
	rtw89_phy_set_txpwr_limit(rtwdev, chan, phy_idx);
	rtw89_phy_set_txpwr_limit_ru(rtwdev, chan, phy_idx);
}
2038
/* TX power control chip-op; only the reference needs programming. */
static void rtw8922a_set_txpwr_ctrl(struct rtw89_dev *rtwdev,
				    enum rtw89_phy_idx phy_idx)
{
	rtw8922a_set_txpwr_ref(rtwdev, phy_idx);
}
2044
/* Apply TX/RX path selection and the RX NSS limit to both PHYs.
 *
 * NOTE(review): @tx_nss is currently unused - the TX side is configured
 * from @tx_path only. Kept in the signature for interface symmetry;
 * confirm whether a TX NSS limit is intended here.
 */
static void rtw8922a_ctrl_trx_path(struct rtw89_dev *rtwdev,
				   enum rtw89_rf_path tx_path, u8 tx_nss,
				   enum rtw89_rf_path rx_path, u8 rx_nss)
{
	enum rtw89_phy_idx phy_idx;

	for (phy_idx = RTW89_PHY_0; phy_idx <= RTW89_PHY_1; phy_idx++) {
		rtw8922a_ctrl_tx_path_tmac(rtwdev, tx_path, phy_idx);
		rtw8922a_ctrl_rx_path_tmac(rtwdev, rx_path, phy_idx);
		rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
	}
}
2057
/* Adjust the RX front-end of both paths for non-BTG BT TX activity.
 *
 * When @en is true, FIR forcing, wideband-ADC routing and relaxed
 * LNA/TIA/back-off values are applied so WiFi RX tolerates concurrent
 * BT transmissions; when false, the normal (BT-idle) values are
 * restored. The constants are vendor-provided per-register values.
 */
static void rtw8922a_ctrl_nbtg_bt_tx(struct rtw89_dev *rtwdev, bool en,
				     enum rtw89_phy_idx phy_idx)
{
	if (en) {
		/* path A */
		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x3, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A,
				      0xf, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A,
				      0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x80, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x8080, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x34, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x34, phy_idx);
		/* path B */
		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x3, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B,
				      0xf, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B,
				      0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x80, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x8080, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x34, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x34, phy_idx);
	} else {
		/* path A - defaults */
		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A,
				      0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A,
				      0x1, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x1, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x1a, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x2a2a, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x7a6, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x26, phy_idx);
		/* path B - defaults */
		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B,
				      0x0, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B,
				      0x1, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x1, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x2a30, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x7a6, phy_idx);
		rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x26, phy_idx);
	}
}
2105
2106static void rtw8922a_bb_cfg_txrx_path(struct rtw89_dev *rtwdev)
2107{
2108 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, idx: RTW89_SUB_ENTITY_0);
2109 enum rtw89_band band = chan->band_type;
2110 struct rtw89_hal *hal = &rtwdev->hal;
2111 u8 ntx_path = RF_PATH_AB;
2112 u32 tx_en0, tx_en1;
2113
2114 if (hal->antenna_tx == RF_A)
2115 ntx_path = RF_PATH_A;
2116 else if (hal->antenna_tx == RF_B)
2117 ntx_path = RF_PATH_B;
2118
2119 rtw8922a_hal_reset(rtwdev, phy_idx: RTW89_PHY_0, mac_idx: RTW89_MAC_0, band, tx_en: &tx_en0, enter: true);
2120 if (rtwdev->dbcc_en)
2121 rtw8922a_hal_reset(rtwdev, phy_idx: RTW89_PHY_1, mac_idx: RTW89_MAC_1, band,
2122 tx_en: &tx_en1, enter: true);
2123
2124 rtw8922a_ctrl_trx_path(rtwdev, tx_path: ntx_path, tx_nss: 2, rx_path: RF_PATH_AB, rx_nss: 2);
2125
2126 rtw8922a_hal_reset(rtwdev, phy_idx: RTW89_PHY_0, mac_idx: RTW89_MAC_0, band, tx_en: &tx_en0, enter: false);
2127 if (rtwdev->dbcc_en)
2128 rtw8922a_hal_reset(rtwdev, phy_idx: RTW89_PHY_1, mac_idx: RTW89_MAC_1, band,
2129 tx_en: &tx_en0, enter: false);
2130}
2131
/* Read the thermal sensor for @rf_path, applying the efuse thermal trim.
 *
 * To avoid disturbing the RF path during normal operation, the sensor
 * is only sampled when CFO or RFK-track debugging is enabled; otherwise
 * a fixed placeholder of 80 is returned. Returns a value clamped to
 * [0, U8_MAX].
 */
static u8 rtw8922a_get_thermal(struct rtw89_dev *rtwdev, enum rtw89_rf_path rf_path)
{
	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
	int th;

	/* read thermal only if debugging */
	if (!rtw89_debug_is_enabled(rtwdev, RTW89_DBG_CFO | RTW89_DBG_RFK_TRACK))
		return 80;

	/* 1-0-1 toggle of the trigger bit starts a measurement */
	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x0);
	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);

	fsleep(200);

	th = rtw89_read_rf(rtwdev, rf_path, RR_TM, RR_TM_VAL_V1);
	/* trim is stored as signed in the efuse */
	th += (s8)info->thermal_trim[rf_path];

	return clamp_t(int, th, 0, U8_MAX);
}
2152
/* Populate the BT-coex module info (v7 layout) from efuse/HAL data:
 * RFE type, cut version, antenna topology and workaround flags. An RFE
 * type of 0 is invalid and is recorded as an error. Antenna count and
 * type (shared vs dedicated) are derived from RFE type parity and the
 * cut version.
 */
static void rtw8922a_btc_set_rfe(struct rtw89_dev *rtwdev)
{
	union rtw89_btc_module_info *md = &rtwdev->btc.mdinfo;
	struct rtw89_btc_module_v7 *module = &md->md_v7;

	module->rfe_type = rtwdev->efuse.rfe_type;
	module->kt_ver = rtwdev->hal.cv;
	module->bt_solo = 0;
	module->switch_type = BTC_SWITCH_INTERNAL;
	module->wa_type = 0;

	/* defaults; refined below once rfe_type is known valid */
	module->ant.type = BTC_ANT_SHARED;
	module->ant.num = 2;
	module->ant.isolation = 10;
	module->ant.diversity = 0;
	module->ant.single_pos = RF_PATH_A;
	module->ant.btg_pos = RF_PATH_B;

	/* early cuts need the HFP/ZigBee workaround */
	if (module->kt_ver <= 1)
		module->wa_type |= BTC_WA_HFP_ZB;

	rtwdev->btc.cx.other.type = BTC_3CX_NONE;

	if (module->rfe_type == 0) {
		rtwdev->btc.dm.error.map.rfe_type0 = true;
		return;
	}

	/* odd RFE types are 2-antenna, even ones 3-antenna designs */
	module->ant.num = (module->rfe_type % 2) ? 2 : 3;

	if (module->kt_ver == 0)
		module->ant.num = 2;

	if (module->ant.num == 3) {
		module->ant.type = BTC_ANT_DEDICATED;
		module->bt_pos = BTC_BT_ALONE;
	} else {
		module->ant.type = BTC_ANT_SHARED;
		module->bt_pos = BTC_BT_BTG;
	}
	rtwdev->btc.btg_pos = module->ant.btg_pos;
	rtwdev->btc.ant_type = module->ant.type;
}
2196
/* Write one TRX-mask LUT entry for @path: select the BT activity
 * @group via LUTWA, then store the mask @val via LUTWD0. The caller
 * must have LUT write mode enabled (RR_LUTWE) around this.
 */
static
void rtw8922a_set_trx_mask(struct rtw89_dev *rtwdev, u8 path, u8 group, u32 val)
{
	rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, group);
	rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, val);
}
2203
2204static void rtw8922a_btc_init_cfg(struct rtw89_dev *rtwdev)
2205{
2206 struct rtw89_btc *btc = &rtwdev->btc;
2207 struct rtw89_btc_ant_info_v7 *ant = &btc->mdinfo.md_v7.ant;
2208 u32 wl_pri, path_min, path_max;
2209 u8 path;
2210
2211 /* for 1-Ant && 1-ss case: only 1-path */
2212 if (ant->num == 1) {
2213 path_min = ant->single_pos;
2214 path_max = path_min;
2215 } else {
2216 path_min = RF_PATH_A;
2217 path_max = RF_PATH_B;
2218 }
2219
2220 path = path_min;
2221
2222 for (path = path_min; path <= path_max; path++) {
2223 /* set DEBUG_LUT_RFMODE_MASK = 1 to start trx-mask-setup */
2224 rtw89_write_rf(rtwdev, rf_path: path, RR_LUTWE, RFREG_MASK, BIT(17));
2225
2226 /* if GNT_WL=0 && BT=SS_group --> WL Tx/Rx = THRU */
2227 rtw8922a_set_trx_mask(rtwdev, path, group: BTC_BT_SS_GROUP, val: 0x5ff);
2228
2229 /* if GNT_WL=0 && BT=Rx_group --> WL-Rx = THRU + WL-Tx = MASK */
2230 rtw8922a_set_trx_mask(rtwdev, path, group: BTC_BT_RX_GROUP, val: 0x5df);
2231
2232 /* if GNT_WL = 0 && BT = Tx_group -->
2233 * Shared-Ant && BTG-path:WL mask(0x55f), others:WL THRU(0x5ff)
2234 */
2235 if (btc->ant_type == BTC_ANT_SHARED && btc->btg_pos == path)
2236 rtw8922a_set_trx_mask(rtwdev, path, group: BTC_BT_TX_GROUP, val: 0x55f);
2237 else
2238 rtw8922a_set_trx_mask(rtwdev, path, group: BTC_BT_TX_GROUP, val: 0x5ff);
2239
2240 rtw89_write_rf(rtwdev, rf_path: path, RR_LUTWE, RFREG_MASK, data: 0);
2241 }
2242
2243 /* set WL PTA Hi-Pri: Ack-Tx, beacon-tx, Trig-frame-Tx, Null-Tx*/
2244 wl_pri = B_BTC_RSP_ACK_HI | B_BTC_TX_BCN_HI | B_BTC_TX_TRI_HI |
2245 B_BTC_TX_NULL_HI;
2246 rtw89_write32(rtwdev, R_BTC_COEX_WL_REQ_BE, data: wl_pri);
2247
2248 /* set PTA break table */
2249 rtw89_write32(rtwdev, R_BE_BT_BREAK_TABLE, BTC_BREAK_PARAM);
2250
2251 /* ZB coex table init for HFP PTA req-cmd bit-4 define issue COEX-900*/
2252 rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_0, data: 0xda5a5a5a);
2253
2254 rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_1, data: 0xda5a5a5a);
2255
2256 rtw89_write32(rtwdev, R_BTC_ZB_BREAK_TBL, data: 0xf0ffffff);
2257 btc->cx.wl.status.map.init_ok = true;
2258}
2259
2260static void rtw8922a_fill_freq_with_ppdu(struct rtw89_dev *rtwdev,
2261 struct rtw89_rx_phy_ppdu *phy_ppdu,
2262 struct ieee80211_rx_status *status)
2263{
2264 u8 chan_idx = phy_ppdu->chan_idx;
2265 enum nl80211_band band;
2266 u8 ch;
2267
2268 if (chan_idx == 0)
2269 return;
2270
2271 rtw89_decode_chan_idx(rtwdev, chan_idx, ch: &ch, band: &band);
2272 status->freq = ieee80211_channel_to_frequency(chan: ch, band);
2273 status->band = band;
2274}
2275
2276static void rtw8922a_query_ppdu(struct rtw89_dev *rtwdev,
2277 struct rtw89_rx_phy_ppdu *phy_ppdu,
2278 struct ieee80211_rx_status *status)
2279{
2280 u8 path;
2281 u8 *rx_power = phy_ppdu->rssi;
2282
2283 status->signal =
2284 RTW89_RSSI_RAW_TO_DBM(max(rx_power[RF_PATH_A], rx_power[RF_PATH_B]));
2285 for (path = 0; path < rtwdev->chip->rf_path_num; path++) {
2286 status->chains |= BIT(path);
2287 status->chain_signal[path] = RTW89_RSSI_RAW_TO_DBM(rx_power[path]);
2288 }
2289 if (phy_ppdu->valid)
2290 rtw8922a_fill_freq_with_ppdu(rtwdev, phy_ppdu, status);
2291}
2292
/* Release the BB platform/IP resets and kick the DMAC system control
 * register to bring BB/RF up. Always succeeds (returns 0).
 */
static int rtw8922a_mac_enable_bb_rf(struct rtw89_dev *rtwdev)
{
	rtw89_write8_set(rtwdev, R_BE_FEN_RST_ENABLE,
			 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN);
	rtw89_write32(rtwdev, R_BE_DMAC_SYS_CR32B, 0x7FF97FF9);

	return 0;
}
2301
/* Re-assert the BB platform/IP resets to power BB/RF down. Always
 * succeeds (returns 0).
 */
static int rtw8922a_mac_disable_bb_rf(struct rtw89_dev *rtwdev)
{
	rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE,
			 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN);

	return 0;
}
2309
#ifdef CONFIG_PM
/* WoWLAN capabilities advertised to cfg80211: wake on magic packet or
 * disconnect, plus user-defined wake patterns.
 */
static const struct wiphy_wowlan_support rtw_wowlan_stub_8922a = {
	.flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_DISCONNECT,
	.n_patterns = RTW89_MAX_PATTERN_NUM,
	.pattern_max_len = RTW89_MAX_PATTERN_SIZE,
	.pattern_min_len = 1,
};
#endif
2318
/* Chip operation table binding the 8922A-specific implementations and
 * the BE-generation (v2/g7) common helpers into the rtw89 core.
 */
static const struct rtw89_chip_ops rtw8922a_chip_ops = {
	.enable_bb_rf		= rtw8922a_mac_enable_bb_rf,
	.disable_bb_rf		= rtw8922a_mac_disable_bb_rf,
	.bb_preinit		= rtw8922a_bb_preinit,
	.bb_postinit		= rtw8922a_bb_postinit,
	.bb_reset		= rtw8922a_bb_reset,
	.bb_sethw		= rtw8922a_bb_sethw,
	.read_rf		= rtw89_phy_read_rf_v2,
	.write_rf		= rtw89_phy_write_rf_v2,
	.set_channel		= rtw8922a_set_channel,
	.set_channel_help	= rtw8922a_set_channel_help,
	.read_efuse		= rtw8922a_read_efuse,
	.read_phycap		= rtw8922a_read_phycap,
	.fem_setup		= NULL,
	.rfe_gpio		= NULL,
	.rfk_hw_init		= rtw8922a_rfk_hw_init,
	.rfk_init		= rtw8922a_rfk_init,
	.rfk_init_late		= rtw8922a_rfk_init_late,
	.rfk_channel		= rtw8922a_rfk_channel,
	.rfk_band_changed	= rtw8922a_rfk_band_changed,
	.rfk_scan		= rtw8922a_rfk_scan,
	.rfk_track		= rtw8922a_rfk_track,
	.power_trim		= rtw8922a_power_trim,
	.set_txpwr		= rtw8922a_set_txpwr,
	.set_txpwr_ctrl		= rtw8922a_set_txpwr_ctrl,
	.init_txpwr_unit	= NULL,
	.get_thermal		= rtw8922a_get_thermal,
	.ctrl_btg_bt_rx		= rtw8922a_ctrl_btg_bt_rx,
	.query_ppdu		= rtw8922a_query_ppdu,
	.ctrl_nbtg_bt_tx	= rtw8922a_ctrl_nbtg_bt_tx,
	.cfg_txrx_path		= rtw8922a_bb_cfg_txrx_path,
	.set_txpwr_ul_tb_offset	= NULL,
	.pwr_on_func		= rtw8922a_pwr_on_func,
	.pwr_off_func		= rtw8922a_pwr_off_func,
	.query_rxdesc		= rtw89_core_query_rxdesc_v2,
	.fill_txdesc		= rtw89_core_fill_txdesc_v2,
	.fill_txdesc_fwcmd	= rtw89_core_fill_txdesc_fwcmd_v2,
	.cfg_ctrl_path		= rtw89_mac_cfg_ctrl_path_v2,
	.mac_cfg_gnt		= rtw89_mac_cfg_gnt_v2,
	.stop_sch_tx		= rtw89_mac_stop_sch_tx_v2,
	.resume_sch_tx		= rtw89_mac_resume_sch_tx_v2,
	.h2c_dctl_sec_cam	= rtw89_fw_h2c_dctl_sec_cam_v2,
	.h2c_default_cmac_tbl	= rtw89_fw_h2c_default_cmac_tbl_g7,
	.h2c_assoc_cmac_tbl	= rtw89_fw_h2c_assoc_cmac_tbl_g7,
	.h2c_ampdu_cmac_tbl	= rtw89_fw_h2c_ampdu_cmac_tbl_g7,
	.h2c_default_dmac_tbl	= rtw89_fw_h2c_default_dmac_tbl_v2,
	.h2c_update_beacon	= rtw89_fw_h2c_update_beacon_be,
	.h2c_ba_cam		= rtw89_fw_h2c_ba_cam_v1,

	.btc_set_rfe		= rtw8922a_btc_set_rfe,
	.btc_init_cfg		= rtw8922a_btc_init_cfg,
};
2371
/* Static chip description for the RTL8922A (WiFi 7 / 802.11be, 2x2,
 * BE generation): capabilities, memory layout, efuse geometry, register
 * maps and feature flags consumed by the rtw89 core.
 */
const struct rtw89_chip_info rtw8922a_chip_info = {
	.chip_id		= RTL8922A,
	.chip_gen		= RTW89_CHIP_BE,
	.ops			= &rtw8922a_chip_ops,
	.mac_def		= &rtw89_mac_gen_be,
	.phy_def		= &rtw89_phy_gen_be,
	.fw_basename		= RTW8922A_FW_BASENAME,
	.fw_format_max		= RTW8922A_FW_FORMAT_MAX,
	.try_ce_fw		= false,
	.bbmcu_nr		= 1,
	.needed_fw_elms		= RTW89_BE_GEN_DEF_NEEDED_FW_ELEMENTS,
	.fifo_size		= 589824,
	.small_fifo_size	= false,
	.dle_scc_rsvd_size	= 0,
	.max_amsdu_limit	= 8000,
	.dis_2g_40m_ul_ofdma	= false,
	.rsvd_ple_ofst		= 0x8f800,
	.hfc_param_ini		= rtw8922a_hfc_param_ini_pcie,
	.dle_mem		= rtw8922a_dle_mem_pcie,
	.wde_qempty_acq_grpnum	= 4,
	.wde_qempty_mgq_grpsel	= 4,
	.rf_base_addr		= {0xe000, 0xf000},
	.pwr_on_seq		= NULL,
	.pwr_off_seq		= NULL,
	.bb_table		= NULL,
	.bb_gain_table		= NULL,
	.rf_table		= {},
	.nctl_table		= NULL,
	.nctl_post_table	= NULL,
	.dflt_parms		= NULL, /* load parm from fw */
	.rfe_parms_conf		= NULL, /* load parm from fw */
	.txpwr_factor_rf	= 2,
	.txpwr_factor_mac	= 1,
	.dig_table		= NULL,
	.dig_regs		= &rtw8922a_dig_regs,
	.tssi_dbw_table		= NULL,
	.support_chanctx_num	= 2,
	.support_bands		= BIT(NL80211_BAND_2GHZ) |
				  BIT(NL80211_BAND_5GHZ) |
				  BIT(NL80211_BAND_6GHZ),
	.support_bandwidths	= BIT(NL80211_CHAN_WIDTH_20) |
				  BIT(NL80211_CHAN_WIDTH_40) |
				  BIT(NL80211_CHAN_WIDTH_80) |
				  BIT(NL80211_CHAN_WIDTH_160),
	.support_unii4		= true,
	.ul_tb_waveform_ctrl	= false,
	.ul_tb_pwr_diff		= false,
	.hw_sec_hdr		= true,
	.rf_path_num		= 2,
	.tx_nss			= 2,
	.rx_nss			= 2,
	.acam_num		= 128,
	.bcam_num		= 20,
	.scam_num		= 32,
	.bacam_num		= 24,
	.bacam_dynamic_num	= 8,
	.bacam_ver		= RTW89_BACAM_V1,
	.ppdu_max_usr		= 16,
	.sec_ctrl_efuse_size	= 4,
	.physical_efuse_size	= 0x1300,
	.logical_efuse_size	= 0x70000,
	.limit_efuse_size	= 0x40000,
	.dav_phy_efuse_size	= 0,
	.dav_log_efuse_size	= 0,
	.efuse_blocks		= rtw8922a_efuse_blocks,
	.phycap_addr		= 0x1700,
	.phycap_size		= 0x38,

	.ps_mode_supported	= BIT(RTW89_PS_MODE_RFOFF) |
				  BIT(RTW89_PS_MODE_CLK_GATED) |
				  BIT(RTW89_PS_MODE_PWR_GATED),
	.low_power_hci_modes	= 0,
	.h2c_cctl_func_id	= H2C_FUNC_MAC_CCTLINFO_UD_G7,
	.hci_func_en_addr	= R_BE_HCI_FUNC_EN,
	.h2c_desc_size		= sizeof(struct rtw89_rxdesc_short_v2),
	.txwd_body_size		= sizeof(struct rtw89_txwd_body_v2),
	.txwd_info_size		= sizeof(struct rtw89_txwd_info_v2),
	.h2c_ctrl_reg		= R_BE_H2CREG_CTRL,
	.h2c_counter_reg	= {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_H2C_DEQ_CNT_MASK >> 8},
	.h2c_regs		= rtw8922a_h2c_regs,
	.c2h_ctrl_reg		= R_BE_C2HREG_CTRL,
	.c2h_counter_reg	= {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_C2H_ENQ_CNT_MASK >> 8},
	.c2h_regs		= rtw8922a_c2h_regs,
	.page_regs		= &rtw8922a_page_regs,
	.wow_reason_reg		= R_AX_C2HREG_DATA3_V1 + 3,
	.cfo_src_fd		= true,
	.cfo_hw_comp		= true,
	.dcfo_comp		= NULL,
	.dcfo_comp_sft		= 0,
	.imr_info		= NULL,
	.imr_dmac_table	= &rtw8922a_imr_dmac_table,
	.imr_cmac_table	= &rtw8922a_imr_cmac_table,
	.rrsr_cfgs		= &rtw8922a_rrsr_cfgs,
	.bss_clr_vld		= {R_BSS_CLR_VLD_V2, B_BSS_CLR_VLD0_V2},
	.bss_clr_map_reg	= R_BSS_CLR_MAP_V2,
	.dma_ch_mask		= 0,
	.edcca_regs		= &rtw8922a_edcca_regs,
#ifdef CONFIG_PM
	.wowlan_stub		= &rtw_wowlan_stub_8922a,
#endif
	.xtal_info		= NULL,
};
EXPORT_SYMBOL(rtw8922a_chip_info);
2475
/* Module metadata: firmware blob request and standard module tags. */
MODULE_FIRMWARE(RTW8922A_MODULE_FIRMWARE);
MODULE_AUTHOR("Realtek Corporation");
MODULE_DESCRIPTION("Realtek 802.11be wireless 8922A driver");
MODULE_LICENSE("Dual BSD/GPL");
2480

/* Source: linux/drivers/net/wireless/realtek/rtw89/rtw8922a.c */