1 | /* QLogic qedr NIC Driver |
2 | * Copyright (c) 2015-2016 QLogic Corporation |
3 | * |
4 | * This software is available to you under a choice of one of two |
5 | * licenses. You may choose to be licensed under the terms of the GNU |
6 | * General Public License (GPL) Version 2, available from the file |
7 | * COPYING in the main directory of this source tree, or the |
8 | * OpenIB.org BSD license below: |
9 | * |
10 | * Redistribution and use in source and binary forms, with or |
11 | * without modification, are permitted provided that the following |
12 | * conditions are met: |
13 | * |
14 | * - Redistributions of source code must retain the above |
15 | * copyright notice, this list of conditions and the following |
16 | * disclaimer. |
17 | * |
18 | * - Redistributions in binary form must reproduce the above |
19 | * copyright notice, this list of conditions and the following |
 *        disclaimer in the documentation and/or other materials
21 | * provided with the distribution. |
22 | * |
23 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, |
24 | * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
25 | * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND |
26 | * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS |
27 | * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN |
28 | * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN |
29 | * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
30 | * SOFTWARE. |
31 | */ |
32 | #ifndef __QED_HSI_RDMA__ |
33 | #define __QED_HSI_RDMA__ |
34 | |
35 | #include <linux/qed/rdma_common.h> |
36 | |
/* rdma completion notification queue element */
struct rdma_cnqe {
	struct regpair cq_handle;	/* 64-bit handle of the CQ this notification refers to */
};
41 | |
/* Responder CQE: completion of a receive (RQ/SRQ) work request */
struct rdma_cqe_responder {
	struct regpair srq_wr_id;	/* work-request id (SRQ completions) */
	struct regpair qp_handle;
	__le32 imm_data_or_inv_r_Key;	/* immediate data or invalidated r_key; which one is
					 * indicated by the IMM_FLG / INV_FLG bits below
					 */
	__le32 length;			/* received payload length */
	__le32 imm_data_hi;
	__le16 rq_cons_or_srq_id;
	u8 flags;
#define RDMA_CQE_RESPONDER_TOGGLE_BIT_MASK 0x1
#define RDMA_CQE_RESPONDER_TOGGLE_BIT_SHIFT 0
#define RDMA_CQE_RESPONDER_TYPE_MASK 0x3	/* enum rdma_cqe_type */
#define RDMA_CQE_RESPONDER_TYPE_SHIFT 1
#define RDMA_CQE_RESPONDER_INV_FLG_MASK 0x1
#define RDMA_CQE_RESPONDER_INV_FLG_SHIFT 3
#define RDMA_CQE_RESPONDER_IMM_FLG_MASK 0x1
#define RDMA_CQE_RESPONDER_IMM_FLG_SHIFT 4
#define RDMA_CQE_RESPONDER_RDMA_FLG_MASK 0x1
#define RDMA_CQE_RESPONDER_RDMA_FLG_SHIFT 5
#define RDMA_CQE_RESPONDER_RESERVED2_MASK 0x3
#define RDMA_CQE_RESPONDER_RESERVED2_SHIFT 6
	u8 status;			/* enum rdma_cqe_responder_status_enum */
};
64 | |
/* Requester CQE: completion of a send-queue work request */
struct rdma_cqe_requester {
	__le16 sq_cons;			/* SQ consumer index after this completion */
	__le16 reserved0;
	__le32 reserved1;
	struct regpair qp_handle;
	struct regpair reserved2;
	__le32 reserved3;
	__le16 reserved4;
	u8 flags;
#define RDMA_CQE_REQUESTER_TOGGLE_BIT_MASK 0x1
#define RDMA_CQE_REQUESTER_TOGGLE_BIT_SHIFT 0
#define RDMA_CQE_REQUESTER_TYPE_MASK 0x3	/* enum rdma_cqe_type */
#define RDMA_CQE_REQUESTER_TYPE_SHIFT 1
#define RDMA_CQE_REQUESTER_RESERVED5_MASK 0x1F
#define RDMA_CQE_REQUESTER_RESERVED5_SHIFT 3
	u8 status;			/* enum rdma_cqe_requester_status_enum */
};
82 | |
/* Fields shared by all CQE variants; the TOGGLE/TYPE/status fields sit at the
 * same offsets as in the requester and responder layouts.
 */
struct rdma_cqe_common {
	struct regpair reserved0;
	struct regpair qp_handle;
	__le16 reserved1[7];
	u8 flags;
#define RDMA_CQE_COMMON_TOGGLE_BIT_MASK 0x1
#define RDMA_CQE_COMMON_TOGGLE_BIT_SHIFT 0
#define RDMA_CQE_COMMON_TYPE_MASK 0x3	/* enum rdma_cqe_type */
#define RDMA_CQE_COMMON_TYPE_SHIFT 1
#define RDMA_CQE_COMMON_RESERVED2_MASK 0x1F
#define RDMA_CQE_COMMON_RESERVED2_SHIFT 3
	u8 status;
};
96 | |
/* rdma completion queue element; the active member is selected by the 2-bit
 * TYPE field in flags, which occupies the same bits in every variant.
 */
union rdma_cqe {
	struct rdma_cqe_responder resp;
	struct rdma_cqe_requester req;
	struct rdma_cqe_common cmn;
};
103 | |
/* CQE requester status enumeration (values of rdma_cqe_requester.status) */
enum rdma_cqe_requester_status_enum {
	RDMA_CQE_REQ_STS_OK,
	RDMA_CQE_REQ_STS_BAD_RESPONSE_ERR,
	RDMA_CQE_REQ_STS_LOCAL_LENGTH_ERR,
	RDMA_CQE_REQ_STS_LOCAL_QP_OPERATION_ERR,
	RDMA_CQE_REQ_STS_LOCAL_PROTECTION_ERR,
	RDMA_CQE_REQ_STS_MEMORY_MGT_OPERATION_ERR,
	RDMA_CQE_REQ_STS_REMOTE_INVALID_REQUEST_ERR,
	RDMA_CQE_REQ_STS_REMOTE_ACCESS_ERR,
	RDMA_CQE_REQ_STS_REMOTE_OPERATION_ERR,
	RDMA_CQE_REQ_STS_RNR_NAK_RETRY_CNT_ERR,
	RDMA_CQE_REQ_STS_TRANSPORT_RETRY_CNT_ERR,
	RDMA_CQE_REQ_STS_WORK_REQUEST_FLUSHED_ERR,
	RDMA_CQE_REQ_STS_XRC_VOILATION_ERR,	/* sic: misspelling kept, name is part of the driver API */
	RDMA_CQE_REQ_STS_SIG_ERR,
	MAX_RDMA_CQE_REQUESTER_STATUS_ENUM
};
122 | |
/* CQE responder status enumeration (values of rdma_cqe_responder.status) */
enum rdma_cqe_responder_status_enum {
	RDMA_CQE_RESP_STS_OK,
	RDMA_CQE_RESP_STS_LOCAL_ACCESS_ERR,
	RDMA_CQE_RESP_STS_LOCAL_LENGTH_ERR,
	RDMA_CQE_RESP_STS_LOCAL_QP_OPERATION_ERR,
	RDMA_CQE_RESP_STS_LOCAL_PROTECTION_ERR,
	RDMA_CQE_RESP_STS_MEMORY_MGT_OPERATION_ERR,
	RDMA_CQE_RESP_STS_REMOTE_INVALID_REQUEST_ERR,
	RDMA_CQE_RESP_STS_WORK_REQUEST_FLUSHED_ERR,
	MAX_RDMA_CQE_RESPONDER_STATUS_ENUM
};
135 | |
/* CQE type enumeration; encoded in the 2-bit TYPE field of the CQE flags */
enum rdma_cqe_type {
	RDMA_CQE_TYPE_REQUESTER,
	RDMA_CQE_TYPE_RESPONDER_RQ,
	RDMA_CQE_TYPE_RESPONDER_SRQ,
	RDMA_CQE_TYPE_RESPONDER_XRC_SRQ,
	RDMA_CQE_TYPE_INVALID,
	MAX_RDMA_CQE_TYPE
};
145 | |
/* SQ scatter/gather element */
struct rdma_sq_sge {
	__le32 length;		/* buffer length in bytes */
	struct regpair addr;	/* buffer address */
	__le32 l_key;		/* local memory key */
};
151 | |
/* RQ scatter/gather element; the l_key is split low/high around the SGE count
 * inside the flags dword (see the masks below).
 */
struct rdma_rq_sge {
	struct regpair addr;
	__le32 length;
	__le32 flags;
#define RDMA_RQ_SGE_L_KEY_LO_MASK 0x3FFFFFF
#define RDMA_RQ_SGE_L_KEY_LO_SHIFT 0
#define RDMA_RQ_SGE_NUM_SGES_MASK 0x7
#define RDMA_RQ_SGE_NUM_SGES_SHIFT 26
#define RDMA_RQ_SGE_L_KEY_HI_MASK 0x7
#define RDMA_RQ_SGE_L_KEY_HI_SHIFT 29
};
163 | |
164 | struct { |
165 | struct regpair ; |
166 | u8 /* number of SGEs in WQE */; |
167 | u8 [7]; |
168 | }; |
169 | |
/* SRQ scatter/gather element */
struct rdma_srq_sge {
	struct regpair addr;	/* buffer address */
	__le32 length;		/* buffer length in bytes */
	__le32 l_key;		/* local memory key */
};
175 | |
176 | union rdma_srq_elm { |
177 | struct rdma_srq_wqe_header ; |
178 | struct rdma_srq_sge sge; |
179 | }; |
180 | |
/* Rdma doorbell data for flags update */
struct rdma_pwm_flags_data {
	__le16 icid;	/* internal CID */
	u8 agg_flags;	/* aggregative flags */
	u8 reserved;
};
187 | |
/* Rdma doorbell data for SQ and RQ */
struct rdma_pwm_val16_data {
	__le16 icid;	/* internal CID */
	__le16 value;	/* producer value written to the doorbell */
};

/* Lets the icid/value pair be written to the doorbell as one 32-bit store */
union rdma_pwm_val16_data_union {
	struct rdma_pwm_val16_data as_struct;
	__le32 as_dword;
};
198 | |
/* Rdma doorbell data for CQ */
struct rdma_pwm_val32_data {
	__le16 icid;	/* internal CID */
	u8 agg_flags;	/* aggregative flags */
	u8 params;
#define RDMA_PWM_VAL32_DATA_AGG_CMD_MASK 0x3
#define RDMA_PWM_VAL32_DATA_AGG_CMD_SHIFT 0
#define RDMA_PWM_VAL32_DATA_BYPASS_EN_MASK 0x1
#define RDMA_PWM_VAL32_DATA_BYPASS_EN_SHIFT 2
#define RDMA_PWM_VAL32_DATA_CONN_TYPE_IS_IWARP_MASK 0x1
#define RDMA_PWM_VAL32_DATA_CONN_TYPE_IS_IWARP_SHIFT 3
#define RDMA_PWM_VAL32_DATA_SET_16B_VAL_MASK 0x1
#define RDMA_PWM_VAL32_DATA_SET_16B_VAL_SHIFT 4
#define RDMA_PWM_VAL32_DATA_RESERVED_MASK 0x7
#define RDMA_PWM_VAL32_DATA_RESERVED_SHIFT 5
	__le32 value;	/* value written to the doorbell */
};
216 | |
/* DIF Block size options (value of the RDMA_DIF_PARAMS_BLOCK_SIZE flag) */
enum rdma_dif_block_size {
	RDMA_DIF_BLOCK_512 = 0,
	RDMA_DIF_BLOCK_4096 = 1,
	MAX_RDMA_DIF_BLOCK_SIZE
};

/* DIF CRC initial value (value of the RDMA_DIF_PARAMS_CRC_SEED flag) */
enum rdma_dif_crc_seed {
	RDMA_DIF_CRC_SEED_0000 = 0,
	RDMA_DIF_CRC_SEED_FFFF = 1,
	MAX_RDMA_DIF_CRC_SEED
};
230 | |
/* RDMA DIF Error Result Structure */
struct rdma_dif_error_result {
	__le32 error_intervals;		/* number of intervals with a DIF error */
	__le32 dif_error_1st_interval;	/* index of the first erroneous interval */
	u8 flags;
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_CRC_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_CRC_SHIFT 0
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_APP_TAG_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_APP_TAG_SHIFT 1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_REF_TAG_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_DIF_ERROR_TYPE_REF_TAG_SHIFT 2
#define RDMA_DIF_ERROR_RESULT_RESERVED0_MASK 0xF
#define RDMA_DIF_ERROR_RESULT_RESERVED0_SHIFT 3
#define RDMA_DIF_ERROR_RESULT_TOGGLE_BIT_MASK 0x1
#define RDMA_DIF_ERROR_RESULT_TOGGLE_BIT_SHIFT 7
	u8 reserved1[55];	/* pads the structure to 64 bytes */
};
248 | |
/* DIF IO direction (value of the RDMA_DIF_PARAMS_IO_DIRECTION_FLG flag) */
enum rdma_dif_io_direction_flg {
	RDMA_DIF_DIR_RX = 0,
	RDMA_DIF_DIR_TX = 1,
	MAX_RDMA_DIF_IO_DIRECTION_FLG
};
255 | |
/* DIF (data-integrity field) parameters carried in a WQE */
struct rdma_dif_params {
	__le32 base_ref_tag;	/* reference tag of the first block */
	__le16 app_tag;
	__le16 app_tag_mask;
	__le16 runt_crc_value;	/* CRC of a partial (runt) block; valid when RUNT_VALID_FLG is set */
	__le16 flags;
#define RDMA_DIF_PARAMS_IO_DIRECTION_FLG_MASK 0x1	/* enum rdma_dif_io_direction_flg */
#define RDMA_DIF_PARAMS_IO_DIRECTION_FLG_SHIFT 0
#define RDMA_DIF_PARAMS_BLOCK_SIZE_MASK 0x1		/* enum rdma_dif_block_size */
#define RDMA_DIF_PARAMS_BLOCK_SIZE_SHIFT 1
#define RDMA_DIF_PARAMS_RUNT_VALID_FLG_MASK 0x1
#define RDMA_DIF_PARAMS_RUNT_VALID_FLG_SHIFT 2
#define RDMA_DIF_PARAMS_VALIDATE_CRC_GUARD_MASK 0x1
#define RDMA_DIF_PARAMS_VALIDATE_CRC_GUARD_SHIFT 3
#define RDMA_DIF_PARAMS_VALIDATE_REF_TAG_MASK 0x1
#define RDMA_DIF_PARAMS_VALIDATE_REF_TAG_SHIFT 4
#define RDMA_DIF_PARAMS_VALIDATE_APP_TAG_MASK 0x1
#define RDMA_DIF_PARAMS_VALIDATE_APP_TAG_SHIFT 5
#define RDMA_DIF_PARAMS_CRC_SEED_MASK 0x1		/* enum rdma_dif_crc_seed */
#define RDMA_DIF_PARAMS_CRC_SEED_SHIFT 6
#define RDMA_DIF_PARAMS_RX_REF_TAG_CONST_MASK 0x1
#define RDMA_DIF_PARAMS_RX_REF_TAG_CONST_SHIFT 7
#define RDMA_DIF_PARAMS_BLOCK_GUARD_TYPE_MASK 0x1
#define RDMA_DIF_PARAMS_BLOCK_GUARD_TYPE_SHIFT 8
#define RDMA_DIF_PARAMS_APP_ESCAPE_MASK 0x1
#define RDMA_DIF_PARAMS_APP_ESCAPE_SHIFT 9
#define RDMA_DIF_PARAMS_REF_ESCAPE_MASK 0x1
#define RDMA_DIF_PARAMS_REF_ESCAPE_SHIFT 10
#define RDMA_DIF_PARAMS_RESERVED4_MASK 0x1F
#define RDMA_DIF_PARAMS_RESERVED4_SHIFT 11
	__le32 reserved5;
};
288 | |
289 | |
/* Atomic SQ WQE (compare-and-swap / fetch-and-add); spans three 16-byte SQ
 * elements — see the _1st/_2nd/_3rd views below for the per-element split.
 */
struct rdma_sq_atomic_wqe {
	__le32 reserved1;
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_ATOMIC_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_ATOMIC_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_ATOMIC_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_ATOMIC_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_ATOMIC_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_ATOMIC_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_ATOMIC_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_ATOMIC_WQE_RESERVED0_SHIFT 6
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
	struct regpair remote_va;	/* target virtual address */
	__le32 r_key;		/* remote memory key */
	__le32 reserved2;
	struct regpair cmp_data;	/* compare value (compare-and-swap) */
	struct regpair swap_data;	/* swap value, or addend for fetch-and-add */
};
318 | |
/* First element (16 bytes) of atomic wqe */
struct rdma_sq_atomic_wqe_1st {
	__le32 reserved1;
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_ATOMIC_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_ATOMIC_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_ATOMIC_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_ATOMIC_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_ATOMIC_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_ATOMIC_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_ATOMIC_WQE_1ST_RESERVED0_MASK 0x7
#define RDMA_SQ_ATOMIC_WQE_1ST_RESERVED0_SHIFT 5
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
};
341 | |
/* Second element (16 bytes) of atomic wqe */
struct rdma_sq_atomic_wqe_2nd {
	struct regpair remote_va;	/* target virtual address */
	__le32 r_key;			/* remote memory key */
	__le32 reserved2;
};

/* Third element (16 bytes) of atomic wqe */
struct rdma_sq_atomic_wqe_3rd {
	struct regpair cmp_data;	/* compare value (compare-and-swap) */
	struct regpair swap_data;	/* swap value, or addend for fetch-and-add */
};
354 | |
/* Memory-window bind SQ WQE; spans three 16-byte SQ elements — see the
 * _1st/_2nd/_3rd views below.
 */
struct rdma_sq_bind_wqe {
	struct regpair addr;
	__le32 l_key;
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_BIND_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_BIND_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_BIND_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_BIND_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_BIND_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_BIND_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_BIND_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_BIND_WQE_RESERVED0_SHIFT 6
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
	u8 bind_ctrl;
#define RDMA_SQ_BIND_WQE_ZERO_BASED_MASK 0x1
#define RDMA_SQ_BIND_WQE_ZERO_BASED_SHIFT 0
#define RDMA_SQ_BIND_WQE_RESERVED1_MASK 0x7F
#define RDMA_SQ_BIND_WQE_RESERVED1_SHIFT 1
	u8 access_ctrl;		/* access rights granted to the window */
#define RDMA_SQ_BIND_WQE_REMOTE_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_REMOTE_READ_SHIFT 0
#define RDMA_SQ_BIND_WQE_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_BIND_WQE_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_BIND_WQE_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_BIND_WQE_LOCAL_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_LOCAL_READ_SHIFT 3
#define RDMA_SQ_BIND_WQE_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_BIND_WQE_RESERVED2_MASK 0x7
#define RDMA_SQ_BIND_WQE_RESERVED2_SHIFT 5
	u8 reserved3;
	u8 length_hi;		/* upper 8 bits of the 40-bit length */
	__le32 length_lo;	/* lower 32 bits of the 40-bit length */
	__le32 parent_l_key;
	__le32 reserved4;
	struct rdma_dif_params dif_params;
};
401 | |
/* First element (16 bytes) of bind wqe */
struct rdma_sq_bind_wqe_1st {
	struct regpair addr;
	__le32 l_key;
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_BIND_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_BIND_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_BIND_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_BIND_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_BIND_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_BIND_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_BIND_WQE_1ST_RESERVED0_MASK 0x7
#define RDMA_SQ_BIND_WQE_1ST_RESERVED0_SHIFT 5
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
};
423 | |
/* Second element (16 bytes) of bind wqe */
struct rdma_sq_bind_wqe_2nd {
	u8 bind_ctrl;
#define RDMA_SQ_BIND_WQE_2ND_ZERO_BASED_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_ZERO_BASED_SHIFT 0
#define RDMA_SQ_BIND_WQE_2ND_RESERVED1_MASK 0x7F
#define RDMA_SQ_BIND_WQE_2ND_RESERVED1_SHIFT 1
	u8 access_ctrl;		/* access rights granted to the window */
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_READ_SHIFT 0
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_BIND_WQE_2ND_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_READ_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_READ_SHIFT 3
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_BIND_WQE_2ND_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_BIND_WQE_2ND_RESERVED2_MASK 0x7
#define RDMA_SQ_BIND_WQE_2ND_RESERVED2_SHIFT 5
	u8 reserved3;
	u8 length_hi;		/* upper 8 bits of the 40-bit length */
	__le32 length_lo;	/* lower 32 bits of the 40-bit length */
	__le32 parent_l_key;
	__le32 reserved4;
};

/* Third element (16 bytes) of bind wqe */
struct rdma_sq_bind_wqe_3rd {
	struct rdma_dif_params dif_params;
};
455 | |
/* Structure with only the SQ WQE common
 * fields. Size is of one SQ element (16B)
 */
struct rdma_sq_common_wqe {
	__le32 reserved1[3];
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_COMMON_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_COMMON_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_COMMON_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_COMMON_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_COMMON_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_COMMON_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_COMMON_WQE_RESERVED0_MASK 0x7
#define RDMA_SQ_COMMON_WQE_RESERVED0_SHIFT 5
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
};
478 | |
/* Fast memory registration SQ WQE (see RDMA_SQ_REQ_TYPE_FAST_MR); spans two
 * 16-byte SQ elements — see the _1st/_2nd views below.
 */
struct rdma_sq_fmr_wqe {
	struct regpair addr;
	__le32 l_key;
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_FMR_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_FMR_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_FMR_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_FMR_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_FMR_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_FMR_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_FMR_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_FMR_WQE_RESERVED0_SHIFT 6
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
	u8 fmr_ctrl;
#define RDMA_SQ_FMR_WQE_PAGE_SIZE_LOG_MASK 0x1F	/* log2 of the page size */
#define RDMA_SQ_FMR_WQE_PAGE_SIZE_LOG_SHIFT 0
#define RDMA_SQ_FMR_WQE_ZERO_BASED_MASK 0x1
#define RDMA_SQ_FMR_WQE_ZERO_BASED_SHIFT 5
#define RDMA_SQ_FMR_WQE_BIND_EN_MASK 0x1
#define RDMA_SQ_FMR_WQE_BIND_EN_SHIFT 6
#define RDMA_SQ_FMR_WQE_RESERVED1_MASK 0x1
#define RDMA_SQ_FMR_WQE_RESERVED1_SHIFT 7
	u8 access_ctrl;		/* access rights of the registered region */
#define RDMA_SQ_FMR_WQE_REMOTE_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_REMOTE_READ_SHIFT 0
#define RDMA_SQ_FMR_WQE_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_FMR_WQE_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_FMR_WQE_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_FMR_WQE_LOCAL_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_LOCAL_READ_SHIFT 3
#define RDMA_SQ_FMR_WQE_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_FMR_WQE_RESERVED2_MASK 0x7
#define RDMA_SQ_FMR_WQE_RESERVED2_SHIFT 5
	u8 reserved3;
	u8 length_hi;		/* upper 8 bits of the 40-bit length */
	__le32 length_lo;	/* lower 32 bits of the 40-bit length */
	struct regpair pbl_addr;	/* address of the page buffer list */
};
527 | |
/* First element (16 bytes) of fmr wqe */
struct rdma_sq_fmr_wqe_1st {
	struct regpair addr;
	__le32 l_key;
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_FMR_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_FMR_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_FMR_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_FMR_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_FMR_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_FMR_WQE_1ST_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_FMR_WQE_1ST_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_FMR_WQE_1ST_RESERVED0_MASK 0x3
#define RDMA_SQ_FMR_WQE_1ST_RESERVED0_SHIFT 6
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
};

/* Second element (16 bytes) of fmr wqe */
struct rdma_sq_fmr_wqe_2nd {
	u8 fmr_ctrl;
#define RDMA_SQ_FMR_WQE_2ND_PAGE_SIZE_LOG_MASK 0x1F	/* log2 of the page size */
#define RDMA_SQ_FMR_WQE_2ND_PAGE_SIZE_LOG_SHIFT 0
#define RDMA_SQ_FMR_WQE_2ND_ZERO_BASED_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_ZERO_BASED_SHIFT 5
#define RDMA_SQ_FMR_WQE_2ND_BIND_EN_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_BIND_EN_SHIFT 6
#define RDMA_SQ_FMR_WQE_2ND_RESERVED1_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_RESERVED1_SHIFT 7
	u8 access_ctrl;		/* access rights of the registered region */
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_READ_SHIFT 0
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_REMOTE_WRITE_SHIFT 1
#define RDMA_SQ_FMR_WQE_2ND_ENABLE_ATOMIC_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_ENABLE_ATOMIC_SHIFT 2
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_READ_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_READ_SHIFT 3
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_WRITE_MASK 0x1
#define RDMA_SQ_FMR_WQE_2ND_LOCAL_WRITE_SHIFT 4
#define RDMA_SQ_FMR_WQE_2ND_RESERVED2_MASK 0x7
#define RDMA_SQ_FMR_WQE_2ND_RESERVED2_SHIFT 5
	u8 reserved3;
	u8 length_hi;		/* upper 8 bits of the 40-bit length */
	__le32 length_lo;	/* lower 32 bits of the 40-bit length */
	struct regpair pbl_addr;	/* address of the page buffer list */
};
581 | |
582 | |
/* Local invalidate SQ WQE (see RDMA_SQ_REQ_TYPE_LOCAL_INVALIDATE) */
struct rdma_sq_local_inv_wqe {
	struct regpair reserved;
	__le32 inv_l_key;	/* local key to invalidate */
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_LOCAL_INV_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_LOCAL_INV_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_LOCAL_INV_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_LOCAL_INV_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_LOCAL_INV_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_LOCAL_INV_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_LOCAL_INV_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_LOCAL_INV_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_LOCAL_INV_WQE_RESERVED0_SHIFT 6
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
};
605 | |
/* RDMA read/write SQ WQE; spans two 16-byte SQ elements — see the _1st/_2nd
 * views below.
 */
struct rdma_sq_rdma_wqe {
	__le32 imm_data;	/* immediate data (RDMA write with immediate) */
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_RDMA_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_RDMA_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_RDMA_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_RDMA_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_RDMA_WQE_READ_INV_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_READ_INV_FLG_SHIFT 6
#define RDMA_SQ_RDMA_WQE_RESERVED1_MASK 0x1
#define RDMA_SQ_RDMA_WQE_RESERVED1_SHIFT 7
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
	struct regpair remote_va;	/* remote virtual address */
	__le32 r_key;		/* remote memory key */
	u8 dif_flags;
#define RDMA_SQ_RDMA_WQE_DIF_BLOCK_SIZE_MASK 0x1	/* enum rdma_dif_block_size */
#define RDMA_SQ_RDMA_WQE_DIF_BLOCK_SIZE_SHIFT 0
#define RDMA_SQ_RDMA_WQE_RESERVED2_MASK 0x7F
#define RDMA_SQ_RDMA_WQE_RESERVED2_SHIFT 1
	u8 reserved3[3];
};
639 | |
/* First element (16 bytes) of rdma wqe */
struct rdma_sq_rdma_wqe_1st {
	__le32 imm_data;	/* immediate data (RDMA write with immediate) */
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_RDMA_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_RDMA_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_RDMA_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_RDMA_WQE_1ST_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_RDMA_WQE_1ST_READ_INV_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_READ_INV_FLG_SHIFT 6
#define RDMA_SQ_RDMA_WQE_1ST_RESERVED0_MASK 0x1
#define RDMA_SQ_RDMA_WQE_1ST_RESERVED0_SHIFT 7
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
};

/* Second element (16 bytes) of rdma wqe */
struct rdma_sq_rdma_wqe_2nd {
	struct regpair remote_va;	/* remote virtual address */
	__le32 r_key;			/* remote memory key */
	u8 dif_flags;
#define RDMA_SQ_RDMA_WQE_2ND_DIF_BLOCK_SIZE_MASK 0x1	/* enum rdma_dif_block_size */
#define RDMA_SQ_RDMA_WQE_2ND_DIF_BLOCK_SIZE_SHIFT 0
#define RDMA_SQ_RDMA_WQE_2ND_DIF_FIRST_SEGMENT_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_FIRST_SEGMENT_FLG_SHIFT 1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_LAST_SEGMENT_FLG_MASK 0x1
#define RDMA_SQ_RDMA_WQE_2ND_DIF_LAST_SEGMENT_FLG_SHIFT 2
#define RDMA_SQ_RDMA_WQE_2ND_RESERVED1_MASK 0x1F
#define RDMA_SQ_RDMA_WQE_2ND_RESERVED1_SHIFT 3
	u8 reserved2[3];
};
682 | |
/* SQ WQE req type enumeration; written into the req_type byte of SQ WQEs */
enum rdma_sq_req_type {
	RDMA_SQ_REQ_TYPE_SEND,
	RDMA_SQ_REQ_TYPE_SEND_WITH_IMM,
	RDMA_SQ_REQ_TYPE_SEND_WITH_INVALIDATE,
	RDMA_SQ_REQ_TYPE_RDMA_WR,
	RDMA_SQ_REQ_TYPE_RDMA_WR_WITH_IMM,
	RDMA_SQ_REQ_TYPE_RDMA_RD,
	RDMA_SQ_REQ_TYPE_ATOMIC_CMP_AND_SWAP,
	RDMA_SQ_REQ_TYPE_ATOMIC_ADD,
	RDMA_SQ_REQ_TYPE_LOCAL_INVALIDATE,
	RDMA_SQ_REQ_TYPE_FAST_MR,
	RDMA_SQ_REQ_TYPE_BIND,
	RDMA_SQ_REQ_TYPE_INVALID,
	MAX_RDMA_SQ_REQ_TYPE
};
699 | |
/* Send SQ WQE; spans two 16-byte SQ elements — see the _1st/_2st views below */
struct rdma_sq_send_wqe {
	__le32 inv_key_or_imm_data;	/* invalidate key or immediate data, per req_type */
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_SEND_WQE_COMP_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_COMP_FLG_SHIFT 0
#define RDMA_SQ_SEND_WQE_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_SEND_WQE_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_SEND_WQE_SE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_SE_FLG_SHIFT 3
#define RDMA_SQ_SEND_WQE_INLINE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_INLINE_FLG_SHIFT 4
#define RDMA_SQ_SEND_WQE_DIF_ON_HOST_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_DIF_ON_HOST_FLG_SHIFT 5
#define RDMA_SQ_SEND_WQE_RESERVED0_MASK 0x3
#define RDMA_SQ_SEND_WQE_RESERVED0_SHIFT 6
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
	__le32 reserved1[4];
};
724 | |
/* First element (16 bytes) of send wqe */
struct rdma_sq_send_wqe_1st {
	__le32 inv_key_or_imm_data;	/* invalidate key or immediate data, per req_type */
	__le32 length;
	__le32 xrc_srq;
	u8 req_type;		/* enum rdma_sq_req_type */
	u8 flags;
#define RDMA_SQ_SEND_WQE_1ST_COMP_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_COMP_FLG_SHIFT 0
#define RDMA_SQ_SEND_WQE_1ST_RD_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_RD_FENCE_FLG_SHIFT 1
#define RDMA_SQ_SEND_WQE_1ST_INV_FENCE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_INV_FENCE_FLG_SHIFT 2
#define RDMA_SQ_SEND_WQE_1ST_SE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_SE_FLG_SHIFT 3
#define RDMA_SQ_SEND_WQE_1ST_INLINE_FLG_MASK 0x1
#define RDMA_SQ_SEND_WQE_1ST_INLINE_FLG_SHIFT 4
#define RDMA_SQ_SEND_WQE_1ST_RESERVED0_MASK 0x7
#define RDMA_SQ_SEND_WQE_1ST_RESERVED0_SHIFT 5
	u8 wqe_size;		/* size of this WQE in 16-byte elements */
	u8 prev_wqe_size;	/* size of the previous WQE in 16-byte elements */
};

/* Second element (16 bytes) of send wqe.
 * NOTE(review): "2st" appears to be a typo for "2nd", but the name is part of
 * the driver API and is kept as-is.
 */
struct rdma_sq_send_wqe_2st {
	__le32 reserved1[4];
};
750 | |
751 | #endif /* __QED_HSI_RDMA__ */ |
752 | |