1// SPDX-License-Identifier: GPL-2.0+
2/*
3 * Shared descriptors for aead, skcipher algorithms
4 *
5 * Copyright 2016-2019 NXP
6 */
7
8#include "compat.h"
9#include "desc_constr.h"
10#include "caamalg_desc.h"
11
/*
 * For aead functions, read the payload from req->src and write the processed
 * payload to req->dst
 */
16static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
17{
18 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
19 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
20 KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
21}
22
23/* Set DK bit in class 1 operation if shared */
24static inline void append_dec_op1(u32 *desc, u32 type)
25{
26 u32 *jump_cmd, *uncond_jump_cmd;
27
28 /* DK bit is valid only for AES */
29 if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
30 append_operation(desc, type | OP_ALG_AS_INITFINAL |
31 OP_ALG_DECRYPT);
32 return;
33 }
34
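	/*
	 * Two variants of the decrypt OPERATION are appended below and one of
	 * them is skipped at run time via the SHRD jump condition: when the
	 * shared descriptor is re-used, the class 1 key register presumably
	 * already holds the key in its decrypt form, so the DK (Decrypt Key)
	 * variant is executed instead of the plain one.
	 */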
35 jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
36 append_operation(desc, type | OP_ALG_AS_INITFINAL |
37 OP_ALG_DECRYPT);
38 uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
39 set_jump_tgt_here(desc, jump_cmd);
40 append_operation(desc, type | OP_ALG_AS_INITFINAL |
41 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
42 set_jump_tgt_here(desc, uncond_jump_cmd);
43}
44
/**
 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
 *                               (non-protocol) with no (null) encryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
57void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
58 unsigned int icvsize, int era)
59{
60 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
61
62 init_sh_desc(desc, HDR_SHARE_SERIAL);
63
64 /* Skip if already shared */
65 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
66 JUMP_COND_SHRD);
67 if (era < 6) {
68 if (adata->key_inline)
69 append_key_as_imm(desc, adata->key_virt,
70 adata->keylen_pad, adata->keylen,
71 CLASS_2 | KEY_DEST_MDHA_SPLIT |
72 KEY_ENC);
73 else
74 append_key(desc, adata->key_dma, adata->keylen,
75 CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
76 } else {
77 append_proto_dkp(desc, adata);
78 }
79 set_jump_tgt_here(desc, key_jump_cmd);
80
81 /* assoclen + cryptlen = seqinlen */
82 append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
83
84 /* Prepare to read and write cryptlen + assoclen bytes */
85 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
86 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
87
88 /*
89 * MOVE_LEN opcode is not available in all SEC HW revisions,
90 * thus need to do some magic, i.e. self-patch the descriptor
91 * buffer.
92 */
93 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
94 MOVE_DEST_MATH3 |
95 (0x6 << MOVE_LEN_SHIFT));
96 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
97 MOVE_DEST_DESCBUF |
98 MOVE_WAITCOMP |
99 (0x8 << MOVE_LEN_SHIFT));
100
101 /* Class 2 operation */
102 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
103 OP_ALG_ENCRYPT);
104
105 /* Read and write cryptlen bytes */
106 aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
107
108 set_move_tgt_here(desc, read_move_cmd);
109 set_move_tgt_here(desc, write_move_cmd);
110 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
111 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
112 MOVE_AUX_LS);
113
114 /* Write ICV */
115 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
116 LDST_SRCDST_BYTE_CONTEXT);
117
118#ifdef DEBUG
119 print_hex_dump(KERN_ERR,
120 "aead null enc shdesc@" __stringify(__LINE__)": ",
121 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
122#endif
123}
124EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
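
/*
 * Illustrative usage sketch (hypothetical helper, not part of the original
 * driver): building the null-encryption encapsulation descriptor above for
 * authenc(hmac(sha256),ecb(cipher_null)) with a full-length ICV.  The key
 * fields of @adata are assumed to have been filled in by the caller's setkey
 * path; buffer sizing and the algorithm choice are assumptions for the
 * example only.
 */
static inline void example_aead_null_enc_shdesc(struct alginfo *adata, int era)
{
	u32 desc[MAX_CAAM_DESCSIZE];

	adata->algtype = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP;
	cnstr_shdsc_aead_null_encap(desc, adata, SHA256_DIGEST_SIZE, era);
}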
125
/**
 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
 *                               (non-protocol) with no (null) decryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
138void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
139 unsigned int icvsize, int era)
140{
141 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;
142
143 init_sh_desc(desc, HDR_SHARE_SERIAL);
144
145 /* Skip if already shared */
146 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
147 JUMP_COND_SHRD);
148 if (era < 6) {
149 if (adata->key_inline)
150 append_key_as_imm(desc, adata->key_virt,
151 adata->keylen_pad, adata->keylen,
152 CLASS_2 | KEY_DEST_MDHA_SPLIT |
153 KEY_ENC);
154 else
155 append_key(desc, adata->key_dma, adata->keylen,
156 CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
157 } else {
158 append_proto_dkp(desc, adata);
159 }
160 set_jump_tgt_here(desc, key_jump_cmd);
161
162 /* Class 2 operation */
163 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
164 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
165
166 /* assoclen + cryptlen = seqoutlen */
167 append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
168
169 /* Prepare to read and write cryptlen + assoclen bytes */
170 append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
171 append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
172
173 /*
174 * MOVE_LEN opcode is not available in all SEC HW revisions,
175 * thus need to do some magic, i.e. self-patch the descriptor
176 * buffer.
177 */
178 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
179 MOVE_DEST_MATH2 |
180 (0x6 << MOVE_LEN_SHIFT));
181 write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
182 MOVE_DEST_DESCBUF |
183 MOVE_WAITCOMP |
184 (0x8 << MOVE_LEN_SHIFT));
185
186 /* Read and write cryptlen bytes */
187 aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
188
	/*
	 * Insert a NOP here, since at least 4 instructions are needed between
	 * the commands that patch the descriptor buffer and the location
	 * being patched.
	 */
193 jump_cmd = append_jump(desc, JUMP_TEST_ALL);
194 set_jump_tgt_here(desc, jump_cmd);
195
196 set_move_tgt_here(desc, read_move_cmd);
197 set_move_tgt_here(desc, write_move_cmd);
198 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
199 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
200 MOVE_AUX_LS);
201 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
202
203 /* Load ICV */
204 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
205 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
206
207#ifdef DEBUG
208 print_hex_dump(KERN_ERR,
209 "aead null dec shdesc@" __stringify(__LINE__)": ",
210 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
211#endif
212}
213EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
214
215static void init_sh_desc_key_aead(u32 * const desc,
216 struct alginfo * const cdata,
217 struct alginfo * const adata,
218 const bool is_rfc3686, u32 *nonce, int era)
219{
220 u32 *key_jump_cmd;
221 unsigned int enckeylen = cdata->keylen;
222
223 /* Note: Context registers are saved. */
224 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
225
226 /* Skip if already shared */
227 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
228 JUMP_COND_SHRD);
229
230 /*
231 * RFC3686 specific:
232 * | key = {AUTH_KEY, ENC_KEY, NONCE}
233 * | enckeylen = encryption key size + nonce size
234 */
235 if (is_rfc3686)
236 enckeylen -= CTR_RFC3686_NONCE_SIZE;
237
238 if (era < 6) {
239 if (adata->key_inline)
240 append_key_as_imm(desc, adata->key_virt,
241 adata->keylen_pad, adata->keylen,
242 CLASS_2 | KEY_DEST_MDHA_SPLIT |
243 KEY_ENC);
244 else
245 append_key(desc, adata->key_dma, adata->keylen,
246 CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
247 } else {
248 append_proto_dkp(desc, adata);
249 }
250
251 if (cdata->key_inline)
252 append_key_as_imm(desc, cdata->key_virt, enckeylen,
253 enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
254 else
255 append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
256 KEY_DEST_CLASS_REG);
257
258 /* Load Counter into CONTEXT1 reg */
259 if (is_rfc3686) {
260 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
261 LDST_CLASS_IND_CCB |
262 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
263 append_move(desc,
264 MOVE_SRC_OUTFIFO |
265 MOVE_DEST_CLASS1CTX |
266 (16 << MOVE_OFFSET_SHIFT) |
267 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
268 }
269
270 set_jump_tgt_here(desc, key_jump_cmd);
271}
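
/*
 * RFC3686 context layout sketch (inferred from the LOAD/MOVE commands in this
 * file; the exact offsets are an assumption, not taken from the original
 * comments): CONTEXT1 holds the 4-byte nonce at offset 16, the 8-byte
 * per-request IV at ctx1_iv_off, and a 32-bit big-endian counter (initialized
 * to 1) at ctx1_iv_off + CTR_RFC3686_IV_SIZE, which together form the 16-byte
 * AES-CTR counter block.
 */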
272
/**
 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
293void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
294 struct alginfo *adata, unsigned int ivsize,
295 unsigned int icvsize, const bool is_rfc3686,
296 u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
297 int era)
298{
299 /* Note: Context registers are saved. */
300 init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
301
302 /* Class 2 operation */
303 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
304 OP_ALG_ENCRYPT);
305
306 if (is_qi) {
307 u32 *wait_load_cmd;
308
309 /* REG3 = assoclen */
310 append_seq_load(desc, 4, LDST_CLASS_DECO |
311 LDST_SRCDST_WORD_DECO_MATH3 |
312 (4 << LDST_OFFSET_SHIFT));
313
314 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
315 JUMP_COND_CALM | JUMP_COND_NCP |
316 JUMP_COND_NOP | JUMP_COND_NIP |
317 JUMP_COND_NIFP);
318 set_jump_tgt_here(desc, wait_load_cmd);
319
320 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
321 LDST_SRCDST_BYTE_CONTEXT |
322 (ctx1_iv_off << LDST_OFFSET_SHIFT));
323 }
324
325 /* Read and write assoclen bytes */
326 if (is_qi || era < 3) {
327 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
328 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
329 } else {
330 append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
331 append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
332 }
333
334 /* Skip assoc data */
335 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
336
337 /* read assoc before reading payload */
338 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
339 FIFOLDST_VLF);
340
341 /* Load Counter into CONTEXT1 reg */
342 if (is_rfc3686)
343 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
344 LDST_SRCDST_BYTE_CONTEXT |
345 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
346 LDST_OFFSET_SHIFT));
347
348 /* Class 1 operation */
349 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
350 OP_ALG_ENCRYPT);
351
352 /* Read and write cryptlen bytes */
353 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
354 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
355 aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
356
357 /* Write ICV */
358 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
359 LDST_SRCDST_BYTE_CONTEXT);
360
361#ifdef DEBUG
362 print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
363 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
364#endif
365}
366EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
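
/*
 * Illustrative usage sketch (hypothetical helper, parameter choices are
 * assumptions for the example only): building a non-QI
 * authenc(hmac(sha1),cbc(aes)) encapsulation descriptor.  Key material is
 * assumed to have been prepared by the caller's setkey path.
 */
static inline void example_aead_cbc_aes_enc_shdesc(struct alginfo *cdata,
						   struct alginfo *adata,
						   int era)
{
	u32 desc[MAX_CAAM_DESCSIZE];

	cdata->algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
	adata->algtype = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP;

	/* 16-byte AES block IV, full SHA-1 ICV, no RFC3686 wrapping, no QI */
	cnstr_shdsc_aead_encap(desc, cdata, adata, AES_BLOCK_SIZE,
			       SHA1_DIGEST_SIZE, false, NULL, 0, false, era);
}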
367
/**
 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @geniv: true when the transform generates its own IV (givencrypt-style,
 *         e.g. the echainiv(authenc(...)) variants)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
388void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
389 struct alginfo *adata, unsigned int ivsize,
390 unsigned int icvsize, const bool geniv,
391 const bool is_rfc3686, u32 *nonce,
392 const u32 ctx1_iv_off, const bool is_qi, int era)
393{
394 /* Note: Context registers are saved. */
395 init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
396
397 /* Class 2 operation */
398 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
399 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
400
401 if (is_qi) {
402 u32 *wait_load_cmd;
403
404 /* REG3 = assoclen */
405 append_seq_load(desc, 4, LDST_CLASS_DECO |
406 LDST_SRCDST_WORD_DECO_MATH3 |
407 (4 << LDST_OFFSET_SHIFT));
408
409 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
410 JUMP_COND_CALM | JUMP_COND_NCP |
411 JUMP_COND_NOP | JUMP_COND_NIP |
412 JUMP_COND_NIFP);
413 set_jump_tgt_here(desc, wait_load_cmd);
414
415 if (!geniv)
416 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
417 LDST_SRCDST_BYTE_CONTEXT |
418 (ctx1_iv_off << LDST_OFFSET_SHIFT));
419 }
420
421 /* Read and write assoclen bytes */
422 if (is_qi || era < 3) {
423 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
424 if (geniv)
425 append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
426 ivsize);
427 else
428 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
429 CAAM_CMD_SZ);
430 } else {
431 append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
432 if (geniv)
433 append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
434 ivsize);
435 else
436 append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
437 CAAM_CMD_SZ);
438 }
439
440 /* Skip assoc data */
441 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
442
443 /* read assoc before reading payload */
444 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
445 KEY_VLF);
446
447 if (geniv) {
448 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
449 LDST_SRCDST_BYTE_CONTEXT |
450 (ctx1_iv_off << LDST_OFFSET_SHIFT));
451 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
452 (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
453 }
454
455 /* Load Counter into CONTEXT1 reg */
456 if (is_rfc3686)
457 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
458 LDST_SRCDST_BYTE_CONTEXT |
459 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
460 LDST_OFFSET_SHIFT));
461
462 /* Choose operation */
463 if (ctx1_iv_off)
464 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
465 OP_ALG_DECRYPT);
466 else
467 append_dec_op1(desc, cdata->algtype);
468
469 /* Read and write cryptlen bytes */
470 append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
471 append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
472 aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
473
474 /* Load ICV */
475 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
476 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
477
478#ifdef DEBUG
479 print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
480 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
481#endif
482}
483EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
484
/**
 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
 *                             (non-protocol) with HW-generated initialization
 *                             vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
506void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
507 struct alginfo *adata, unsigned int ivsize,
508 unsigned int icvsize, const bool is_rfc3686,
509 u32 *nonce, const u32 ctx1_iv_off,
510 const bool is_qi, int era)
511{
512 u32 geniv, moveiv;
513
514 /* Note: Context registers are saved. */
515 init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
516
517 if (is_qi) {
518 u32 *wait_load_cmd;
519
520 /* REG3 = assoclen */
521 append_seq_load(desc, 4, LDST_CLASS_DECO |
522 LDST_SRCDST_WORD_DECO_MATH3 |
523 (4 << LDST_OFFSET_SHIFT));
524
525 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
526 JUMP_COND_CALM | JUMP_COND_NCP |
527 JUMP_COND_NOP | JUMP_COND_NIP |
528 JUMP_COND_NIFP);
529 set_jump_tgt_here(desc, wait_load_cmd);
530 }
531
532 if (is_rfc3686) {
533 if (is_qi)
534 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
535 LDST_SRCDST_BYTE_CONTEXT |
536 (ctx1_iv_off << LDST_OFFSET_SHIFT));
537
538 goto copy_iv;
539 }
540
541 /* Generate IV */
542 geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
543 NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
544 NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
545 append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
546 LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
547 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
548 append_move(desc, MOVE_WAITCOMP |
549 MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
550 (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
551 (ivsize << MOVE_LEN_SHIFT));
552 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
553
copy_iv:
	/* Copy IV from class 1 context to OFIFO */
556 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
557 (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
558 (ivsize << MOVE_LEN_SHIFT));
559
560 /* Return to encryption */
561 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
562 OP_ALG_ENCRYPT);
563
564 /* Read and write assoclen bytes */
565 if (is_qi || era < 3) {
566 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
567 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
568 } else {
569 append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
570 append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
571 }
572
573 /* Skip assoc data */
574 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
575
576 /* read assoc before reading payload */
577 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
578 KEY_VLF);
579
580 /* Copy iv from outfifo to class 2 fifo */
581 moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
582 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
583 append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
584 LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
585 append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
586 LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
587
588 /* Load Counter into CONTEXT1 reg */
589 if (is_rfc3686)
590 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
591 LDST_SRCDST_BYTE_CONTEXT |
592 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
593 LDST_OFFSET_SHIFT));
594
595 /* Class 1 operation */
596 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
597 OP_ALG_ENCRYPT);
598
599 /* Will write ivsize + cryptlen */
600 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
601
	/* No need to reload the IV; just skip over it in the input sequence */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);
605
606 /* Will read cryptlen */
607 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
608 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
609 FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
610 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
611
612 /* Write ICV */
613 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
614 LDST_SRCDST_BYTE_CONTEXT);
615
616#ifdef DEBUG
617 print_hex_dump(KERN_ERR,
618 "aead givenc shdesc@" __stringify(__LINE__)": ",
619 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
620#endif
621}
622EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
623
/**
 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
633void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
634 unsigned int ivsize, unsigned int icvsize,
635 const bool is_qi)
636{
637 u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
638 *zero_assoc_jump_cmd2;
639
640 init_sh_desc(desc, HDR_SHARE_SERIAL);
641
642 /* skip key loading if they are loaded due to sharing */
643 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
644 JUMP_COND_SHRD);
645 if (cdata->key_inline)
646 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
647 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
648 else
649 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
650 KEY_DEST_CLASS_REG);
651 set_jump_tgt_here(desc, key_jump_cmd);
652
653 /* class 1 operation */
654 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
655 OP_ALG_ENCRYPT);
656
657 if (is_qi) {
658 u32 *wait_load_cmd;
659
660 /* REG3 = assoclen */
661 append_seq_load(desc, 4, LDST_CLASS_DECO |
662 LDST_SRCDST_WORD_DECO_MATH3 |
663 (4 << LDST_OFFSET_SHIFT));
664
665 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
666 JUMP_COND_CALM | JUMP_COND_NCP |
667 JUMP_COND_NOP | JUMP_COND_NIP |
668 JUMP_COND_NIFP);
669 set_jump_tgt_here(desc, wait_load_cmd);
670
671 append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
672 ivsize);
673 } else {
674 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
675 CAAM_CMD_SZ);
676 }
677
678 /* if assoclen + cryptlen is ZERO, skip to ICV write */
679 zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
680 JUMP_COND_MATH_Z);
681
682 if (is_qi)
683 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
684 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
685
686 /* if assoclen is ZERO, skip reading the assoc data */
687 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
688 zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
689 JUMP_COND_MATH_Z);
690
691 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
692
693 /* skip assoc data */
694 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
695
696 /* cryptlen = seqinlen - assoclen */
697 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
698
699 /* if cryptlen is ZERO jump to zero-payload commands */
700 zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
701 JUMP_COND_MATH_Z);
702
703 /* read assoc data */
704 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
705 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
706 set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
707
708 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
709
710 /* write encrypted data */
711 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
712
713 /* read payload data */
714 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
715 FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
716
717 /* jump to ICV writing */
718 if (is_qi)
719 append_jump(desc, JUMP_TEST_ALL | 4);
720 else
721 append_jump(desc, JUMP_TEST_ALL | 2);
722
723 /* zero-payload commands */
724 set_jump_tgt_here(desc, zero_payload_jump_cmd);
725
726 /* read assoc data */
727 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
728 FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
729 if (is_qi)
730 /* jump to ICV writing */
731 append_jump(desc, JUMP_TEST_ALL | 2);
732
733 /* There is no input data */
734 set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
735
736 if (is_qi)
737 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
738 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
739 FIFOLD_TYPE_LAST1);
740
741 /* write ICV */
742 append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
743 LDST_SRCDST_BYTE_CONTEXT);
744
745#ifdef DEBUG
746 print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
747 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
748#endif
749}
750EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
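
/*
 * Illustrative usage sketch (hypothetical helper, parameter choices are
 * assumptions for the example only): building a non-QI gcm(aes) encapsulation
 * descriptor with the usual 12-byte IV and a full 16-byte tag.
 */
static inline void example_gcm_aes_enc_shdesc(struct alginfo *cdata)
{
	u32 desc[MAX_CAAM_DESCSIZE];

	cdata->algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM;
	cnstr_shdsc_gcm_encap(desc, cdata, 12, AES_BLOCK_SIZE, false);
}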
751
/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
761void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
762 unsigned int ivsize, unsigned int icvsize,
763 const bool is_qi)
764{
765 u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;
766
767 init_sh_desc(desc, HDR_SHARE_SERIAL);
768
769 /* skip key loading if they are loaded due to sharing */
770 key_jump_cmd = append_jump(desc, JUMP_JSL |
771 JUMP_TEST_ALL | JUMP_COND_SHRD);
772 if (cdata->key_inline)
773 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
774 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
775 else
776 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
777 KEY_DEST_CLASS_REG);
778 set_jump_tgt_here(desc, key_jump_cmd);
779
780 /* class 1 operation */
781 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
782 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
783
784 if (is_qi) {
785 u32 *wait_load_cmd;
786
787 /* REG3 = assoclen */
788 append_seq_load(desc, 4, LDST_CLASS_DECO |
789 LDST_SRCDST_WORD_DECO_MATH3 |
790 (4 << LDST_OFFSET_SHIFT));
791
792 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
793 JUMP_COND_CALM | JUMP_COND_NCP |
794 JUMP_COND_NOP | JUMP_COND_NIP |
795 JUMP_COND_NIFP);
796 set_jump_tgt_here(desc, wait_load_cmd);
797
798 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
799 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
800 }
801
802 /* if assoclen is ZERO, skip reading the assoc data */
803 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
804 zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
805 JUMP_COND_MATH_Z);
806
807 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
808
809 /* skip assoc data */
810 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
811
812 /* read assoc data */
813 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
814 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
815
816 set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
817
818 /* cryptlen = seqoutlen - assoclen */
819 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
820
821 /* jump to zero-payload command if cryptlen is zero */
822 zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
823 JUMP_COND_MATH_Z);
824
825 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
826
827 /* store encrypted data */
828 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
829
830 /* read payload data */
831 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
832 FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
833
834 /* zero-payload command */
835 set_jump_tgt_here(desc, zero_payload_jump_cmd);
836
837 /* read ICV */
838 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
839 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
840
841#ifdef DEBUG
842 print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
843 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
844#endif
845}
846EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
847
/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
858void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
859 unsigned int ivsize, unsigned int icvsize,
860 const bool is_qi)
861{
862 u32 *key_jump_cmd;
863
864 init_sh_desc(desc, HDR_SHARE_SERIAL);
865
866 /* Skip key loading if it is loaded due to sharing */
867 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
868 JUMP_COND_SHRD);
869 if (cdata->key_inline)
870 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
871 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
872 else
873 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
874 KEY_DEST_CLASS_REG);
875 set_jump_tgt_here(desc, key_jump_cmd);
876
877 /* Class 1 operation */
878 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
879 OP_ALG_ENCRYPT);
880
881 if (is_qi) {
882 u32 *wait_load_cmd;
883
884 /* REG3 = assoclen */
885 append_seq_load(desc, 4, LDST_CLASS_DECO |
886 LDST_SRCDST_WORD_DECO_MATH3 |
887 (4 << LDST_OFFSET_SHIFT));
888
889 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
890 JUMP_COND_CALM | JUMP_COND_NCP |
891 JUMP_COND_NOP | JUMP_COND_NIP |
892 JUMP_COND_NIFP);
893 set_jump_tgt_here(desc, wait_load_cmd);
894
895 /* Read salt and IV */
896 append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
897 cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
898 FIFOLD_TYPE_IV);
899 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
900 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
901 }
902
903 append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
904 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
905
906 /* Read assoc data */
907 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
908 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
909
910 /* Skip IV */
911 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
912
913 /* Will read cryptlen bytes */
914 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
915
916 /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
917 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
918
919 /* Skip assoc data */
920 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
921
922 /* cryptlen = seqoutlen - assoclen */
923 append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);
924
925 /* Write encrypted data */
926 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
927
928 /* Read payload data */
929 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
930 FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
931
932 /* Write ICV */
933 append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
934 LDST_SRCDST_BYTE_CONTEXT);
935
936#ifdef DEBUG
937 print_hex_dump(KERN_ERR,
938 "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
939 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
940#endif
941}
942EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
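
/*
 * Key layout note (as implied by the QI path above): for rfc4106 the 4-byte
 * salt is expected to sit immediately after the AES key, at
 * cdata->key_virt + cdata->keylen; the descriptor feeds it to the class 1
 * engine as the first part of the GCM nonce, followed by the per-request IV.
 */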
943
/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
954void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
955 unsigned int ivsize, unsigned int icvsize,
956 const bool is_qi)
957{
958 u32 *key_jump_cmd;
959
960 init_sh_desc(desc, HDR_SHARE_SERIAL);
961
962 /* Skip key loading if it is loaded due to sharing */
963 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
964 JUMP_COND_SHRD);
965 if (cdata->key_inline)
966 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
967 cdata->keylen, CLASS_1 |
968 KEY_DEST_CLASS_REG);
969 else
970 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
971 KEY_DEST_CLASS_REG);
972 set_jump_tgt_here(desc, key_jump_cmd);
973
974 /* Class 1 operation */
975 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
976 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
977
978 if (is_qi) {
979 u32 *wait_load_cmd;
980
981 /* REG3 = assoclen */
982 append_seq_load(desc, 4, LDST_CLASS_DECO |
983 LDST_SRCDST_WORD_DECO_MATH3 |
984 (4 << LDST_OFFSET_SHIFT));
985
986 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
987 JUMP_COND_CALM | JUMP_COND_NCP |
988 JUMP_COND_NOP | JUMP_COND_NIP |
989 JUMP_COND_NIFP);
990 set_jump_tgt_here(desc, wait_load_cmd);
991
992 /* Read salt and IV */
993 append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
994 cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
995 FIFOLD_TYPE_IV);
996 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
997 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
998 }
999
1000 append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
1001 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1002
1003 /* Read assoc data */
1004 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1005 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
1006
1007 /* Skip IV */
1008 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
1009
1010 /* Will read cryptlen bytes */
1011 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
1012
1013 /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
1014 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
1015
1016 /* Skip assoc data */
1017 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
1018
1019 /* Will write cryptlen bytes */
1020 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1021
1022 /* Store payload data */
1023 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1024
1025 /* Read encrypted data */
1026 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1027 FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
1028
1029 /* Read ICV */
1030 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1031 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1032
1033#ifdef DEBUG
1034 print_hex_dump(KERN_ERR,
1035 "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
1036 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1037#endif
1038}
1039EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
1040
/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
1051void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
1052 unsigned int ivsize, unsigned int icvsize,
1053 const bool is_qi)
1054{
1055 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
1056
1057 init_sh_desc(desc, HDR_SHARE_SERIAL);
1058
1059 /* Skip key loading if it is loaded due to sharing */
1060 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1061 JUMP_COND_SHRD);
1062 if (cdata->key_inline)
1063 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1064 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1065 else
1066 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
1067 KEY_DEST_CLASS_REG);
1068 set_jump_tgt_here(desc, key_jump_cmd);
1069
1070 /* Class 1 operation */
1071 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1072 OP_ALG_ENCRYPT);
1073
1074 if (is_qi) {
1075 /* assoclen is not needed, skip it */
1076 append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
1077
1078 /* Read salt and IV */
1079 append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1080 cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1081 FIFOLD_TYPE_IV);
1082 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1083 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1084 }
1085
1086 /* assoclen + cryptlen = seqinlen */
1087 append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
1088
1089 /*
1090 * MOVE_LEN opcode is not available in all SEC HW revisions,
1091 * thus need to do some magic, i.e. self-patch the descriptor
1092 * buffer.
1093 */
1094 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1095 (0x6 << MOVE_LEN_SHIFT));
1096 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1097 (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
1098
1099 /* Will read assoclen + cryptlen bytes */
1100 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1101
1102 /* Will write assoclen + cryptlen bytes */
1103 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1104
1105 /* Read and write assoclen + cryptlen bytes */
1106 aead_append_src_dst(desc, FIFOLD_TYPE_AAD);
1107
1108 set_move_tgt_here(desc, read_move_cmd);
1109 set_move_tgt_here(desc, write_move_cmd);
1110 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1111 /* Move payload data to OFIFO */
1112 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1113
1114 /* Write ICV */
1115 append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
1116 LDST_SRCDST_BYTE_CONTEXT);
1117
1118#ifdef DEBUG
1119 print_hex_dump(KERN_ERR,
1120 "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
1121 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1122#endif
1123}
1124EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
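
/*
 * Big-picture note (summary of the flow above): rfc4543 is AES-GMAC, i.e.
 * authentication only. All assoclen + cryptlen bytes are fed to the AES
 * engine as AAD, while the payload itself is just moved from the input FIFO
 * to the output FIFO unmodified; the engine produces nothing but the ICV.
 */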
1125
/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
1136void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
1137 unsigned int ivsize, unsigned int icvsize,
1138 const bool is_qi)
1139{
1140 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
1141
1142 init_sh_desc(desc, HDR_SHARE_SERIAL);
1143
1144 /* Skip key loading if it is loaded due to sharing */
1145 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1146 JUMP_COND_SHRD);
1147 if (cdata->key_inline)
1148 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1149 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1150 else
1151 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
1152 KEY_DEST_CLASS_REG);
1153 set_jump_tgt_here(desc, key_jump_cmd);
1154
1155 /* Class 1 operation */
1156 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1157 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1158
1159 if (is_qi) {
1160 /* assoclen is not needed, skip it */
1161 append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
1162
1163 /* Read salt and IV */
1164 append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1165 cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1166 FIFOLD_TYPE_IV);
1167 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1168 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1169 }
1170
1171 /* assoclen + cryptlen = seqoutlen */
1172 append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1173
1174 /*
1175 * MOVE_LEN opcode is not available in all SEC HW revisions,
1176 * thus need to do some magic, i.e. self-patch the descriptor
1177 * buffer.
1178 */
1179 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1180 (0x6 << MOVE_LEN_SHIFT));
1181 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1182 (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
1183
1184 /* Will read assoclen + cryptlen bytes */
1185 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1186
1187 /* Will write assoclen + cryptlen bytes */
1188 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1189
1190 /* Store payload data */
1191 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1192
1193 /* In-snoop assoclen + cryptlen data */
1194 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
1195 FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);
1196
1197 set_move_tgt_here(desc, read_move_cmd);
1198 set_move_tgt_here(desc, write_move_cmd);
1199 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1200 /* Move payload data to OFIFO */
1201 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1202 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1203
1204 /* Read ICV */
1205 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1206 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1207
1208#ifdef DEBUG
1209 print_hex_dump(KERN_ERR,
1210 "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
1211 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1212#endif
1213}
1214EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
1215
/**
 * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
 *                          IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
 *                          descriptor (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ORed with
 *         OP_ALG_AAI_AEAD.
 * @adata: pointer to authentication transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ORed with
 *         OP_ALG_AAI_AEAD.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @encap: true if encapsulation, false if decapsulation
 * @is_qi: true when called from caam/qi
 */
1232void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
1233 struct alginfo *adata, unsigned int ivsize,
1234 unsigned int icvsize, const bool encap,
1235 const bool is_qi)
1236{
1237 u32 *key_jump_cmd, *wait_cmd;
1238 u32 nfifo;
1239 const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);
1240
1241 /* Note: Context registers are saved. */
1242 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1243
1244 /* skip key loading if they are loaded due to sharing */
1245 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1246 JUMP_COND_SHRD);
1247
1248 append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
1249 CLASS_1 | KEY_DEST_CLASS_REG);
1250
1251 /* For IPsec load the salt from keymat in the context register */
1252 if (is_ipsec)
1253 append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
1254 LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
1255 4 << LDST_OFFSET_SHIFT);
1256
1257 set_jump_tgt_here(desc, key_jump_cmd);
1258
1259 /* Class 2 and 1 operations: Poly & ChaCha */
1260 if (encap) {
1261 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
1262 OP_ALG_ENCRYPT);
1263 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1264 OP_ALG_ENCRYPT);
1265 } else {
1266 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
1267 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1268 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1269 OP_ALG_DECRYPT);
1270 }
1271
1272 if (is_qi) {
1273 u32 *wait_load_cmd;
1274 u32 ctx1_iv_off = is_ipsec ? 8 : 4;
1275
1276 /* REG3 = assoclen */
1277 append_seq_load(desc, 4, LDST_CLASS_DECO |
1278 LDST_SRCDST_WORD_DECO_MATH3 |
1279 4 << LDST_OFFSET_SHIFT);
1280
1281 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1282 JUMP_COND_CALM | JUMP_COND_NCP |
1283 JUMP_COND_NOP | JUMP_COND_NIP |
1284 JUMP_COND_NIFP);
1285 set_jump_tgt_here(desc, wait_load_cmd);
1286
1287 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
1288 LDST_SRCDST_BYTE_CONTEXT |
1289 ctx1_iv_off << LDST_OFFSET_SHIFT);
1290 }
1291
	/*
	 * NFIFO trick:
	 * Read the associated data from the input and send it to the class 1
	 * and class 2 alignment blocks. From class 1, send the data to the
	 * output FIFO and then write it to memory, since the AD does not need
	 * to be encrypted.
	 */
1298 nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
1299 NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
1300 append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
1301 LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);
1302
1303 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
1304 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1305 append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
1306 FIFOLD_CLASS_CLASS1 | LDST_VLF);
1307 append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
1308 MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
1309 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
1310
1311 /* IPsec - copy IV at the output */
1312 if (is_ipsec)
1313 append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
1314 0x2 << 25);
1315
1316 wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
1317 JUMP_COND_NOP | JUMP_TEST_ALL);
1318 set_jump_tgt_here(desc, wait_cmd);
1319
1320 if (encap) {
1321 /* Read and write cryptlen bytes */
1322 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1323 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
1324 CAAM_CMD_SZ);
1325 aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
1326
1327 /* Write ICV */
1328 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
1329 LDST_SRCDST_BYTE_CONTEXT);
1330 } else {
1331 /* Read and write cryptlen bytes */
1332 append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0,
1333 CAAM_CMD_SZ);
1334 append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
1335 CAAM_CMD_SZ);
1336 aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
1337
1338 /* Load ICV for verification */
1339 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
1340 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
1341 }
1342
1343 print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
1344 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1345 1);
1346}
1347EXPORT_SYMBOL(cnstr_shdsc_chachapoly);
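
/*
 * Illustrative usage sketch (hypothetical helper, parameter choices are
 * assumptions for the example only): building a non-QI
 * rfc7539(chacha20,poly1305) encapsulation descriptor with the generic
 * 12-byte nonce and a 16-byte Poly1305 tag.
 */
static inline void example_chachapoly_enc_shdesc(struct alginfo *cdata,
						 struct alginfo *adata)
{
	u32 desc[MAX_CAAM_DESCSIZE];

	cdata->algtype = OP_ALG_ALGSEL_CHACHA20 | OP_ALG_AAI_AEAD;
	adata->algtype = OP_ALG_ALGSEL_POLY1305 | OP_ALG_AAI_AEAD;
	cnstr_shdsc_chachapoly(desc, cdata, adata, CHACHAPOLY_IV_SIZE,
			       16, true, false);
}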
1348
/*
 * For skcipher encrypt and decrypt, read from req->src and write to req->dst
 */
1350static inline void skcipher_append_src_dst(u32 *desc)
1351{
1352 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1353 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1354 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
1355 KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
1356 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
1357}
1358
/**
 * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
 *         - OP_ALG_ALGSEL_CHACHA20
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
1370void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
1371 unsigned int ivsize, const bool is_rfc3686,
1372 const u32 ctx1_iv_off)
1373{
1374 u32 *key_jump_cmd;
1375
1376 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1377 /* Skip if already shared */
1378 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1379 JUMP_COND_SHRD);
1380
1381 /* Load class1 key only */
1382 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1383 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1384
1385 /* Load nonce into CONTEXT1 reg */
1386 if (is_rfc3686) {
1387 const u8 *nonce = cdata->key_virt + cdata->keylen;
1388
1389 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1390 LDST_CLASS_IND_CCB |
1391 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1392 append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1393 MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1394 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1395 }
1396
1397 set_jump_tgt_here(desc, key_jump_cmd);
1398
1399 /* Load IV, if there is one */
1400 if (ivsize)
1401 append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1402 LDST_CLASS_1_CCB | (ctx1_iv_off <<
1403 LDST_OFFSET_SHIFT));
1404
1405 /* Load counter into CONTEXT1 reg */
1406 if (is_rfc3686)
1407 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1408 LDST_SRCDST_BYTE_CONTEXT |
1409 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1410 LDST_OFFSET_SHIFT));
1411
1412 /* Load operation */
1413 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1414 OP_ALG_ENCRYPT);
1415
1416 /* Perform operation */
1417 skcipher_append_src_dst(desc);
1418
1419#ifdef DEBUG
1420 print_hex_dump(KERN_ERR,
1421 "skcipher enc shdesc@" __stringify(__LINE__)": ",
1422 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1423#endif
1424}
1425EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
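
/*
 * Illustrative usage sketch (hypothetical helper, parameter choices are
 * assumptions for the example only): building a cbc(aes) skcipher encryption
 * descriptor, i.e. no RFC3686 wrapping and the IV at offset 0 in CONTEXT1.
 */
static inline void example_cbc_aes_enc_shdesc(struct alginfo *cdata)
{
	u32 desc[MAX_CAAM_DESCSIZE];

	cdata->algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
	cnstr_shdsc_skcipher_encap(desc, cdata, AES_BLOCK_SIZE, false, 0);
}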
1426
/**
 * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
 *         - OP_ALG_ALGSEL_CHACHA20
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
1438void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
1439 unsigned int ivsize, const bool is_rfc3686,
1440 const u32 ctx1_iv_off)
1441{
1442 u32 *key_jump_cmd;
1443
1444 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1445 /* Skip if already shared */
1446 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1447 JUMP_COND_SHRD);
1448
1449 /* Load class1 key only */
1450 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1451 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1452
1453 /* Load nonce into CONTEXT1 reg */
1454 if (is_rfc3686) {
1455 const u8 *nonce = cdata->key_virt + cdata->keylen;
1456
1457 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1458 LDST_CLASS_IND_CCB |
1459 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1460 append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1461 MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1462 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1463 }
1464
1465 set_jump_tgt_here(desc, key_jump_cmd);
1466
1467 /* Load IV, if there is one */
1468 if (ivsize)
1469 append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1470 LDST_CLASS_1_CCB | (ctx1_iv_off <<
1471 LDST_OFFSET_SHIFT));
1472
1473 /* Load counter into CONTEXT1 reg */
1474 if (is_rfc3686)
1475 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1476 LDST_SRCDST_BYTE_CONTEXT |
1477 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1478 LDST_OFFSET_SHIFT));
1479
1480 /* Choose operation */
1481 if (ctx1_iv_off)
1482 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1483 OP_ALG_DECRYPT);
1484 else
1485 append_dec_op1(desc, cdata->algtype);
1486
1487 /* Perform operation */
1488 skcipher_append_src_dst(desc);
1489
1490#ifdef DEBUG
1491 print_hex_dump(KERN_ERR,
1492 "skcipher dec shdesc@" __stringify(__LINE__)": ",
1493 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1494#endif
1495}
1496EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
1497
/**
 * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_XTS.
 */
1504void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
1505{
1506 __be64 sector_size = cpu_to_be64(512);
1507 u32 *key_jump_cmd;
1508
1509 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1510 /* Skip if already shared */
1511 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1512 JUMP_COND_SHRD);
1513
1514 /* Load class1 keys only */
1515 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1516 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1517
1518 /* Load sector size with index 40 bytes (0x28) */
1519 append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
1520 LDST_SRCDST_BYTE_CONTEXT |
1521 (0x28 << LDST_OFFSET_SHIFT));
1522
1523 set_jump_tgt_here(desc, key_jump_cmd);
1524
1525 /*
1526 * create sequence for loading the sector index
1527 * Upper 8B of IV - will be used as sector index
1528 * Lower 8B of IV - will be discarded
1529 */
1530 append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1531 (0x20 << LDST_OFFSET_SHIFT));
1532 append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1533
1534 /* Load operation */
1535 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1536 OP_ALG_ENCRYPT);
1537
1538 /* Perform operation */
1539 skcipher_append_src_dst(desc);
1540
1541#ifdef DEBUG
1542 print_hex_dump(KERN_ERR,
1543 "xts skcipher enc shdesc@" __stringify(__LINE__) ": ",
1544 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1545#endif
1546}
1547EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
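
/*
 * Illustrative usage sketch (hypothetical helper, not part of the original
 * driver): xts(aes) takes only the two concatenated AES keys in @cdata, since
 * the descriptor above hard-codes 512-byte sectors and pulls the sector index
 * from the upper 8 bytes of the IV.
 */
static inline void example_xts_aes_enc_shdesc(struct alginfo *cdata)
{
	u32 desc[MAX_CAAM_DESCSIZE];

	cdata->algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS;
	cnstr_shdsc_xts_skcipher_encap(desc, cdata);
}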
1548
/**
 * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_XTS.
 */
1555void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
1556{
1557 __be64 sector_size = cpu_to_be64(512);
1558 u32 *key_jump_cmd;
1559
1560 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1561 /* Skip if already shared */
1562 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1563 JUMP_COND_SHRD);
1564
1565 /* Load class1 key only */
1566 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1567 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1568
1569 /* Load sector size with index 40 bytes (0x28) */
1570 append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
1571 LDST_SRCDST_BYTE_CONTEXT |
1572 (0x28 << LDST_OFFSET_SHIFT));
1573
1574 set_jump_tgt_here(desc, key_jump_cmd);
1575
1576 /*
1577 * create sequence for loading the sector index
1578 * Upper 8B of IV - will be used as sector index
1579 * Lower 8B of IV - will be discarded
1580 */
1581 append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1582 (0x20 << LDST_OFFSET_SHIFT));
1583 append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1584
1585 /* Load operation */
1586 append_dec_op1(desc, cdata->algtype);
1587
1588 /* Perform operation */
1589 skcipher_append_src_dst(desc);
1590
1591#ifdef DEBUG
1592 print_hex_dump(KERN_ERR,
1593 "xts skcipher dec shdesc@" __stringify(__LINE__) ": ",
1594 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1595#endif
1596}
1597EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
1598
1599MODULE_LICENSE("GPL");
1600MODULE_DESCRIPTION("FSL CAAM descriptor support");
1601MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
1602