1 | // SPDX-License-Identifier: GPL-2.0 |
2 | #include <linux/kernel.h> |
3 | #include <linux/printk.h> |
4 | #include <linux/crypto.h> |
5 | #include <linux/rtnetlink.h> |
6 | |
7 | #include <crypto/aead.h> |
8 | #include <crypto/authenc.h> |
9 | #include <crypto/des.h> |
10 | #include <crypto/internal/aead.h> |
11 | #include <crypto/scatterwalk.h> |
12 | #include <crypto/gcm.h> |
13 | |
14 | #include "nitrox_dev.h" |
15 | #include "nitrox_common.h" |
16 | #include "nitrox_req.h" |
17 | |
18 | #define GCM_AES_SALT_SIZE 4 |
19 | |
/*
 * Third 16-bit general parameter word of the SE request header.
 * Packs the auth-data offset and the IV offset (one byte each);
 * the bitfield order flips with endianness so that @param always
 * has the same on-wire layout.
 */
union gph_p3 {
	struct {
#ifdef __BIG_ENDIAN_BITFIELD
		u16 iv_offset : 8;
		u16 auth_offset : 8;
#else
		u16 auth_offset : 8;
		u16 iv_offset : 8;
#endif
	};
	u16 param;	/* raw value written into gph.param3 */
};
32 | |
33 | static int nitrox_aes_gcm_setkey(struct crypto_aead *aead, const u8 *key, |
34 | unsigned int keylen) |
35 | { |
36 | int aes_keylen; |
37 | struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(tfm: aead); |
38 | struct flexi_crypto_context *fctx; |
39 | union fc_ctx_flags flags; |
40 | |
41 | aes_keylen = flexi_aes_keylen(keylen); |
42 | if (aes_keylen < 0) |
43 | return -EINVAL; |
44 | |
45 | /* fill crypto context */ |
46 | fctx = nctx->u.fctx; |
47 | flags.fu = be64_to_cpu(fctx->flags.f); |
48 | flags.w0.aes_keylen = aes_keylen; |
49 | fctx->flags.f = cpu_to_be64(flags.fu); |
50 | |
51 | /* copy enc key to context */ |
52 | memset(&fctx->crypto, 0, sizeof(fctx->crypto)); |
53 | memcpy(fctx->crypto.u.key, key, keylen); |
54 | |
55 | return 0; |
56 | } |
57 | |
58 | static int nitrox_aead_setauthsize(struct crypto_aead *aead, |
59 | unsigned int authsize) |
60 | { |
61 | struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(tfm: aead); |
62 | struct flexi_crypto_context *fctx = nctx->u.fctx; |
63 | union fc_ctx_flags flags; |
64 | |
65 | flags.fu = be64_to_cpu(fctx->flags.f); |
66 | flags.w0.mac_len = authsize; |
67 | fctx->flags.f = cpu_to_be64(flags.fu); |
68 | |
69 | aead->authsize = authsize; |
70 | |
71 | return 0; |
72 | } |
73 | |
74 | static int nitrox_aes_gcm_setauthsize(struct crypto_aead *aead, |
75 | unsigned int authsize) |
76 | { |
77 | switch (authsize) { |
78 | case 4: |
79 | case 8: |
80 | case 12: |
81 | case 13: |
82 | case 14: |
83 | case 15: |
84 | case 16: |
85 | break; |
86 | default: |
87 | return -EINVAL; |
88 | } |
89 | |
90 | return nitrox_aead_setauthsize(aead, authsize); |
91 | } |
92 | |
93 | static int alloc_src_sglist(struct nitrox_kcrypt_request *nkreq, |
94 | struct scatterlist *src, char *iv, int ivsize, |
95 | int buflen) |
96 | { |
97 | int nents = sg_nents_for_len(sg: src, len: buflen); |
98 | int ret; |
99 | |
100 | if (nents < 0) |
101 | return nents; |
102 | |
103 | /* IV entry */ |
104 | nents += 1; |
105 | /* Allocate buffer to hold IV and input scatterlist array */ |
106 | ret = alloc_src_req_buf(nkreq, nents, ivsize); |
107 | if (ret) |
108 | return ret; |
109 | |
110 | nitrox_creq_copy_iv(dst: nkreq->src, src: iv, size: ivsize); |
111 | nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen); |
112 | |
113 | return 0; |
114 | } |
115 | |
/*
 * Build the output side of the SE request: a buffer holding the ORH
 * and COMPLETION words plus a scatterlist array covering @buflen bytes
 * of @dst.  Returns 0, a negative sg_nents_for_len() error, or -ENOMEM.
 */
static int alloc_dst_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *dst, int ivsize, int buflen)
{
	int ret;
	int nents = sg_nents_for_len(dst, buflen);

	if (nents < 0)
		return nents;

	nents += 3;	/* extra entries for IV, ORH and COMPLETION */

	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen);

	return 0;
}
140 | |
/* Release the IV + input scatterlist buffer built by alloc_src_sglist(). */
static void free_src_sglist(struct nitrox_kcrypt_request *nkreq)
{
	kfree(nkreq->src);
}
145 | |
/* Release the ORH/COMPLETION + output scatterlist buffer built by
 * alloc_dst_sglist().
 */
static void free_dst_sglist(struct nitrox_kcrypt_request *nkreq)
{
	kfree(nkreq->dst);
}
150 | |
/*
 * Populate the SE crypto request shared by all GCM encrypt/decrypt
 * paths from the per-request context, then allocate the source and
 * destination scatterlists.  On failure after the source list has been
 * built, the source list is freed before returning.
 */
static int nitrox_set_creq(struct nitrox_aead_rctx *rctx)
{
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	union gph_p3 param3;
	int ret;

	creq->flags = rctx->flags;
	/* sleepable allocations only if the caller allows it */
	creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
		GFP_ATOMIC;

	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = rctx->ctrl_arg;	/* ENCRYPT or DECRYPT */

	/* general parameter words consumed by the SE microcode:
	 * param0 = payload length, param1 = payload + AAD length,
	 * param2 = offset past IV + AAD, param3 = auth/IV offsets
	 */
	creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
	creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
	creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
	param3.iv_offset = 0;
	param3.auth_offset = rctx->ivsize;
	creq->gph.param3 = cpu_to_be16(param3.param);

	creq->ctx_handle = rctx->ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	ret = alloc_src_sglist(&rctx->nkreq, rctx->src, rctx->iv, rctx->ivsize,
			       rctx->srclen);
	if (ret)
		return ret;

	ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize,
			       rctx->dstlen);
	if (ret) {
		/* undo the source allocation on the error path */
		free_src_sglist(&rctx->nkreq);
		return ret;
	}

	return 0;
}
189 | |
190 | static void nitrox_aead_callback(void *arg, int err) |
191 | { |
192 | struct aead_request *areq = arg; |
193 | struct nitrox_aead_rctx *rctx = aead_request_ctx(req: areq); |
194 | |
195 | free_src_sglist(nkreq: &rctx->nkreq); |
196 | free_dst_sglist(nkreq: &rctx->nkreq); |
197 | if (err) { |
198 | pr_err_ratelimited("request failed status 0x%0x\n" , err); |
199 | err = -EINVAL; |
200 | } |
201 | |
202 | aead_request_complete(req: areq, err); |
203 | } |
204 | |
205 | static inline bool nitrox_aes_gcm_assoclen_supported(unsigned int assoclen) |
206 | { |
207 | if (assoclen <= 512) |
208 | return true; |
209 | |
210 | return false; |
211 | } |
212 | |
/*
 * gcm(aes) encrypt entry point.  The first 4 IV bytes act as the salt
 * and live in the crypto context; the remaining bytes travel with the
 * request.  Output is AAD + ciphertext + tag, hence dstlen grows by
 * authsize.
 */
static int nitrox_aes_gcm_enc(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))
		return -EINVAL;

	/* salt = leading IV bytes, programmed into the context */
	memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

	rctx->cryptlen = areq->cryptlen;
	rctx->assoclen = areq->assoclen;
	rctx->srclen = areq->assoclen + areq->cryptlen;
	rctx->dstlen = rctx->srclen + aead->authsize;	/* room for the tag */
	rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];	/* per-request IV part */
	rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
	rctx->flags = areq->base.flags;
	rctx->ctx_handle = nctx->u.ctx_handle;
	rctx->src = areq->src;
	rctx->dst = areq->dst;
	rctx->ctrl_arg = ENCRYPT;
	ret = nitrox_set_creq(rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
					 areq);
}
246 | |
/*
 * gcm(aes) decrypt entry point.  Mirrors the encrypt path but the input
 * includes the tag (cryptlen excludes it) and the output shrinks by
 * authsize.
 */
static int nitrox_aes_gcm_dec(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))
		return -EINVAL;

	/* salt = leading IV bytes, programmed into the context */
	memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

	/* for decrypt, areq->cryptlen counts ciphertext plus the tag */
	rctx->cryptlen = areq->cryptlen - aead->authsize;
	rctx->assoclen = areq->assoclen;
	rctx->srclen = areq->cryptlen + areq->assoclen;
	rctx->dstlen = rctx->srclen - aead->authsize;	/* tag not copied out */
	rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];	/* per-request IV part */
	rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
	rctx->flags = areq->base.flags;
	rctx->ctx_handle = nctx->u.ctx_handle;
	rctx->src = areq->src;
	rctx->dst = areq->dst;
	rctx->ctrl_arg = DECRYPT;
	ret = nitrox_set_creq(rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
					 areq);
}
280 | |
/*
 * Common AEAD transform init: bind the transform to a nitrox device and
 * carve a flexi crypto context out of a freshly allocated context page.
 * The device reference taken here is dropped in nitrox_aead_exit().
 */
static int nitrox_aead_init(struct crypto_aead *aead)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct crypto_ctx_hdr *chdr;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	chdr = crypto_alloc_context(nctx->ndev);
	if (!chdr) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}
	nctx->chdr = chdr;
	/* device-visible context starts just past the header */
	nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
					 sizeof(struct ctx_hdr));
	nctx->u.fctx->flags.f = 0;

	return 0;
}
304 | |
/*
 * Init shared by gcm(aes) and rfc4106: run the generic AEAD init, then
 * program the GCM cipher configuration into the context flags word.
 * The flags are built in CPU order and stored big-endian at the end.
 */
static int nitrox_gcm_common_init(struct crypto_aead *aead)
{
	int ret;
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	union fc_ctx_flags *flags;

	ret = nitrox_aead_init(aead);
	if (ret)
		return ret;

	flags = &nctx->u.fctx->flags;
	flags->w0.cipher_type = CIPHER_AES_GCM;
	flags->w0.hash_type = AUTH_NULL;	/* GHASH done by the cipher */
	flags->w0.iv_source = IV_FROM_DPTR;	/* IV travels with the data */
	/* ask microcode to calculate ipad/opad */
	flags->w0.auth_input_type = 1;
	flags->f = cpu_to_be64(flags->fu);

	return 0;
}
325 | |
326 | static int nitrox_aes_gcm_init(struct crypto_aead *aead) |
327 | { |
328 | int ret; |
329 | |
330 | ret = nitrox_gcm_common_init(aead); |
331 | if (ret) |
332 | return ret; |
333 | |
334 | crypto_aead_set_reqsize(aead, |
335 | reqsize: sizeof(struct aead_request) + |
336 | sizeof(struct nitrox_aead_rctx)); |
337 | |
338 | return 0; |
339 | } |
340 | |
/*
 * AEAD transform teardown: scrub the key material from the device
 * context, free the context allocation and drop the device reference
 * taken in nitrox_aead_init().
 */
static void nitrox_aead_exit(struct crypto_aead *aead)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		/* memzero_explicit so the wipe cannot be optimised away */
		memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
		memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
		crypto_free_context((void *)nctx->chdr);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}
358 | |
359 | static int nitrox_rfc4106_setkey(struct crypto_aead *aead, const u8 *key, |
360 | unsigned int keylen) |
361 | { |
362 | struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(tfm: aead); |
363 | struct flexi_crypto_context *fctx = nctx->u.fctx; |
364 | int ret; |
365 | |
366 | if (keylen < GCM_AES_SALT_SIZE) |
367 | return -EINVAL; |
368 | |
369 | keylen -= GCM_AES_SALT_SIZE; |
370 | ret = nitrox_aes_gcm_setkey(aead, key, keylen); |
371 | if (ret) |
372 | return ret; |
373 | |
374 | memcpy(fctx->crypto.iv, key + keylen, GCM_AES_SALT_SIZE); |
375 | return 0; |
376 | } |
377 | |
378 | static int nitrox_rfc4106_setauthsize(struct crypto_aead *aead, |
379 | unsigned int authsize) |
380 | { |
381 | switch (authsize) { |
382 | case 8: |
383 | case 12: |
384 | case 16: |
385 | break; |
386 | default: |
387 | return -EINVAL; |
388 | } |
389 | |
390 | return nitrox_aead_setauthsize(aead, authsize); |
391 | } |
392 | |
/*
 * Rebuild the src/dst scatterlists so the per-request IV (the last 8
 * bytes of areq->assoclen) is excluded from the AAD the engine sees.
 * The real AAD is bounced through rctx->assoc and chained ahead of the
 * payload via scatterwalk_ffwd().
 */
static int nitrox_rfc4106_set_aead_rctx_sglist(struct aead_request *areq)
{
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx_dma(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	unsigned int assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	struct scatterlist *sg;

	/* rfc4106 AAD is 8 or 12 bytes plus the 8-byte explicit IV */
	if (areq->assoclen != 16 && areq->assoclen != 20)
		return -EINVAL;

	/* copy the true AAD (without the IV) into the bounce buffer */
	scatterwalk_map_and_copy(rctx->assoc, areq->src, 0, assoclen, 0);
	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, rctx->assoc, assoclen);
	/* fast-forward past AAD+IV and chain the payload behind the AAD */
	sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (areq->src != areq->dst) {
		/* out-of-place: build an equivalent destination list */
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, rctx->assoc, assoclen);
		sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_rctx->src = rctx->src;
	aead_rctx->dst = (areq->src == areq->dst) ? rctx->src : rctx->dst;

	return 0;
}
423 | |
424 | static void nitrox_rfc4106_callback(void *arg, int err) |
425 | { |
426 | struct aead_request *areq = arg; |
427 | struct nitrox_rfc4106_rctx *rctx = aead_request_ctx_dma(req: areq); |
428 | struct nitrox_kcrypt_request *nkreq = &rctx->base.nkreq; |
429 | |
430 | free_src_sglist(nkreq); |
431 | free_dst_sglist(nkreq); |
432 | if (err) { |
433 | pr_err_ratelimited("request failed status 0x%0x\n" , err); |
434 | err = -EINVAL; |
435 | } |
436 | |
437 | aead_request_complete(req: areq, err); |
438 | } |
439 | |
/*
 * rfc4106(gcm(aes)) encrypt entry point.  The 8-byte explicit IV is
 * carried at the tail of the AAD, so the effective assoclen shrinks by
 * GCM_RFC4106_IV_SIZE and the scatterlists are rebuilt accordingly.
 */
static int nitrox_rfc4106_enc(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx_dma(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
	int ret;

	aead_rctx->cryptlen = areq->cryptlen;
	aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;
	aead_rctx->dstlen = aead_rctx->srclen + aead->authsize;	/* + tag */
	aead_rctx->iv = areq->iv;
	aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
	aead_rctx->flags = areq->base.flags;
	aead_rctx->ctx_handle = nctx->u.ctx_handle;
	aead_rctx->ctrl_arg = ENCRYPT;

	ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
	if (ret)
		return ret;

	ret = nitrox_set_creq(aead_rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_rfc4106_callback, areq);
}
471 | |
472 | static int nitrox_rfc4106_dec(struct aead_request *areq) |
473 | { |
474 | struct crypto_aead *aead = crypto_aead_reqtfm(req: areq); |
475 | struct nitrox_crypto_ctx *nctx = crypto_aead_ctx_dma(tfm: aead); |
476 | struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(req: areq); |
477 | struct nitrox_aead_rctx *aead_rctx = &rctx->base; |
478 | struct se_crypto_request *creq = &aead_rctx->nkreq.creq; |
479 | int ret; |
480 | |
481 | aead_rctx->cryptlen = areq->cryptlen - aead->authsize; |
482 | aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE; |
483 | aead_rctx->srclen = |
484 | areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen; |
485 | aead_rctx->dstlen = aead_rctx->srclen - aead->authsize; |
486 | aead_rctx->iv = areq->iv; |
487 | aead_rctx->ivsize = GCM_RFC4106_IV_SIZE; |
488 | aead_rctx->flags = areq->base.flags; |
489 | aead_rctx->ctx_handle = nctx->u.ctx_handle; |
490 | aead_rctx->ctrl_arg = DECRYPT; |
491 | |
492 | ret = nitrox_rfc4106_set_aead_rctx_sglist(areq); |
493 | if (ret) |
494 | return ret; |
495 | |
496 | ret = nitrox_set_creq(rctx: aead_rctx); |
497 | if (ret) |
498 | return ret; |
499 | |
500 | /* send the crypto request */ |
501 | return nitrox_process_se_request(ndev: nctx->ndev, req: creq, |
502 | cb: nitrox_rfc4106_callback, cb_arg: areq); |
503 | } |
504 | |
505 | static int nitrox_rfc4106_init(struct crypto_aead *aead) |
506 | { |
507 | int ret; |
508 | |
509 | ret = nitrox_gcm_common_init(aead); |
510 | if (ret) |
511 | return ret; |
512 | |
513 | crypto_aead_set_reqsize_dma(aead, reqsize: sizeof(struct aead_request) + |
514 | sizeof(struct nitrox_rfc4106_rctx)); |
515 | |
516 | return 0; |
517 | } |
518 | |
/* AEAD algorithms offloaded to the NITROX engine. */
static struct aead_alg nitrox_aeads[] = { {
	.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "n5_aes_gcm",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = 1,	/* GCM is a stream mode */
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.setkey = nitrox_aes_gcm_setkey,
	.setauthsize = nitrox_aes_gcm_setauthsize,
	.encrypt = nitrox_aes_gcm_enc,
	.decrypt = nitrox_aes_gcm_dec,
	.init = nitrox_aes_gcm_init,
	.exit = nitrox_aead_exit,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
}, {
	.base = {
		.cra_name = "rfc4106(gcm(aes))",
		.cra_driver_name = "n5_rfc4106",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = 1,	/* GCM is a stream mode */
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.setkey = nitrox_rfc4106_setkey,
	.setauthsize = nitrox_rfc4106_setauthsize,
	.encrypt = nitrox_rfc4106_enc,
	.decrypt = nitrox_rfc4106_dec,
	.init = nitrox_rfc4106_init,
	.exit = nitrox_aead_exit,
	.ivsize = GCM_RFC4106_IV_SIZE,	/* 8-byte explicit IV */
	.maxauthsize = AES_BLOCK_SIZE,
} };
558 | |
/* Register all NITROX AEAD algorithms with the crypto API. */
int nitrox_register_aeads(void)
{
	return crypto_register_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}
563 | |
/* Unregister all NITROX AEAD algorithms from the crypto API. */
void nitrox_unregister_aeads(void)
{
	crypto_unregister_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}
568 | |