// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-256 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include "aegis.h"

#define AEGIS256_NONCE_SIZE 32
#define AEGIS256_STATE_BLOCKS 6
#define AEGIS256_KEY_SIZE 32
#define AEGIS256_MIN_AUTH_SIZE 8
#define AEGIS256_MAX_AUTH_SIZE 16

struct aegis_state {
	union aegis_block blocks[AEGIS256_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key[AEGIS256_KEY_SIZE / AEGIS_BLOCK_SIZE];
};

struct aegis256_ops {
	int (*skcipher_walk_init)(struct skcipher_walk *walk,
				  struct aead_request *req, bool atomic);

	void (*crypt_chunk)(struct aegis_state *state, u8 *dst,
			    const u8 *src, unsigned int size);
};

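/*
 * Round update of the 6 x 128-bit state, as in the AEGIS specification by
 * Wu and Preneel: each state word is replaced by one AES round of its
 * predecessor keyed with the word's old value (S'[i] = AESRound(S[i-1], S[i])),
 * with word 0 taking the pre-update last word as its predecessor.
 */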
static void crypto_aegis256_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS256_STATE_BLOCKS - 1];
	for (i = AEGIS256_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

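/*
 * Absorb one 16-byte block into the state: run the round update, then XOR
 * the message block into state word 0. The _a variant expects an aligned
 * block; the _u variant handles unaligned input via crypto_xor().
 */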
static void crypto_aegis256_update_a(struct aegis_state *state,
				     const union aegis_block *msg)
{
	crypto_aegis256_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis256_update_u(struct aegis_state *state, const void *msg)
{
	crypto_aegis256_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}

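/*
 * Initialize the state from the 256-bit key and 256-bit nonce per the
 * specification: S0 = k0 ^ iv0, S1 = k1 ^ iv1, S2 = const1, S3 = const0,
 * S4 = k0 ^ const0, S5 = k1 ^ const1, followed by 16 update steps that
 * cyclically absorb the key halves and the key ^ nonce halves.
 */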
static void crypto_aegis256_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv[2];
	unsigned int i;

	key_iv[0] = key[0];
	key_iv[1] = key[1];
	crypto_xor(key_iv[0].bytes, iv + 0 * AEGIS_BLOCK_SIZE,
		   AEGIS_BLOCK_SIZE);
	crypto_xor(key_iv[1].bytes, iv + 1 * AEGIS_BLOCK_SIZE,
		   AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv[0];
	state->blocks[1] = key_iv[1];
	state->blocks[2] = crypto_aegis_const[1];
	state->blocks[3] = crypto_aegis_const[0];
	state->blocks[4] = key[0];
	state->blocks[5] = key[1];

	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[5], &crypto_aegis_const[1]);

	for (i = 0; i < 4; i++) {
		crypto_aegis256_update_a(state, &key[0]);
		crypto_aegis256_update_a(state, &key[1]);
		crypto_aegis256_update_a(state, &key_iv[0]);
		crypto_aegis256_update_a(state, &key_iv[1]);
	}
}

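/*
 * Absorb as many full blocks of associated data as possible, picking the
 * aligned or unaligned update path once up front. Any sub-block tail is
 * left for the caller to buffer.
 */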
static void crypto_aegis256_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis256_update_a(state, src_blk);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis256_update_u(state, src);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}

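/*
 * Encrypt a chunk: for each block the keystream is
 * S1 ^ S4 ^ S5 ^ (S2 & S3), the plaintext block is absorbed into the
 * state, and keystream ^ plaintext is written out. A partial final block
 * is zero-padded to a full block before being absorbed, and only 'size'
 * ciphertext bytes are copied back.
 */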
static void crypto_aegis256_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[5]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis256_update_a(state, src_blk);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[5]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis256_update_u(state, src);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[5]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis256_update_a(state, &msg);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}

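/*
 * Decrypt a chunk: same keystream as encryption, but the recovered
 * plaintext (not the ciphertext) is absorbed into the state. For a
 * partial final block the decrypted bytes beyond 'size' are cleared
 * before the state update, so the zero-padded block absorbed here
 * matches what encryption absorbed.
 */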
static void crypto_aegis256_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[5]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis256_update_a(state, &tmp);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[5]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis256_update_a(state, &tmp);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[5]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis256_update_a(state, &msg);

		memcpy(dst, msg.bytes, size);
	}
}

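/*
 * Feed the associated data from a scatterlist into the state, buffering
 * partial blocks in 'buf' across scatterlist entries so that only whole
 * 16-byte blocks are ever absorbed; a final partial block is zero-padded.
 */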
static void crypto_aegis256_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis256_update_a(state, &buf);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis256_ad(state, src, left);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis256_update_a(state, &buf);
	}
}

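/*
 * Walk the request's source/destination scatterlists with the skcipher
 * walker and feed each mapped span to the chunk handler, rounding down to
 * the walk stride on all but the final step.
 */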
static void crypto_aegis256_process_crypt(struct aegis_state *state,
					  struct aead_request *req,
					  const struct aegis256_ops *ops)
{
	struct skcipher_walk walk;

	ops->skcipher_walk_init(&walk, req, false);

	while (walk.nbytes) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
				 nbytes);

		skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}
}

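/*
 * Finalize: absorb the associated-data and message lengths (in bits, as
 * little-endian 64-bit words) XORed with S3, run seven more update steps,
 * then XOR all six state words into the tag.
 */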
static void crypto_aegis256_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis256_update_a(state, &tmp);

	for (i = 0; i < AEGIS256_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

static int crypto_aegis256_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS256_KEY_SIZE) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key[0].bytes, key, AEGIS_BLOCK_SIZE);
	memcpy(ctx->key[1].bytes, key + AEGIS_BLOCK_SIZE,
	       AEGIS_BLOCK_SIZE);
	return 0;
}

static int crypto_aegis256_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS256_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS256_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

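/*
 * Common encrypt/decrypt path: initialize from key and nonce, absorb the
 * associated data, transform the payload, then XOR the computed tag into
 * *tag_xor. The caller pre-loads *tag_xor (zeros for encryption, the
 * received tag for decryption) so decryption can verify by comparing the
 * result against zero.
 */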
static void crypto_aegis256_crypt(struct aead_request *req,
				  union aegis_block *tag_xor,
				  unsigned int cryptlen,
				  const struct aegis256_ops *ops)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct aegis_state state;

	crypto_aegis256_init(&state, ctx->key, req->iv);
	crypto_aegis256_process_ad(&state, req->src, req->assoclen);
	crypto_aegis256_process_crypt(&state, req, ops);
	crypto_aegis256_final(&state, tag_xor, req->assoclen, cryptlen);
}

static int crypto_aegis256_encrypt(struct aead_request *req)
{
	static const struct aegis256_ops ops = {
		.skcipher_walk_init = skcipher_walk_aead_encrypt,
		.crypt_chunk = crypto_aegis256_encrypt_chunk,
	};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen;

	crypto_aegis256_crypt(req, &tag, cryptlen, &ops);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

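/*
 * Decrypt: the expected tag is copied from the end of the source buffer
 * into 'tag' before the crypt pass, so after finalization a matching tag
 * leaves all-zero bytes; crypto_memneq() performs the comparison without
 * data-dependent timing.
 */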
static int crypto_aegis256_decrypt(struct aead_request *req)
{
	static const struct aegis256_ops ops = {
		.skcipher_walk_init = skcipher_walk_aead_decrypt,
		.crypt_chunk = crypto_aegis256_decrypt_chunk,
	};
	static const u8 zeros[AEGIS256_MAX_AUTH_SIZE] = {};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	crypto_aegis256_crypt(req, &tag, cryptlen, &ops);

	return crypto_memneq(tag.bytes, zeros, authsize) ? -EBADMSG : 0;
}

static int crypto_aegis256_init_tfm(struct crypto_aead *tfm)
{
	return 0;
}

static void crypto_aegis256_exit_tfm(struct crypto_aead *tfm)
{
}

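/*
 * A minimal sketch of in-kernel usage through the AEAD API (error
 * handling and scatterlist setup omitted; 'src_sg', 'dst_sg', 'assoclen',
 * 'cryptlen' and 'iv' are illustrative placeholders):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("aegis256", 0, 0);
 *	struct aead_request *req;
 *
 *	crypto_aead_setkey(tfm, key, AEGIS256_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, AEGIS256_MAX_AUTH_SIZE);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	crypto_aead_encrypt(req);
 */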
static struct aead_alg crypto_aegis256_alg = {
	.setkey = crypto_aegis256_setkey,
	.setauthsize = crypto_aegis256_setauthsize,
	.encrypt = crypto_aegis256_encrypt,
	.decrypt = crypto_aegis256_decrypt,
	.init = crypto_aegis256_init_tfm,
	.exit = crypto_aegis256_exit_tfm,

	.ivsize = AEGIS256_NONCE_SIZE,
	.maxauthsize = AEGIS256_MAX_AUTH_SIZE,
	.chunksize = AEGIS_BLOCK_SIZE,

	.base = {
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct aegis_ctx),
		.cra_alignmask = 0,

		.cra_priority = 100,

		.cra_name = "aegis256",
		.cra_driver_name = "aegis256-generic",

		.cra_module = THIS_MODULE,
	}
};

static int __init crypto_aegis256_module_init(void)
{
	return crypto_register_aead(&crypto_aegis256_alg);
}

static void __exit crypto_aegis256_module_exit(void)
{
	crypto_unregister_aead(&crypto_aegis256_alg);
}

module_init(crypto_aegis256_module_init);
module_exit(crypto_aegis256_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-256 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis256");
MODULE_ALIAS_CRYPTO("aegis256-generic");