// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Single-block cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

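/*
 * Copy the key into a buffer aligned to the cipher's alignment mask, run
 * ->cia_setkey() on the aligned copy, then wipe and free the temporary
 * buffer so no key material is left behind.
 */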
static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;
}

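/*
 * Validate the key length against the algorithm's limits and dispatch to
 * ->cia_setkey(), bouncing through setkey_unaligned() when the caller's
 * key buffer does not satisfy the cipher's alignment mask.
 */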
int crypto_cipher_setkey(struct crypto_cipher *tfm,
			 const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);

	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
		return -EINVAL;

	if ((unsigned long)key & alignmask)
		return setkey_unaligned(tfm, key, keylen);

	return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL);

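/*
 * Encrypt or decrypt a single block.  If either buffer is misaligned for
 * this cipher, the block is bounced through an aligned on-stack buffer
 * before and after calling the algorithm's single-block routine.
 */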
static inline void cipher_crypt_one(struct crypto_cipher *tfm,
				    u8 *dst, const u8 *src, bool enc)
{
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		enc ? cia->cia_encrypt : cia->cia_decrypt;

	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
		unsigned int bs = crypto_cipher_blocksize(tfm);
		u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

		memcpy(tmp, src, bs);
		fn(crypto_cipher_tfm(tfm), tmp, tmp);
		memcpy(dst, tmp, bs);
	} else {
		fn(crypto_cipher_tfm(tfm), dst, src);
	}
}

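/* Encrypt a single block of @src into @dst with @tfm. */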
void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, true);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL);

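/* Decrypt a single block of @src into @dst with @tfm. */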
void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, false);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL);

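/*
 * Clone a cipher transform.  Algorithms with a ->cra_init() hook may hold
 * per-tfm state that cannot be duplicated here, so cloning them is refused
 * with -ENOSYS.  Otherwise a fresh tfm is allocated atomically against the
 * same algorithm (-ESTALE if its module reference cannot be taken) and
 * inherits the original's crt_flags.
 */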
struct crypto_cipher *crypto_clone_cipher(struct crypto_cipher *cipher)
{
	struct crypto_tfm *tfm = crypto_cipher_tfm(cipher);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto_cipher *ncipher;
	struct crypto_tfm *ntfm;

	if (alg->cra_init)
		return ERR_PTR(-ENOSYS);

	if (unlikely(!crypto_mod_get(alg)))
		return ERR_PTR(-ESTALE);

	ntfm = __crypto_alloc_tfmgfp(alg, CRYPTO_ALG_TYPE_CIPHER,
				     CRYPTO_ALG_TYPE_MASK, GFP_ATOMIC);
	if (IS_ERR(ntfm)) {
		crypto_mod_put(alg);
		return ERR_CAST(ntfm);
	}

	ntfm->crt_flags = tfm->crt_flags;

	ncipher = __crypto_cipher_cast(ntfm);

	return ncipher;
}
EXPORT_SYMBOL_GPL(crypto_clone_cipher);
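
/*
 * Illustrative sketch (not part of this file): how a caller might drive the
 * single-block cipher API above.  Assumes an "aes" implementation is
 * available and that the calling module imports the CRYPTO_INTERNAL
 * namespace (MODULE_IMPORT_NS(CRYPTO_INTERNAL)), since these symbols are
 * exported into it:
 *
 *	struct crypto_cipher *tfm;
 *	u8 block[16];
 *	int err;
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_cipher_setkey(tfm, key, keylen);
 *	if (!err)
 *		crypto_cipher_encrypt_one(tfm, block, block);
 *
 *	crypto_free_cipher(tfm);
 */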