/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4-GCM AEAD Algorithm using ARMv8 Crypto Extensions
 * as specified in rfc8998
 * https://datatracker.ietf.org/doc/html/rfc8998
 *
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/neon.h>
#include <crypto/b128ops.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
#include "sm4-ce.h"

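/*
 * Low-level primitives implemented in the accompanying assembly core
 * (sm4-ce-gcm-core.S): GHASH table setup, the PMULL-based GHASH update,
 * and the fused SM4-CTR + GHASH encrypt/decrypt loops.
 */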
asmlinkage void sm4_ce_pmull_ghash_setup(const u32 *rkey_enc, u8 *ghash_table);
asmlinkage void pmull_ghash_update(const u8 *ghash_table, u8 *ghash,
				   const u8 *src, unsigned int nblocks);
asmlinkage void sm4_ce_pmull_gcm_enc(const u32 *rkey_enc, u8 *dst,
				     const u8 *src, u8 *iv,
				     unsigned int nbytes, u8 *ghash,
				     const u8 *ghash_table, const u8 *lengths);
asmlinkage void sm4_ce_pmull_gcm_dec(const u32 *rkey_enc, u8 *dst,
				     const u8 *src, u8 *iv,
				     unsigned int nbytes, u8 *ghash,
				     const u8 *ghash_table, const u8 *lengths);

#define GHASH_BLOCK_SIZE	16
#define GCM_IV_SIZE		12

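/*
 * Per-tfm context: the expanded SM4 key plus 64 bytes of precomputed
 * GHASH key material derived from it. The size (16 * 4) suggests four
 * 16-byte entries, presumably powers of the hash key H for aggregated
 * PMULL reduction; the exact layout is private to the assembly core.
 */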
struct sm4_gcm_ctx {
	struct sm4_ctx key;
	u8 ghash_table[16 * 4];
};

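/*
 * Expand the SM4 key and derive the GHASH table from it. Both steps
 * run inside a kernel_neon_begin()/kernel_neon_end() section since the
 * crypto extension instructions use the NEON register file.
 */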
static int gcm_setkey(struct crypto_aead *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct sm4_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	kernel_neon_begin();

	sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
			  crypto_sm4_fk, crypto_sm4_ck);
	sm4_ce_pmull_ghash_setup(ctx->key.rkey_enc, ctx->ghash_table);

	kernel_neon_end();
	return 0;
}

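/*
 * GCM permits tag lengths of 96..128 bits as well as truncated 64- and
 * 32-bit tags (NIST SP 800-38D); reject everything else.
 */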
static int gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12 ... 16:
		return 0;
	default:
		return -EINVAL;
	}
}

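/*
 * Hash the associated data into @ghash. The AAD is walked one
 * scatterlist entry at a time: partial blocks are staged in a small
 * aligned buffer, full blocks are fed to the PMULL GHASH core, and a
 * trailing partial block is zero-padded before the final update.
 * Must be called inside a kernel_neon_begin() section.
 */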
static void gcm_calculate_auth_mac(struct aead_request *req, u8 ghash[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
	u8 __aligned(8) buffer[GHASH_BLOCK_SIZE];
	u32 assoclen = req->assoclen;
	struct scatter_walk walk;
	unsigned int buflen = 0;

	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, assoclen);
		u8 *p, *ptr;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, assoclen);
		}

		p = ptr = scatterwalk_map(&walk);
		assoclen -= n;
		scatterwalk_advance(&walk, n);

		if (n + buflen < GHASH_BLOCK_SIZE) {
			memcpy(&buffer[buflen], ptr, n);
			buflen += n;
		} else {
			unsigned int nblocks;

			if (buflen) {
				unsigned int l = GHASH_BLOCK_SIZE - buflen;

				memcpy(&buffer[buflen], ptr, l);
				ptr += l;
				n -= l;

				pmull_ghash_update(ctx->ghash_table, ghash,
						   buffer, 1);
			}

			nblocks = n / GHASH_BLOCK_SIZE;
			if (nblocks) {
				pmull_ghash_update(ctx->ghash_table, ghash,
						   ptr, nblocks);
				ptr += nblocks * GHASH_BLOCK_SIZE;
			}

			buflen = n % GHASH_BLOCK_SIZE;
			if (buflen)
				memcpy(&buffer[0], ptr, buflen);
		}

		scatterwalk_unmap(p);
		scatterwalk_done(&walk, 0, assoclen);
	} while (assoclen);

	/* padding with '0' */
	if (buflen) {
		memset(&buffer[buflen], 0, GHASH_BLOCK_SIZE - buflen);
		pmull_ghash_update(ctx->ghash_table, ghash, buffer, 1);
	}
}

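/*
 * Common bulk path for encryption and decryption. The CTR counter
 * starts at 2 because counter value 1 (J0 = IV || 1) is reserved for
 * E(K, J0), which masks the final GHASH value into the tag. Passing
 * @lengths on the last call tells the assembly core to fold in
 * len(A) || len(C) and finalize the tag into @ghash; intermediate
 * calls pass NULL. The NEON section is dropped around
 * skcipher_walk_done(), which may fault or sleep while walking the
 * scatterlist.
 */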
static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk,
		     u8 ghash[], int err,
		     void (*sm4_ce_pmull_gcm_crypt)(const u32 *rkey_enc,
				u8 *dst, const u8 *src, u8 *iv,
				unsigned int nbytes, u8 *ghash,
				const u8 *ghash_table, const u8 *lengths))
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
	u8 __aligned(8) iv[SM4_BLOCK_SIZE];
	be128 __aligned(8) lengths;

	memset(ghash, 0, SM4_BLOCK_SIZE);

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(walk->total * 8);

	memcpy(iv, req->iv, GCM_IV_SIZE);
	put_unaligned_be32(2, iv + GCM_IV_SIZE);

	kernel_neon_begin();

	if (req->assoclen)
		gcm_calculate_auth_mac(req, ghash);

	while (walk->nbytes) {
		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;
		const u8 *src = walk->src.virt.addr;
		u8 *dst = walk->dst.virt.addr;

		if (walk->nbytes == walk->total) {
			sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
					       walk->nbytes, ghash,
					       ctx->ghash_table,
					       (const u8 *)&lengths);

			kernel_neon_end();

			return skcipher_walk_done(walk, 0);
		}

		sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
				       walk->nbytes - tail, ghash,
				       ctx->ghash_table, NULL);

		kernel_neon_end();

		err = skcipher_walk_done(walk, tail);

		kernel_neon_begin();
	}

	sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, NULL, NULL, iv,
			       walk->nbytes, ghash, ctx->ghash_table,
			       (const u8 *)&lengths);

	kernel_neon_end();

	return err;
}

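/* Encrypt, then append the computed tag to the end of the ciphertext. */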
static int gcm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 __aligned(8) ghash[SM4_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_aead_encrypt(&walk, req, false);
	err = gcm_crypt(req, &walk, ghash, err, sm4_ce_pmull_gcm_enc);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(ghash, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

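/*
 * Decrypt, recompute the tag over the result, and compare it against
 * the tag stored at the end of the input using the constant-time
 * crypto_memneq() to avoid leaking timing information.
 */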
static int gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	u8 __aligned(8) ghash[SM4_BLOCK_SIZE];
	u8 authtag[SM4_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_aead_decrypt(&walk, req, false);
	err = gcm_crypt(req, &walk, ghash, err, sm4_ce_pmull_gcm_dec);
	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(authtag, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(authtag, ghash, authsize))
		return -EBADMSG;

	return 0;
}

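/*
 * GCM is a stream mode, so cra_blocksize is 1; priority 400 ranks this
 * driver above generic gcm(sm4) template compositions.
 */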
static struct aead_alg sm4_gcm_alg = {
	.base = {
		.cra_name		= "gcm(sm4)",
		.cra_driver_name	= "gcm-sm4-ce",
		.cra_priority		= 400,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct sm4_gcm_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= GCM_IV_SIZE,
	.chunksize	= SM4_BLOCK_SIZE,
	.maxauthsize	= SM4_BLOCK_SIZE,
	.setkey		= gcm_setkey,
	.setauthsize	= gcm_setauthsize,
	.encrypt	= gcm_encrypt,
	.decrypt	= gcm_decrypt,
};

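/*
 * module_cpu_feature_match() below gates loading on the SM4 CPU
 * feature; GHASH additionally needs PMULL, so probe it explicitly.
 */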
static int __init sm4_ce_gcm_init(void)
{
	if (!cpu_have_named_feature(PMULL))
		return -ENODEV;

	return crypto_register_aead(&sm4_gcm_alg);
}

static void __exit sm4_ce_gcm_exit(void)
{
	crypto_unregister_aead(&sm4_gcm_alg);
}

static const struct cpu_feature __maybe_unused sm4_ce_gcm_cpu_feature[] = {
	{ cpu_feature(PMULL) },
	{}
};
MODULE_DEVICE_TABLE(cpu, sm4_ce_gcm_cpu_feature);

module_cpu_feature_match(SM4, sm4_ce_gcm_init);
module_exit(sm4_ce_gcm_exit);

MODULE_DESCRIPTION("Synchronous SM4 in GCM mode using ARMv8 Crypto Extensions");
MODULE_ALIAS_CRYPTO("gcm(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");
