// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 * Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <asm/fpumacro.h>
#include <asm/opcodes.h>
#include <asm/pstate.h>
#include <asm/elf.h>

struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

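/*
 * Per-transform context: the expanded key schedule, stored as u64s
 * for the assembly routines, plus the dispatch table matching the
 * key size.
 */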
struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};

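/* All of the routines declared below are implemented in sparc64
 * assembly using the AES opcodes.
 */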
extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

static struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

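	/*
	 * The expanded schedule sizes below are the standard AES ones:
	 * 11, 13 and 15 round keys of 16 bytes each, i.e. 176 (0xb0),
	 * 208 (0xd0) and 240 (0xf0) bytes for 128/192/256-bit keys.
	 */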
	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}

static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
				unsigned int key_len)
{
	return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
}

static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
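	/* Done with the FPU: clear the FPRS enable/dirty bits that the
	 * assembly routines set while using the FP register file.
	 */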
	fprs_write(0);
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
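	/*
	 * Decryption consumes the round keys in reverse order, so the
	 * assembly is handed a pointer to the end of the expanded
	 * schedule.
	 */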
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

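/*
 * Handle the final partial block: encrypt the counter into a
 * keystream block, XOR just the bytes that remain, then advance the
 * counter.
 */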
static void ctr_crypt_final(const struct crypto_sparc64_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
			      keystream, AES_BLOCK_SIZE);
	crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
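	/* Process all full blocks; a trailing partial block, if any, is
	 * handled by ctr_crypt_final() below.
	 */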
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		ctx->ops->ctr_crypt(&ctx->key[0], walk.src.virt.addr,
				    walk.dst.virt.addr,
				    round_down(nbytes, AES_BLOCK_SIZE),
				    walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	fprs_write(0);
	return err;
}

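/* Plain single-block AES cipher; the ECB/CBC/CTR skciphers below
 * share the same context layout and setkey logic.
 */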
static struct crypto_alg cipher_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= crypto_aes_encrypt,
			.cia_decrypt		= crypto_aes_decrypt
		}
	}
};

static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "ctr(aes)",
		.base.cra_driver_name	= "ctr-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
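		/* CTR is a stream cipher (blocksize 1); chunksize tells
		 * the walk code the underlying block granularity.
		 */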
		.chunksize		= AES_BLOCK_SIZE,
	}
};

static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

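	/* %asr26 is the Compatibility Feature Register; the CFR_AES bit
	 * advertises the AES opcodes on this cpu.
	 */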
	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}

static int __init aes_sparc64_mod_init(void)
{
	int err;

	if (!sparc64_has_aes_opcode()) {
		pr_info("sparc64 aes opcodes not available.\n");
		return -ENODEV;
	}
	pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
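	/* Register the single-block cipher first, then the skcipher
	 * modes; unregister the former if the latter fails.
	 */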
	err = crypto_register_alg(&cipher_alg);
	if (err)
		return err;
	err = crypto_register_skciphers(skcipher_algs,
					ARRAY_SIZE(skcipher_algs));
	if (err)
		crypto_unregister_alg(&cipher_alg);
	return err;
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_alg(&cipher_alg);
	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS_CRYPTO("aes");

#include "crop_devid.c"