// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2015, 2020 Linaro Limited
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 * Copyright (c) 2001-2007, Tom St Denis
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/* LibTomCrypt, modular cryptographic library -- Tom St Denis
 *
 * LibTomCrypt is a library that provides various cryptographic
 * algorithms in a highly modular and flexible manner.
 *
 * The library is free for all purposes without any express
 * guarantee it works.
 *
 * Tom St Denis, tomstdenis@gmail.com, http://libtom.org
 */

/*
 * AES cipher for ARMv8 with Crypto Extensions
 */

#include <assert.h>
#include <crypto/crypto_accel.h>
#include <kernel/thread.h>
#include <string.h>
#include <types_ext.h>
#include <utee_defines.h>
#include <util.h>

#include "aes_armv8a_ce.h"

struct aes_block {
	uint8_t b[TEE_AES_BLOCK_SIZE];
};

static uint32_t ror32(uint32_t val, unsigned int shift)
{
	return (val >> shift) | (val << (32 - shift));
}

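/*
 * Expand the raw key, already copied to the start of @enc_key, in place
 * into the full schedule of round keys (FIPS-197 key expansion): each new
 * round key is derived from the previous one with SubWord/RotWord
 * (ce_aes_sub() provides the SubWord step) and the rcon[] round constants.
 */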
static void expand_enc_key(uint32_t *enc_key, size_t key_len)
{
	/* The AES key schedule round constants */
	static uint8_t const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};
	unsigned int kwords = key_len / sizeof(uint32_t);
	unsigned int i = 0;

	for (i = 0; i < sizeof(rcon); i++) {
		uint32_t *rki = enc_key + i * kwords;
		uint32_t *rko = rki + kwords;

		rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8) ^
			 rcon[i] ^ rki[0];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

		if (key_len == 24) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == 32) {
			if (i >= 6)
				break;
			rko[4] = ce_aes_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}
}

static void make_dec_key(unsigned int round_count,
			 const struct aes_block *key_enc,
			 struct aes_block *key_dec)
{
	unsigned int i = 0;
	unsigned int j = round_count;

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last ones.
	 */
	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		ce_aes_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];
}

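/*
 * Illustrative use of crypto_accel_aes_expand_keys() (a sketch, not a
 * caller taken from this tree): @key points to the raw key bytes. A
 * 256-bit key uses 14 rounds, i.e. 15 round-key blocks per direction, so
 * each expanded key buffer must hold at least 15 * TEE_AES_BLOCK_SIZE
 * bytes:
 *
 *	struct aes_block enc[15] = { };
 *	struct aes_block dec[15] = { };
 *	unsigned int rounds = 0;
 *	TEE_Result res = TEE_SUCCESS;
 *
 *	res = crypto_accel_aes_expand_keys(key, 32, enc, dec, sizeof(enc),
 *					   &rounds);
 */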
TEE_Result crypto_accel_aes_expand_keys(const void *key, size_t key_len,
					void *enc_key, void *dec_key,
					size_t expanded_key_len,
					unsigned int *round_count)
{
	unsigned int num_rounds = 0;
	uint32_t vfp_state = 0;

	if (!key || !enc_key)
		return TEE_ERROR_BAD_PARAMETERS;
	if (key_len != 16 && key_len != 24 && key_len != 32)
		return TEE_ERROR_BAD_PARAMETERS;
	if (!IS_ALIGNED_WITH_TYPE(enc_key, struct aes_block) ||
	    !IS_ALIGNED_WITH_TYPE(dec_key, struct aes_block))
		return TEE_ERROR_BAD_PARAMETERS;

	/* 10, 12 or 14 rounds for 128, 192 or 256 bit keys */
	num_rounds = 10 + ((key_len / 8) - 2) * 2;

	if (expanded_key_len < (num_rounds + 1) * sizeof(struct aes_block))
		return TEE_ERROR_BAD_PARAMETERS;

	*round_count = num_rounds;
	memset(enc_key, 0, expanded_key_len);
	memcpy(enc_key, key, key_len);

	vfp_state = thread_kernel_enable_vfp();

	expand_enc_key(enc_key, key_len);
	if (dec_key)
		make_dec_key(num_rounds, enc_key, dec_key);

	thread_kernel_disable_vfp(vfp_state);

	return TEE_SUCCESS;
}

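/*
 * The block-mode primitives below all follow the same pattern: the actual
 * work is done by the ce_aes_*() assembler routines, which use the
 * Advanced SIMD/Crypto Extension registers, so each call is bracketed by
 * thread_kernel_enable_vfp()/thread_kernel_disable_vfp() to grant and
 * revoke use of those registers in kernel mode.
 */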
void crypto_accel_aes_ecb_enc(void *out, const void *in, const void *key,
			      unsigned int round_count,
			      unsigned int block_count)
{
	uint32_t vfp_state = 0;

	assert(out && in && key);

	vfp_state = thread_kernel_enable_vfp();
	ce_aes_ecb_encrypt(out, in, key, round_count, block_count, 1);
	thread_kernel_disable_vfp(vfp_state);
}

void crypto_accel_aes_ecb_dec(void *out, const void *in, const void *key,
			      unsigned int round_count,
			      unsigned int block_count)
{
	uint32_t vfp_state = 0;

	assert(out && in && key);

	vfp_state = thread_kernel_enable_vfp();
	ce_aes_ecb_decrypt(out, in, key, round_count, block_count, 1);
	thread_kernel_disable_vfp(vfp_state);
}

void crypto_accel_aes_cbc_enc(void *out, const void *in, const void *key,
			      unsigned int round_count,
			      unsigned int block_count, void *iv)
{
	uint32_t vfp_state = 0;

	assert(out && in && key && iv);

	vfp_state = thread_kernel_enable_vfp();
	ce_aes_cbc_encrypt(out, in, key, round_count, block_count, iv);
	thread_kernel_disable_vfp(vfp_state);
}

void crypto_accel_aes_cbc_dec(void *out, const void *in, const void *key,
			      unsigned int round_count,
			      unsigned int block_count, void *iv)
{
	uint32_t vfp_state = 0;

	assert(out && in && key && iv);

	vfp_state = thread_kernel_enable_vfp();
	ce_aes_cbc_decrypt(out, in, key, round_count, block_count, iv);
	thread_kernel_disable_vfp(vfp_state);
}

void crypto_accel_aes_ctr_be_enc(void *out, const void *in, const void *key,
				 unsigned int round_count,
				 unsigned int block_count, void *iv)
{
	uint32_t vfp_state = 0;

	assert(out && in && key && iv);

	vfp_state = thread_kernel_enable_vfp();
	ce_aes_ctr_encrypt(out, in, key, round_count, block_count, iv, 1);
	thread_kernel_disable_vfp(vfp_state);
}

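/*
 * XTS mode: following the usual IEEE P1619 split of the key material,
 * @key1 is the expanded key schedule used for the data blocks, @key2 the
 * expanded key schedule used for the tweak, and @tweak the 16-byte
 * per-sector tweak value. The exact handling of these arguments is in the
 * ce_aes_xts_*() assembler routines.
 */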
void crypto_accel_aes_xts_enc(void *out, const void *in, const void *key1,
			      unsigned int round_count,
			      unsigned int block_count, const void *key2,
			      void *tweak)
{
	uint32_t vfp_state = 0;

	assert(out && in && key1 && key2 && tweak);

	vfp_state = thread_kernel_enable_vfp();
	ce_aes_xts_encrypt(out, in, key1, round_count, block_count, key2,
			   tweak);
	thread_kernel_disable_vfp(vfp_state);
}

void crypto_accel_aes_xts_dec(void *out, const void *in, const void *key1,
			      unsigned int round_count,
			      unsigned int block_count, const void *key2,
			      void *tweak)
{
	uint32_t vfp_state = 0;

	assert(out && in && key1 && key2 && tweak);

	vfp_state = thread_kernel_enable_vfp();
	ce_aes_xts_decrypt(out, in, key1, round_count, block_count, key2,
			   tweak);
	thread_kernel_disable_vfp(vfp_state);
}