// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2017-2020, Linaro Limited
 */

#include <assert.h>
#include <crypto/crypto_accel.h>
#include <crypto/crypto.h>
#include <crypto/ghash-ce-core.h>
#include <crypto/internal_aes-gcm.h>
#include <io.h>
#include <kernel/panic.h>
#include <kernel/thread.h>
#include <string.h>
#include <types_ext.h>

#include "aes_armv8a_ce.h"

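/*
 * Convert between the big-endian byte strings used for the GHASH
 * state and the pair of native uint64_t words used while computing.
 * Note the word swap: index 1 holds the most significant 64 bits.
 */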
static void get_be_block(void *dst, const void *src)
{
        uint64_t *d = dst;

        d[1] = get_be64(src);
        d[0] = get_be64((const uint8_t *)src + 8);
}

static void put_be_block(void *dst, const void *src)
{
        const uint64_t *s = src;

        put_be64(dst, s[1]);
        put_be64((uint8_t *)dst + 8, s[0]);
}

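/*
 * Convert the hash subkey into the pre-shifted form used by the PMULL
 * routines: the 128-bit value is shifted left by one bit and, if the
 * top bit was set, the GHASH reduction constant 0xc200000000000000 is
 * folded in. This mirrors the key setup in the Linux kernel's
 * ghash-ce driver.
 */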
static void ghash_reflect(uint64_t h[2], const uint64_t k[2])
{
        uint64_t b = get_be64(k);
        uint64_t a = get_be64(k + 1);

        h[0] = (a << 1) | (b >> 63);
        h[1] = (b << 1) | (a >> 63);
        if (b >> 63)
                h[1] ^= 0xc200000000000000UL;
}

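/*
 * Derive the GHASH key material from the AES key. state->ctr is
 * expected to still be all zeros here (the caller has just cleared
 * the state), so k is the hash subkey H = E_K(0^128). The powers H^2,
 * H^3 and H^4 are precomputed with internal_aes_gcm_gfmul() so the
 * PMULL routines can fold several blocks per iteration.
 */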
void internal_aes_gcm_set_key(struct internal_aes_gcm_state *state,
                              const struct internal_aes_gcm_key *enc_key)
{
        uint64_t k[2] = { 0 };
        uint64_t h[2] = { 0 };

        crypto_aes_enc_block(enc_key->data, sizeof(enc_key->data),
                             enc_key->rounds, state->ctr, k);

        ghash_reflect(state->ghash_key.h, k);

        internal_aes_gcm_gfmul(k, k, h);
        ghash_reflect(state->ghash_key.h2, h);

        internal_aes_gcm_gfmul(k, h, h);
        ghash_reflect(state->ghash_key.h3, h);

        internal_aes_gcm_gfmul(k, h, h);
        ghash_reflect(state->ghash_key.h4, h);
}

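/*
 * Dispatch to the GHASH implementation selected at build time: the
 * 64-bit polynomial multiply instruction when the CPU has it
 * (CFG_HWSUPP_PMULT_64), or the slower 8-bit polynomial multiply
 * fallback otherwise.
 */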
static void pmull_ghash_update(int num_blocks, uint64_t dg[2],
                               const uint8_t *src,
                               const struct internal_ghash_key *ghash_key,
                               const uint8_t *head)
{
#ifdef CFG_HWSUPP_PMULT_64
        pmull_ghash_update_p64(num_blocks, dg, src, ghash_key, head);
#else
        pmull_ghash_update_p8(num_blocks, dg, src, ghash_key, head);
#endif
}

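/*
 * GHASH a run of blocks. The digest is converted to CPU-native word
 * order for the duration of the call and back to the big-endian
 * hash_state afterwards. VFP/NEON must be enabled while the PMULL
 * assembly runs.
 */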
void internal_aes_gcm_ghash_update(struct internal_aes_gcm_state *state,
                                   const void *head, const void *data,
                                   size_t num_blocks)
{
        uint32_t vfp_state;
        uint64_t dg[2];

        get_be_block(dg, state->hash_state);

        vfp_state = thread_kernel_enable_vfp();

        pmull_ghash_update(num_blocks, dg, data, &state->ghash_key, head);

        thread_kernel_disable_vfp(vfp_state);

        put_be_block(state->hash_state, dg);
}

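/*
 * Encrypt full blocks one at a time. state->buf_cryp already holds
 * the key stream (the encrypted counter block) for the current block:
 * XOR in the plaintext, GHASH and copy out the resulting ciphertext,
 * then generate the key stream for the next block and step the
 * counter.
 */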
static void encrypt_pl(struct internal_aes_gcm_state *state,
                       const struct internal_aes_gcm_key *ek, uint64_t dg[2],
                       const uint8_t *src, size_t num_blocks, uint8_t *dst)
{
        void *buf_cryp = state->buf_cryp;

        while (num_blocks) {
                ce_aes_xor_block(buf_cryp, buf_cryp, src);

                pmull_ghash_update(1, dg, buf_cryp, &state->ghash_key, NULL);
                memcpy(dst, buf_cryp, TEE_AES_BLOCK_SIZE);

                ce_aes_ecb_encrypt(buf_cryp, (const uint8_t *)state->ctr,
                                   (const uint8_t *)ek->data, ek->rounds,
                                   1, 1);
                internal_aes_gcm_inc_ctr(state);

                src += TEE_AES_BLOCK_SIZE;
                dst += TEE_AES_BLOCK_SIZE;
                num_blocks--;
        }
}

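/*
 * Decrypt full blocks one at a time: run CTR on the ciphertext and
 * feed the ciphertext (src) into GHASH, since GCM authenticates the
 * ciphertext rather than the plaintext.
 */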
static void decrypt_pl(struct internal_aes_gcm_state *state,
                       const struct internal_aes_gcm_key *ek, uint64_t dg[2],
                       const uint8_t *src, size_t num_blocks, uint8_t *dst)
{
        while (num_blocks) {
                ce_aes_ctr_encrypt(dst, src, (const uint8_t *)ek->data,
                                   ek->rounds, 1, (uint8_t *)state->ctr, 1);
                pmull_ghash_update(1, dg, src, &state->ghash_key, NULL);

                src += TEE_AES_BLOCK_SIZE;
                dst += TEE_AES_BLOCK_SIZE;
                num_blocks--;
        }
}

#ifdef ARM64
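/*
 * Process a run of blocks with the PMULL GCM routines, which consume
 * blocks two at a time. For encryption the key stream is pipelined
 * through ks; see the comment below.
 */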
static void update_payload_2block(struct internal_aes_gcm_state *state,
                                  const struct internal_aes_gcm_key *ek,
                                  uint64_t dg[2], TEE_OperationMode mode,
                                  const void *src, size_t num_blocks, void *dst)
{
        assert(num_blocks && !(num_blocks % 2));

        if (mode == TEE_MODE_ENCRYPT) {
                uint8_t ks[sizeof(state->buf_cryp) * 2] = { 0 };

                /*
                 * ks holds the encrypted counters of the next two blocks.
                 * pmull_gcm_encrypt() uses this to encrypt the first two
                 * blocks. When pmull_gcm_encrypt() returns, ks has been
                 * updated with the encrypted counters of the next two
                 * blocks. Since we only keep the first of those blocks we
                 * throw away block number two and consequently decrease
                 * the counter by one.
                 */
                memcpy(ks, state->buf_cryp, sizeof(state->buf_cryp));

                pmull_gcm_load_round_keys(ek->data, ek->rounds);
                pmull_gcm_encrypt_block(ks + sizeof(state->buf_cryp),
                                        (uint8_t *)state->ctr, ek->rounds);
                internal_aes_gcm_inc_ctr(state);
                pmull_gcm_encrypt(num_blocks, dg, dst, src, &state->ghash_key,
                                  state->ctr, NULL, ek->rounds, ks);
                memcpy(state->buf_cryp, ks, TEE_AES_BLOCK_SIZE);
                internal_aes_gcm_dec_ctr(state);
        } else {
                pmull_gcm_decrypt(num_blocks, dg, dst, src, &state->ghash_key,
                                  state->ctr, ek->data, ek->rounds);
        }
}

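/*
 * Handle as many blocks as possible two at a time and mop up any
 * trailing odd block with the single-block encrypt_pl()/decrypt_pl()
 * path.
 */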
/* Overriding the __weak function */
void
internal_aes_gcm_update_payload_blocks(struct internal_aes_gcm_state *state,
                                       const struct internal_aes_gcm_key *ek,
                                       TEE_OperationMode mode, const void *src,
                                       size_t num_blocks, void *dst)
{
        size_t nb = ROUNDDOWN(num_blocks, 2);
        uint32_t vfp_state = 0;
        uint64_t dg[2] = { 0 };

        get_be_block(dg, state->hash_state);
        vfp_state = thread_kernel_enable_vfp();

        /*
         * pmull_gcm_encrypt() and pmull_gcm_decrypt() can only handle
         * blocks in multiples of two.
         */
        if (nb)
                update_payload_2block(state, ek, dg, mode, src, nb, dst);

        if (nb != num_blocks) {
                /* There's a final block */
                const void *s = (const uint8_t *)src + nb * TEE_AES_BLOCK_SIZE;
                void *d = (uint8_t *)dst + nb * TEE_AES_BLOCK_SIZE;

                if (mode == TEE_MODE_ENCRYPT)
                        encrypt_pl(state, ek, dg, s, 1, d);
                else
                        decrypt_pl(state, ek, dg, s, 1, d);
        }

        thread_kernel_disable_vfp(vfp_state);
        put_be_block(state->hash_state, dg);
}
#endif /*ARM64*/

#ifdef ARM32
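/*
 * The two-block PMULL routines above are ARM64 only, so on ARM32 the
 * payload is processed one block at a time using the ce_aes_*
 * helpers.
 */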
/* Overriding the __weak function */
void
internal_aes_gcm_update_payload_blocks(struct internal_aes_gcm_state *state,
                                       const struct internal_aes_gcm_key *ek,
                                       TEE_OperationMode mode, const void *src,
                                       size_t num_blocks, void *dst)
{
        uint64_t dg[2] = { 0 };
        uint32_t vfp_state = 0;

        assert(!state->buf_pos && num_blocks);
        get_be_block(dg, state->hash_state);
        vfp_state = thread_kernel_enable_vfp();

        if (mode == TEE_MODE_ENCRYPT)
                encrypt_pl(state, ek, dg, src, num_blocks, dst);
        else
                decrypt_pl(state, ek, dg, src, num_blocks, dst);

        thread_kernel_disable_vfp(vfp_state);
        put_be_block(state->hash_state, dg);
}
#endif