// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021, STMicroelectronics - All Rights Reserved
 */
#include <assert.h>
#include <config.h>
#include <drivers/clk.h>
#include <drivers/clk_dt.h>
#include <drivers/rstctrl.h>
#include <initcall.h>
#include <io.h>
#include <kernel/boot.h>
#include <kernel/delay.h>
#include <kernel/dt.h>
#include <kernel/mutex.h>
#include <libfdt.h>
#include <mm/core_memprot.h>
#include <stdint.h>
#include <stm32_util.h>
#include <string.h>
#include <utee_defines.h>
#include <util.h>

#include "stm32_cryp.h"
#include "common.h"

#define INT8_BIT		8U
#define AES_BLOCK_SIZE_BIT	128U
#define AES_BLOCK_SIZE		(AES_BLOCK_SIZE_BIT / INT8_BIT)
#define AES_BLOCK_NB_U32	(AES_BLOCK_SIZE / sizeof(uint32_t))
#define DES_BLOCK_SIZE_BIT	64U
#define DES_BLOCK_SIZE		(DES_BLOCK_SIZE_BIT / INT8_BIT)
#define DES_BLOCK_NB_U32	(DES_BLOCK_SIZE / sizeof(uint32_t))
#define MAX_BLOCK_SIZE_BIT	AES_BLOCK_SIZE_BIT
#define MAX_BLOCK_SIZE		AES_BLOCK_SIZE
#define MAX_BLOCK_NB_U32	AES_BLOCK_NB_U32
#define AES_KEYSIZE_128		16U
#define AES_KEYSIZE_192		24U
#define AES_KEYSIZE_256		32U

/* CRYP control register */
#define _CRYP_CR		0x0U
/* CRYP status register */
#define _CRYP_SR		0x04U
/* CRYP data input register */
#define _CRYP_DIN		0x08U
/* CRYP data output register */
#define _CRYP_DOUT		0x0CU
/* CRYP DMA control register */
#define _CRYP_DMACR		0x10U
/* CRYP interrupt mask set/clear register */
#define _CRYP_IMSCR		0x14U
/* CRYP raw interrupt status register */
#define _CRYP_RISR		0x18U
/* CRYP masked interrupt status register */
#define _CRYP_MISR		0x1CU
/* CRYP key registers */
#define _CRYP_K0LR		0x20U
#define _CRYP_K0RR		0x24U
#define _CRYP_K1LR		0x28U
#define _CRYP_K1RR		0x2CU
#define _CRYP_K2LR		0x30U
#define _CRYP_K2RR		0x34U
#define _CRYP_K3LR		0x38U
#define _CRYP_K3RR		0x3CU
/* CRYP initialization vector registers */
#define _CRYP_IV0LR		0x40U
#define _CRYP_IV0RR		0x44U
#define _CRYP_IV1LR		0x48U
#define _CRYP_IV1RR		0x4CU
/* CRYP context swap GCM-CCM registers */
#define _CRYP_CSGCMCCM0R	0x50U
#define _CRYP_CSGCMCCM1R	0x54U
#define _CRYP_CSGCMCCM2R	0x58U
#define _CRYP_CSGCMCCM3R	0x5CU
#define _CRYP_CSGCMCCM4R	0x60U
#define _CRYP_CSGCMCCM5R	0x64U
#define _CRYP_CSGCMCCM6R	0x68U
#define _CRYP_CSGCMCCM7R	0x6CU
/* CRYP context swap GCM registers */
#define _CRYP_CSGCM0R		0x70U
#define _CRYP_CSGCM1R		0x74U
#define _CRYP_CSGCM2R		0x78U
#define _CRYP_CSGCM3R		0x7CU
#define _CRYP_CSGCM4R		0x80U
#define _CRYP_CSGCM5R		0x84U
#define _CRYP_CSGCM6R		0x88U
#define _CRYP_CSGCM7R		0x8CU
/* CRYP hardware configuration register */
#define _CRYP_HWCFGR		0x3F0U
/* CRYP HW version register */
#define _CRYP_VERR		0x3F4U
/* CRYP identification */
#define _CRYP_IPIDR		0x3F8U
/* CRYP HW magic ID */
#define _CRYP_MID		0x3FCU

#define CRYP_TIMEOUT_US		1000000U
#define TIMEOUT_US_1MS		1000U

/* CRYP control register fields */
#define _CRYP_CR_RESET_VALUE	0x0U
#define _CRYP_CR_NPBLB_MSK	GENMASK_32(23, 20)
#define _CRYP_CR_NPBLB_OFF	20U
#define _CRYP_CR_GCM_CCMPH_MSK	GENMASK_32(17, 16)
#define _CRYP_CR_GCM_CCMPH_OFF	16U
#define _CRYP_CR_GCM_CCMPH_INIT		0U
#define _CRYP_CR_GCM_CCMPH_HEADER	1U
#define _CRYP_CR_GCM_CCMPH_PAYLOAD	2U
#define _CRYP_CR_GCM_CCMPH_FINAL	3U
#define _CRYP_CR_CRYPEN		BIT(15)
#define _CRYP_CR_FFLUSH		BIT(14)
#define _CRYP_CR_KEYSIZE_MSK	GENMASK_32(9, 8)
#define _CRYP_CR_KEYSIZE_OFF	8U
#define _CRYP_CR_KSIZE_128	0U
#define _CRYP_CR_KSIZE_192	1U
#define _CRYP_CR_KSIZE_256	2U
#define _CRYP_CR_DATATYPE_MSK	GENMASK_32(7, 6)
#define _CRYP_CR_DATATYPE_OFF	6U
#define _CRYP_CR_DATATYPE_NONE	0U
#define _CRYP_CR_DATATYPE_HALF_WORD	1U
#define _CRYP_CR_DATATYPE_BYTE	2U
#define _CRYP_CR_DATATYPE_BIT	3U
#define _CRYP_CR_ALGOMODE_MSK	(BIT(19) | GENMASK_32(5, 3))
#define _CRYP_CR_ALGOMODE_OFF	3U
#define _CRYP_CR_ALGOMODE_TDES_ECB	0x0U
#define _CRYP_CR_ALGOMODE_TDES_CBC	0x1U
#define _CRYP_CR_ALGOMODE_DES_ECB	0x2U
#define _CRYP_CR_ALGOMODE_DES_CBC	0x3U
#define _CRYP_CR_ALGOMODE_AES_ECB	0x4U
#define _CRYP_CR_ALGOMODE_AES_CBC	0x5U
#define _CRYP_CR_ALGOMODE_AES_CTR	0x6U
#define _CRYP_CR_ALGOMODE_AES		0x7U
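/*
 * The AES GCM/CCM algomode values set BIT(16) so that, once shifted by
 * _CRYP_CR_ALGOMODE_OFF, they land on CR bit 19, which
 * _CRYP_CR_ALGOMODE_MSK covers in addition to bits [5:3].
 */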
#define _CRYP_CR_ALGOMODE_AES_GCM	BIT(16)
#define _CRYP_CR_ALGOMODE_AES_CCM	(BIT(16) | BIT(0))
#define _CRYP_CR_ALGODIR	BIT(2)
#define _CRYP_CR_ALGODIR_ENC	0U
#define _CRYP_CR_ALGODIR_DEC	BIT(2)

/* CRYP status register fields */
#define _CRYP_SR_BUSY		BIT(4)
#define _CRYP_SR_OFFU		BIT(3)
#define _CRYP_SR_OFNE		BIT(2)
#define _CRYP_SR_IFNF		BIT(1)
#define _CRYP_SR_IFEM		BIT(0)

/* CRYP DMA control register fields */
#define _CRYP_DMACR_DOEN	BIT(1)
#define _CRYP_DMACR_DIEN	BIT(0)

/* CRYP interrupt fields */
#define _CRYP_I_OUT		BIT(1)
#define _CRYP_I_IN		BIT(0)

/* CRYP hardware configuration register fields */
#define _CRYP_HWCFGR_CFG1_MSK	GENMASK_32(3, 0)
#define _CRYP_HWCFGR_CFG1_OFF	0U
#define _CRYP_HWCFGR_CFG2_MSK	GENMASK_32(7, 4)
#define _CRYP_HWCFGR_CFG2_OFF	4U
#define _CRYP_HWCFGR_CFG3_MSK	GENMASK_32(11, 8)
#define _CRYP_HWCFGR_CFG3_OFF	8U
#define _CRYP_HWCFGR_CFG4_MSK	GENMASK_32(15, 12)
#define _CRYP_HWCFGR_CFG4_OFF	12U

/* CRYP HW version register */
#define _CRYP_VERR_MSK		GENMASK_32(7, 0)
#define _CRYP_VERR_OFF		0U

/*
 * Macros for bit manipulation on a local variable before writing it
 * only once to the real register.
 */
#define CLRBITS(v, bits)	((v) &= ~(bits))
#define SETBITS(v, bits)	((v) |= (bits))

#define IS_ALGOMODE(cr, mod) \
	(((cr) & _CRYP_CR_ALGOMODE_MSK) == (_CRYP_CR_ALGOMODE_##mod << \
					    _CRYP_CR_ALGOMODE_OFF))

#define SET_ALGOMODE(mod, cr) \
	clrsetbits(&(cr), _CRYP_CR_ALGOMODE_MSK, (_CRYP_CR_ALGOMODE_##mod << \
						  _CRYP_CR_ALGOMODE_OFF))

#define GET_ALGOMODE(cr) \
	(((cr) & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF)

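/*
 * The CRYP key and IV registers expect big-endian words while the CPU
 * runs little-endian, hence the byte swaps when saving key and IV in
 * the context.
 */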
#define TOBE32(x)		TEE_U32_BSWAP(x)
#define FROMBE32(x)		TEE_U32_BSWAP(x)

static struct stm32_cryp_platdata cryp_pdata;
static struct mutex cryp_lock = MUTEX_INITIALIZER;

static void clrsetbits(uint32_t *v, uint32_t mask, uint32_t bits)
{
	*v = (*v & ~mask) | bits;
}

static bool algo_mode_needs_iv(uint32_t cr)
{
	return !IS_ALGOMODE(cr, TDES_ECB) && !IS_ALGOMODE(cr, DES_ECB) &&
	       !IS_ALGOMODE(cr, AES_ECB);
}

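/*
 * ECB and CBC algomode values (TDES, DES and AES) all sort below
 * _CRYP_CR_ALGOMODE_AES_CTR, so a simple comparison identifies them.
 */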
static bool algo_mode_is_ecb_cbc(uint32_t cr)
{
	return GET_ALGOMODE(cr) < _CRYP_CR_ALGOMODE_AES_CTR;
}

static bool algo_mode_is_aes(uint32_t cr)
{
	return ((cr & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF) >=
	       _CRYP_CR_ALGOMODE_AES_ECB;
}

static bool is_decrypt(uint32_t cr)
{
	return (cr & _CRYP_CR_ALGODIR) == _CRYP_CR_ALGODIR_DEC;
}

static bool is_encrypt(uint32_t cr)
{
	return !is_decrypt(cr);
}

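/*
 * GCM encryption and CCM decryption require the NPBLB field (number of
 * padding bytes in the last block) to be set for an incomplete final
 * block, so that the peripheral excludes the padding from the tag
 * computation.
 */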
static bool does_need_npblb(uint32_t cr)
{
	return (IS_ALGOMODE(cr, AES_GCM) && is_encrypt(cr)) ||
	       (IS_ALGOMODE(cr, AES_CCM) && is_decrypt(cr));
}

static TEE_Result wait_sr_bits(vaddr_t base, uint32_t bits)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while ((io_read32(base + _CRYP_SR) & bits) != bits)
		if (timeout_elapsed(timeout_ref))
			break;

	if ((io_read32(base + _CRYP_SR) & bits) != bits)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result wait_end_busy(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result wait_end_enable(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result __must_check write_align_block(struct stm32_cryp_context *ctx,
						 uint32_t *data)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	res = wait_sr_bits(ctx->base, _CRYP_SR_IFNF);
	if (res)
		return res;

	for (i = 0; i < ctx->block_u32; i++) {
		/* No need to htobe() as we configure the HW to swap bytes */
		io_write32(ctx->base + _CRYP_DIN, data[i]);
	}

	return TEE_SUCCESS;
}

static TEE_Result __must_check write_block(struct stm32_cryp_context *ctx,
					   uint8_t *data)
{
	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };

		memcpy(data_u32, data, ctx->block_u32 * sizeof(uint32_t));
		return write_align_block(ctx, data_u32);
	}

	return write_align_block(ctx, (void *)data);
}

static TEE_Result __must_check read_align_block(struct stm32_cryp_context *ctx,
						uint32_t *data)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	res = wait_sr_bits(ctx->base, _CRYP_SR_OFNE);
	if (res)
		return res;

	for (i = 0; i < ctx->block_u32; i++) {
		/* No need to htobe() as we configure the HW to swap bytes */
		data[i] = io_read32(ctx->base + _CRYP_DOUT);
	}

	return TEE_SUCCESS;
}

static TEE_Result __must_check read_block(struct stm32_cryp_context *ctx,
					  uint8_t *data)
{
	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		TEE_Result res = TEE_SUCCESS;
		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };

		res = read_align_block(ctx, data_u32);
		if (res)
			return res;

		memcpy(data, data_u32, ctx->block_u32 * sizeof(uint32_t));

		return TEE_SUCCESS;
	}

	return read_align_block(ctx, (void *)data);
}

static void cryp_end(struct stm32_cryp_context *ctx, TEE_Result prev_error)
{
	if (prev_error) {
		if (rstctrl_assert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
			panic();
		if (rstctrl_deassert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
			panic();
	}

	/* Disable the CRYP peripheral */
	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
}

static void cryp_write_iv(struct stm32_cryp_context *ctx)
{
	if (algo_mode_needs_iv(ctx->cr)) {
		unsigned int i = 0;

		/* Restore the _CRYP_IVRx */
		for (i = 0; i < ctx->block_u32; i++)
			io_write32(ctx->base + _CRYP_IV0LR + i *
				   sizeof(uint32_t), ctx->iv[i]);
	}
}

static void cryp_save_suspend(struct stm32_cryp_context *ctx)
{
	unsigned int i = 0;

	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
			ctx->pm_gcmccm[i] = io_read32(ctx->base +
						      _CRYP_CSGCMCCM0R +
						      i * sizeof(uint32_t));

	if (IS_ALGOMODE(ctx->cr, AES_GCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
			ctx->pm_gcm[i] = io_read32(ctx->base + _CRYP_CSGCM0R +
						   i * sizeof(uint32_t));
}

static void cryp_restore_suspend(struct stm32_cryp_context *ctx)
{
	unsigned int i = 0;

	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
			io_write32(ctx->base + _CRYP_CSGCMCCM0R +
				   i * sizeof(uint32_t), ctx->pm_gcmccm[i]);

	if (IS_ALGOMODE(ctx->cr, AES_GCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
			io_write32(ctx->base + _CRYP_CSGCM0R +
				   i * sizeof(uint32_t), ctx->pm_gcm[i]);
}

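/*
 * Keys are right-aligned in the key registers: the last key word goes
 * in _CRYP_K1RR for DES or _CRYP_K3RR for TDES/AES, and writing walks
 * down through the lower key registers, so e.g. a 128-bit AES key
 * occupies _CRYP_K2LR.._CRYP_K3RR.
 */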
static void cryp_write_key(struct stm32_cryp_context *ctx)
{
	vaddr_t reg = 0;
	int i = 0;
	uint32_t algo = GET_ALGOMODE(ctx->cr);

	if (algo == _CRYP_CR_ALGOMODE_DES_ECB ||
	    algo == _CRYP_CR_ALGOMODE_DES_CBC)
		reg = ctx->base + _CRYP_K1RR;
	else
		reg = ctx->base + _CRYP_K3RR;

	for (i = ctx->key_size / sizeof(uint32_t) - 1;
	     i >= 0;
	     i--, reg -= sizeof(uint32_t))
		io_write32(reg, ctx->key[i]);
}

static TEE_Result cryp_prepare_key(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/*
	 * For AES ECB/CBC decryption, key preparation mode must be selected
	 * to populate the key.
	 */
	if (is_decrypt(ctx->cr) && (IS_ALGOMODE(ctx->cr, AES_ECB) ||
				    IS_ALGOMODE(ctx->cr, AES_CBC))) {
		/* Select Algomode "prepare key" */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
				_CRYP_CR_ALGOMODE_AES << _CRYP_CR_ALGOMODE_OFF);

		cryp_write_key(ctx);

		/* Enable CRYP */
		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

		res = wait_end_busy(ctx->base);
		if (res)
			return res;

		/* Reset 'real' algomode */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
				ctx->cr & _CRYP_CR_ALGOMODE_MSK);
	} else {
		cryp_write_key(ctx);
	}

	return TEE_SUCCESS;
}

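/*
 * The CRYP instance is shared: each update saves the full hardware
 * context (CR, IV and GCM/CCM chaining registers) and restores it on
 * the next call, serialized by the context mutex.
 */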
static TEE_Result save_context(struct stm32_cryp_context *ctx)
{
	/* Device should not be in a processing phase */
	if (io_read32(ctx->base + _CRYP_SR) & _CRYP_SR_BUSY)
		return TEE_ERROR_BAD_STATE;

	/* Disable the CRYP peripheral */
	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	/* Save CR */
	ctx->cr = io_read32(ctx->base + _CRYP_CR);

	cryp_save_suspend(ctx);

	/* If the algo mode needs it, save the current IV */
	if (algo_mode_needs_iv(ctx->cr)) {
		unsigned int i = 0;

		/* Save IV */
		for (i = 0; i < ctx->block_u32; i++)
			ctx->iv[i] = io_read32(ctx->base + _CRYP_IV0LR + i *
					       sizeof(uint32_t));
	}

	return TEE_SUCCESS;
}

/* To resume the processing of a message */
static TEE_Result restore_context(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/* IP should be disabled */
	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN) {
		DMSG("Device is still enabled");
		return TEE_ERROR_BAD_STATE;
	}

	/* Restore the _CRYP_CR */
	io_write32(ctx->base + _CRYP_CR, ctx->cr);

	/* Write key and, in case of AES_CBC or AES_ECB decrypt, prepare it */
	res = cryp_prepare_key(ctx);
	if (res)
		return res;

	cryp_restore_suspend(ctx);

	cryp_write_iv(ctx);

	/* Flush internal FIFOs */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_FFLUSH);

	/* Enable the CRYP peripheral */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	return TEE_SUCCESS;
}

/*
 * Translate a byte index in an array of BE uint32_t into the index of
 * the same byte in the corresponding LE uint32_t array.
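 * E.g. be_index(0) == 3, be_index(3) == 0 and be_index(5) == 6.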
 */
static size_t be_index(size_t index)
{
	return (index & ~0x3) + 3 - (index & 0x3);
}

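/*
 * CCM init phase: the IV saved at init() is the CCM B0 block. Derive
 * the counter block CTR0 from it (flag bits cleared except the length
 * field size, counter set to 0), as the CCM specification defines,
 * then feed B0 to the peripheral as the first block.
 */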
static TEE_Result ccm_first_context(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t b0[AES_BLOCK_NB_U32] = { 0 };
	uint8_t *iv = (uint8_t *)ctx->iv;
	size_t l = 0;
	size_t i = 15;

	/* IP should be disabled */
	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		return TEE_ERROR_BAD_STATE;

	/* Write the _CRYP_CR */
	io_write32(ctx->base + _CRYP_CR, ctx->cr);

	/* Write key */
	res = cryp_prepare_key(ctx);
	if (res)
		return res;

	/* Save full IV that will be b0 */
	memcpy(b0, iv, sizeof(b0));

	/*
	 * Update IV to become CTR0/1 before setting it.
	 * IV is saved as LE uint32_t[4] as expected by hardware,
	 * but CCM RFC defines bytes to update in a BE array.
	 */
	/* Set flag bits to 0 (5 higher bits), keep 3 low bits */
	iv[be_index(0)] &= 0x7;
	/* Get size of length field (can be from 2 to 8) */
	l = iv[be_index(0)] + 1;
	/* Set Q to 0 */
	for (i = 15; i >= 15 - l + 1; i--)
		iv[be_index(i)] = 0;
	/* Save CTR0 */
	memcpy(ctx->ctr0_ccm, iv, sizeof(b0));
	/* Increment Q */
	iv[be_index(15)] |= 0x1;

	cryp_write_iv(ctx);

	/* Enable the CRYP peripheral */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	res = write_align_block(ctx, b0);

	return res;
}

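/*
 * GCM/CCM processing goes through the INIT, HEADER, PAYLOAD and FINAL
 * phases tracked in _CRYP_CR_GCM_CCMPH. The helpers below drive the
 * transitions out of the INIT and HEADER phases.
 */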
static TEE_Result do_from_init_to_phase(struct stm32_cryp_context *ctx,
					uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	/*
	 * We did not run the init phase yet.
	 * CCM needs a specific context restore sequence for the init phase.
	 */
	if (IS_ALGOMODE(ctx->cr, AES_CCM))
		res = ccm_first_context(ctx);
	else
		res = restore_context(ctx);

	if (res)
		return res;

	res = wait_end_enable(ctx->base);
	if (res)
		return res;

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
			new_phase << _CRYP_CR_GCM_CCMPH_OFF);

	/* Enable the CRYP peripheral (init disabled it) */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_header_to_phase(struct stm32_cryp_context *ctx,
					  uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	res = restore_context(ctx);
	if (res)
		return res;

	if (ctx->extra_size) {
		/* Manage unaligned header data before moving to next phase */
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			return res;

		ctx->assoc_len += (ctx->extra_size) * INT8_BIT;
		ctx->extra_size = 0;
	}

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
			new_phase << _CRYP_CR_GCM_CCMPH_OFF);

	return TEE_SUCCESS;
}

/**
 * @brief Start an AES or DES/TDES computation.
 * @param ctx: CRYP process context
 * @param is_dec: true if decryption, false if encryption
 * @param algo: define the algorithm mode
 * @param key: pointer to key
 * @param key_size: key size
 * @param iv: pointer to initialization vector (unused if algo is ECB)
 * @param iv_size: iv size
 * @note this function does not access the hardware; it only stores the
 * configuration in ctx
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_init(struct stm32_cryp_context *ctx, bool is_dec,
			   enum stm32_cryp_algo_mode algo,
			   const void *key, size_t key_size, const void *iv,
			   size_t iv_size)
{
	unsigned int i = 0;
	const uint32_t *iv_u32 = NULL;
	uint32_t local_iv[4] = { 0 };
	const uint32_t *key_u32 = NULL;
	uint32_t local_key[8] = { 0 };

	ctx->assoc_len = 0;
	ctx->load_len = 0;
	ctx->extra_size = 0;
	ctx->lock = &cryp_lock;

	ctx->base = io_pa_or_va(&cryp_pdata.base, 1);
	ctx->cr = _CRYP_CR_RESET_VALUE;

	/* We want buffers to be u32 aligned */
	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
		key_u32 = key;
	} else {
		memcpy(local_key, key, key_size);
		key_u32 = local_key;
	}

	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
		iv_u32 = iv;
	} else {
		memcpy(local_iv, iv, iv_size);
		iv_u32 = local_iv;
	}

	if (is_dec)
		SETBITS(ctx->cr, _CRYP_CR_ALGODIR);
	else
		CLRBITS(ctx->cr, _CRYP_CR_ALGODIR);

	/* Save algo mode */
	switch (algo) {
	case STM32_CRYP_MODE_TDES_ECB:
		SET_ALGOMODE(TDES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_TDES_CBC:
		SET_ALGOMODE(TDES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_DES_ECB:
		SET_ALGOMODE(DES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_DES_CBC:
		SET_ALGOMODE(DES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_ECB:
		SET_ALGOMODE(AES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CBC:
		SET_ALGOMODE(AES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CTR:
		SET_ALGOMODE(AES_CTR, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_GCM:
		SET_ALGOMODE(AES_GCM, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CCM:
		SET_ALGOMODE(AES_CCM, ctx->cr);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/*
	 * We will use HW byte swap (_CRYP_CR_DATATYPE_BYTE) for data, so we
	 * need neither TOBE32(data) before writing to DIN nor FROMBE32()
	 * after reading from DOUT.
	 */
	clrsetbits(&ctx->cr, _CRYP_CR_DATATYPE_MSK,
		   _CRYP_CR_DATATYPE_BYTE << _CRYP_CR_DATATYPE_OFF);

	/*
	 * Configure the key size for AES algorithms
	 * and save the block size
	 */
	if (algo_mode_is_aes(ctx->cr)) {
		switch (key_size) {
		case AES_KEYSIZE_128:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_128 << _CRYP_CR_KEYSIZE_OFF);
			break;
		case AES_KEYSIZE_192:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_192 << _CRYP_CR_KEYSIZE_OFF);
			break;
		case AES_KEYSIZE_256:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_256 << _CRYP_CR_KEYSIZE_OFF);
			break;
		default:
			return TEE_ERROR_BAD_PARAMETERS;
		}

		/* And set block size */
		ctx->block_u32 = AES_BLOCK_NB_U32;
	} else {
		/* And set DES/TDES block size */
		ctx->block_u32 = DES_BLOCK_NB_U32;
	}

	/* Save key in HW order */
	ctx->key_size = key_size;
	for (i = 0; i < key_size / sizeof(uint32_t); i++)
		ctx->key[i] = TOBE32(key_u32[i]);

	/* Save IV */
	if (algo_mode_needs_iv(ctx->cr)) {
		if (!iv || iv_size != ctx->block_u32 * sizeof(uint32_t))
			return TEE_ERROR_BAD_PARAMETERS;

		/*
		 * We save IV in the byte order expected by the
		 * IV registers
		 */
		for (i = 0; i < ctx->block_u32; i++)
			ctx->iv[i] = TOBE32(iv_u32[i]);
	}

	/* Reset suspend registers */
	memset(ctx->pm_gcmccm, 0, sizeof(ctx->pm_gcmccm));
	memset(ctx->pm_gcm, 0, sizeof(ctx->pm_gcm));

	return TEE_SUCCESS;
}

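/*
 * Illustrative AES-GCM call sequence for this driver's API (a sketch
 * only: buffer names are hypothetical and error checking is omitted):
 *
 *	struct stm32_cryp_context ctx = { };
 *
 *	stm32_cryp_init(&ctx, false, STM32_CRYP_MODE_AES_GCM,
 *			key, AES_KEYSIZE_128, iv, AES_BLOCK_SIZE);
 *	stm32_cryp_update_assodata(&ctx, aad, aad_size);
 *	stm32_cryp_update_load(&ctx, plain, cipher, plain_size);
 *	stm32_cryp_final(&ctx, tag, tag_size);
 */
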
/**
 * @brief Update (or start) an AES authentication process of
 * associated data (CCM or GCM).
 * @param ctx: CRYP process context
 * @param data: pointer to associated data
 * @param data_size: data size
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update_assodata(struct stm32_cryp_context *ctx,
				      uint8_t *data, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	/* If no associated data, nothing to do */
	if (!data || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_HEADER);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		/*
		 * Function update_assodata was already called.
		 * We only need to restore the context.
		 */
		res = restore_context(ctx);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Manage remaining data from a previous update_assodata() call */
	if (ctx->extra_size &&
	    (ctx->extra_size + data_size >=
	     ctx->block_u32 * sizeof(uint32_t))) {
		uint32_t block[MAX_BLOCK_NB_U32] = { 0 };

		memcpy(block, ctx->extra, ctx->extra_size);
		memcpy((uint8_t *)block + ctx->extra_size, data,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, block);
		if (res)
			goto out;

		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
		ctx->extra_size = 0;
		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		res = write_block(ctx, data + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	/*
	 * Manage last block if not a block size multiple:
	 * Save remaining data to manage them later (potentially with new
	 * associated data).
	 */
	if (i < data_size) {
		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
	}

	res = save_context(ctx);
out:
	if (res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) an AES authenticated en/decryption of
 * payload data (CCM or GCM).
 * @param ctx: CRYP process context
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update_load(struct stm32_cryp_context *ctx,
				  uint8_t *data_in, uint8_t *data_out,
				  size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!data_in || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
		break;
	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
		/* New update_load() call, we only need to restore context */
		res = restore_context(ctx);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Manage an incomplete block from a previous update_load() call */
	if (ctx->extra_size &&
	    (ctx->extra_size + data_size >=
	     ctx->block_u32 * sizeof(uint32_t))) {
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };

		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, (uint8_t *)block_out + ctx->extra_size,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
		ctx->extra_size = 0;

		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		res = write_block(ctx, data_in + i);
		if (res)
			goto out;

		res = read_block(ctx, data_out + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	res = save_context(ctx);
	if (res)
		goto out;

	/*
	 * Manage the last block if not a block size multiple.
	 * The context is already saved: complete the block with zeros and
	 * send it to the CRYP to get the en/decrypted data, then store the
	 * data so it can be resent as the last block in final() or used to
	 * complete the next update_load() and still get a correct tag.
	 */
	if (i < data_size) {
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
		size_t prev_extra_size = ctx->extra_size;

		/* Re-enable the CRYP peripheral */
		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, (uint8_t *)block_out + prev_extra_size,
		       data_size - i);

		/* Disable the CRYP peripheral */
		io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
	}

out:
	if (res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
 * @param ctx: CRYP process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_final(struct stm32_cryp_context *ctx, uint8_t *tag,
			    size_t tag_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t tag_u32[4] = { 0 };
	uint32_t previous_phase = 0;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
		break;
	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
		res = restore_context(ctx);
		if (res)
			break;

		/* Manage an incomplete block from a previous update_load() */
		if (ctx->extra_size) {
			uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
			size_t sz = ctx->block_u32 * sizeof(uint32_t) -
				    ctx->extra_size;

			if (does_need_npblb(ctx->cr)) {
				io_clrsetbits32(ctx->base + _CRYP_CR,
						_CRYP_CR_NPBLB_MSK,
						sz << _CRYP_CR_NPBLB_OFF);
			}

			memset((uint8_t *)ctx->extra + ctx->extra_size, 0, sz);

			res = write_align_block(ctx, ctx->extra);
			if (res)
				break;

			/* Discard en/decrypted data, already output earlier */
			res = read_align_block(ctx, block_out);
			if (res)
				break;

			ctx->load_len += (ctx->extra_size * INT8_BIT);
			ctx->extra_size = 0;
		}

		/* Move to final phase */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
				_CRYP_CR_GCM_CCMPH_FINAL <<
				_CRYP_CR_GCM_CCMPH_OFF);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	if (IS_ALGOMODE(ctx->cr, AES_GCM)) {
		/* No need to htobe() as we configure the HW to swap bytes */
		io_write32(ctx->base + _CRYP_DIN, 0U);
		io_write32(ctx->base + _CRYP_DIN, ctx->assoc_len);
		io_write32(ctx->base + _CRYP_DIN, 0U);
		io_write32(ctx->base + _CRYP_DIN, ctx->load_len);
	} else if (IS_ALGOMODE(ctx->cr, AES_CCM)) {
		/* No need to htobe() in this phase */
		res = write_align_block(ctx, ctx->ctr0_ccm);
		if (res)
			goto out;
	}

	res = read_align_block(ctx, tag_u32);
	if (res)
		goto out;

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	cryp_end(ctx, res);
	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) an en/decryption process.
 * @param ctx: CRYP process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update(struct stm32_cryp_context *ctx, bool last_block,
			     uint8_t *data_in, uint8_t *data_out,
			     size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	mutex_lock(ctx->lock);

	/*
	 * In CBC and ECB encryption we would need specific handling of the
	 * last two blocks if the total size is not aligned to the block
	 * size; this currently returns TEE_ERROR_NOT_IMPLEMENTED. Moreover,
	 * as we need to know the last two blocks, if the data is unaligned
	 * and the call provides less than two blocks, we return
	 * TEE_ERROR_BAD_STATE.
	 */
	if (last_block && algo_mode_is_ecb_cbc(ctx->cr) &&
	    is_encrypt(ctx->cr) &&
	    (ROUNDDOWN(data_size, ctx->block_u32 * sizeof(uint32_t)) !=
	     data_size)) {
		if (data_size < ctx->block_u32 * sizeof(uint32_t) * 2) {
			/*
			 * In CBC, the size of the last part must be at
			 * least 2 * BLOCK_SIZE
			 */
			EMSG("Unexpected last block size");
			res = TEE_ERROR_BAD_STATE;
			goto out;
		}
		/*
		 * Moreover the ECB/CBC specific padding for encryption is
		 * not yet implemented, and not used in OP-TEE
		 */
		res = TEE_ERROR_NOT_IMPLEMENTED;
		goto out;
	}

	/* Manage remaining CTR mask from previous update call */
	if (IS_ALGOMODE(ctx->cr, AES_CTR) && ctx->extra_size) {
		unsigned int j = 0;
		uint8_t *mask = (uint8_t *)ctx->extra;

		for (j = 0; j < ctx->extra_size && i < data_size; j++, i++)
			data_out[i] = data_in[i] ^ mask[j];

		if (j != ctx->extra_size) {
			/*
			 * We did not consume all of the saved mask but
			 * there is no more data.
			 */

			/* Save the remaining mask and its new size */
			memmove(ctx->extra, ctx->extra + j,
				ctx->extra_size - j);
			ctx->extra_size -= j;

			/*
			 * No need to save the HW context as we did not
			 * modify the HW state.
			 */
			res = TEE_SUCCESS;
			goto out;
		}

		/* All extra mask consumed */
		ctx->extra_size = 0;
	}

	res = restore_context(ctx);
	if (res)
		goto out;

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		/*
		 * We only write/read one block at a time, even though the
		 * CRYP input and output FIFOs each hold 8 * uint32_t.
		 */
		res = write_block(ctx, data_in + i);
		if (res)
			goto out;

		res = read_block(ctx, data_out + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
	}

	/* Manage last block if not a block size multiple */
	if (i < data_size) {
		uint32_t block_in[MAX_BLOCK_NB_U32] = { 0 };
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };

		if (!IS_ALGOMODE(ctx->cr, AES_CTR)) {
			/*
			 * Algorithms other than CTR can only handle
			 * multiples of the block size.
			 */
			res = TEE_ERROR_BAD_PARAMETERS;
			goto out;
		}

		/*
		 * For CTR we save the generated mask to use it at next
		 * update call.
		 */
		memcpy(block_in, data_in + i, data_size - i);

		res = write_align_block(ctx, block_in);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, block_out, data_size - i);

		/* Save mask for possibly next call */
		ctx->extra_size = ctx->block_u32 * sizeof(uint32_t) -
				  (data_size - i);
		memcpy(ctx->extra, (uint8_t *)block_out + data_size - i,
		       ctx->extra_size);
	}

	if (!last_block)
		res = save_context(ctx);

out:
	/* If last block or error, end of CRYP process */
	if (last_block || res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

static TEE_Result stm32_cryp_probe(const void *fdt, int node,
				   const void *compt_data __unused)
{
	TEE_Result res = TEE_SUCCESS;
	struct dt_node_info dt_cryp = { };
	struct rstctrl *rstctrl = NULL;
	struct clk *clk = NULL;

	_fdt_fill_device_info(fdt, &dt_cryp, node);

	if (dt_cryp.reg == DT_INFO_INVALID_REG ||
	    dt_cryp.reg_size == DT_INFO_INVALID_REG_SIZE)
		panic();

	res = clk_dt_get_by_index(fdt, node, 0, &clk);
	if (res)
		return res;

	res = rstctrl_dt_get_by_index(fdt, node, 0, &rstctrl);
	if (res)
		return res;

	cryp_pdata.clock = clk;
	cryp_pdata.reset = rstctrl;
	cryp_pdata.base.pa = dt_cryp.reg;

	io_pa_or_va_secure(&cryp_pdata.base, dt_cryp.reg_size);
	if (!cryp_pdata.base.va)
		panic();

	stm32mp_register_secure_periph_iomem(cryp_pdata.base.pa);

	if (clk_enable(cryp_pdata.clock))
		panic();

	if (rstctrl_assert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
		panic();

	if (rstctrl_deassert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
		panic();

	if (IS_ENABLED(CFG_CRYPTO_DRV_AUTHENC)) {
		res = stm32_register_authenc();
		if (res) {
			EMSG("Failed to register to authenc: %#"PRIx32, res);
			panic();
		}
	}

	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
		res = stm32_register_cipher();
		if (res) {
			EMSG("Failed to register to cipher: %#"PRIx32, res);
			panic();
		}
	}

	return TEE_SUCCESS;
}

static const struct dt_device_match stm32_cryp_match_table[] = {
	{ .compatible = "st,stm32mp1-cryp" },
	{ }
};

DEFINE_DT_DRIVER(stm32_cryp_dt_driver) = {
	.name = "stm32-cryp",
	.match_table = stm32_cryp_match_table,
	.probe = stm32_cryp_probe,
};