/**************************************************************************//**
*
* @copyright (C) 2020 Nuvoton Technology Corp. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date            Author       Notes
* 2020-12-4       Wayne        First version
*
******************************************************************************/

#include <rtconfig.h>

#if defined(BSP_USING_CRYPTO) && defined(RT_USING_HWCRYPTO)

#include <rtdevice.h>
#include <rtdbg.h>
#include <board.h>
#include "NuMicro.h"
#include "drv_sys.h"

#include <nu_bitutil.h>
/* Private typedef --------------------------------------------------------------*/

typedef struct
{
    uint8_t *pu8SHATempBuf;     /* Word-aligned staging buffer holding a partial SHA block */
    uint32_t u32SHATempBufLen;  /* Number of bytes currently staged */
    uint32_t u32DMAMode;        /* Next DMA cascade mode: CRYPTO_DMA_FIRST/CONTINUE/LAST */
    uint32_t u32BlockSize;      /* SHA block size in bytes: 64 (SHA1/224/256) or 128 (SHA384/512) */
} S_SHA_CONTEXT;

/* Private functions ------------------------------------------------------------*/
static rt_err_t nu_hwcrypto_create(struct rt_hwcrypto_ctx *ctx);
static void nu_hwcrypto_destroy(struct rt_hwcrypto_ctx *ctx);
static rt_err_t nu_hwcrypto_clone(struct rt_hwcrypto_ctx *des, const struct rt_hwcrypto_ctx *src);
static void nu_hwcrypto_reset(struct rt_hwcrypto_ctx *ctx);

/* Private variables ------------------------------------------------------------*/
static const struct rt_hwcrypto_ops nu_hwcrypto_ops =
{
    .create  = nu_hwcrypto_create,
    .destroy = nu_hwcrypto_destroy,
    .copy    = nu_hwcrypto_clone,
    .reset   = nu_hwcrypto_reset,
};

/* Crypto engine operation ------------------------------------------------------------*/

#define NU_HWCRYPTO_AES_NAME    "nu_AES"
#define NU_HWCRYPTO_SHA_NAME    "nu_SHA"
#define NU_HWCRYPTO_PRNG_NAME   "nu_PRNG"

static struct rt_mutex s_AES_mutex;
static struct rt_mutex s_SHA_mutex;
static struct rt_mutex s_PRNG_mutex;

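/* Run one AES encrypt/decrypt operation on the crypto engine.
 * The key and IV are byte-swapped into big-endian words, the whole buffer is
 * handed to the AES DMA as a single one-shot transfer, and s_AES_mutex
 * serializes access to the shared CRPT peripheral. A length that is not a
 * multiple of the 16-byte AES block size triggers a warning. */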
static rt_err_t nu_aes_crypt_run(
    rt_bool_t bEncrypt,
    uint32_t u32OpMode,
    uint8_t *pu8Key,
    uint32_t u32KeySize,
    uint8_t *pu8IV,
    uint8_t *pu8InData,
    uint8_t *pu8OutData,
    uint32_t u32DataLen
)
{
    uint32_t au32SwapKey[8];
    uint32_t au32SwapIV[4];
    rt_err_t result;

    au32SwapKey[0] = nu_get32_be(&pu8Key[0]);
    au32SwapKey[1] = nu_get32_be(&pu8Key[4]);
    au32SwapKey[2] = nu_get32_be(&pu8Key[8]);
    au32SwapKey[3] = nu_get32_be(&pu8Key[12]);

    if ((u32KeySize == AES_KEY_SIZE_192) || (u32KeySize == AES_KEY_SIZE_256))
    {
        au32SwapKey[4] = nu_get32_be(&pu8Key[16]);
        au32SwapKey[5] = nu_get32_be(&pu8Key[20]);
    }

    if (u32KeySize == AES_KEY_SIZE_256)
    {
        au32SwapKey[6] = nu_get32_be(&pu8Key[24]);
        au32SwapKey[7] = nu_get32_be(&pu8Key[28]);
    }

    au32SwapIV[0] = nu_get32_be(&pu8IV[0]);
    au32SwapIV[1] = nu_get32_be(&pu8IV[4]);
    au32SwapIV[2] = nu_get32_be(&pu8IV[8]);
    au32SwapIV[3] = nu_get32_be(&pu8IV[12]);

    result = rt_mutex_take(&s_AES_mutex, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    AES_Open(CRPT, bEncrypt, u32OpMode, u32KeySize, AES_IN_OUT_SWAP);
    AES_SetKey(CRPT, (uint32_t *)&au32SwapKey[0], u32KeySize);
    AES_SetInitVect(CRPT, (uint32_t *)au32SwapIV);

    /* Set up the AES DMA transfer description. */
    AES_SetDMATransfer(CRPT, (uint32_t)pu8InData, (uint32_t)pu8OutData, u32DataLen);

#if defined(BSP_USING_MMU)
    /* Write back dirty d-cache lines to memory before the DMA transfer. */
    {
        /* Flush the source buffer into memory. */
        if (pu8InData)
            mmu_clean_invalidated_dcache((uint32_t)pu8InData, u32DataLen);

        /* Flush the destination buffer into memory. */
        if (pu8OutData)
            mmu_clean_invalidated_dcache((uint32_t)pu8OutData, u32DataLen);
    }
#endif

    /* Clear AES interrupt status */
    AES_CLR_INT_FLAG(CRPT);

    /* Start AES encryption/decryption */
    AES_Start(CRPT, CRYPTO_DMA_ONE_SHOT);

    /* Wait for completion */
    while (!(CRPT->INTSTS & CRPT_INTSTS_AESIF_Msk)) {};

    if ((u32DataLen % 16) && (CRPT->AES_STS & (CRPT_AES_STS_OUTBUFEMPTY_Msk | CRPT_AES_STS_INBUFEMPTY_Msk)))
        rt_kprintf("AES WARNING - Data length (%d) is not a multiple of the 16-byte AES block size; expected %d bytes.\n", u32DataLen, RT_ALIGN(u32DataLen, 16));
    else if (CRPT->INTSTS & (CRPT_INTSTS_AESEIF_Msk) || (CRPT->AES_STS & (CRPT_AES_STS_BUSERR_Msk | CRPT_AES_STS_CNTERR_Msk)))
        rt_kprintf("AES ERROR - CRPT->INTSTS-%08x, CRPT->AES_STS-%08x\n", CRPT->INTSTS, CRPT->AES_STS);

    /* Clear AES interrupt status */
    AES_CLR_INT_FLAG(CRPT);

    result = rt_mutex_release(&s_AES_mutex);
    RT_ASSERT(result == RT_EOK);

    return RT_EOK;
}

/* Use the hardware PRNG in place of a TRNG. */
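/* nu_prng_open() seeds the 64-bit PRNG; nu_prng_run() generates one round and
 * folds the two 32-bit output words into a single 32-bit value. Both take
 * s_PRNG_mutex so PRNG accesses are serialized. */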
static void nu_prng_open(uint32_t u32Seed)
{
    rt_err_t result;

    result = rt_mutex_take(&s_PRNG_mutex, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    /* Open the PRNG with a 64-bit key size and reload the seed. */
    PRNG_Open(CRPT, PRNG_KEY_SIZE_64, PRNG_SEED_RELOAD, u32Seed);

    result = rt_mutex_release(&s_PRNG_mutex);
    RT_ASSERT(result == RT_EOK);
}

static rt_uint32_t nu_prng_run(void)
{
    uint32_t au32RNGValue[2];
    rt_err_t result;

    result = rt_mutex_take(&s_PRNG_mutex, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    PRNG_Start(CRPT);
    while ((CRPT->PRNG_CTL & CRPT_PRNG_CTL_BUSY_Msk)) {};

    /* Clear PRNG interrupt status */
    PRNG_CLR_INT_FLAG(CRPT);

    PRNG_Read(CRPT, &au32RNGValue[0]);

    result = rt_mutex_release(&s_PRNG_mutex);
    RT_ASSERT(result == RT_EOK);

    return au32RNGValue[0] ^ au32RNGValue[1];
}

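/* hwcrypto symmetric .crypt callback. Maps the RT-Thread AES type and key
 * length to the engine's operation mode, bounces unaligned in/out buffers
 * through word-aligned heap buffers for the DMA, and maintains the CBC
 * chaining IV across calls so consecutive requests behave like one stream. */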
static rt_err_t nu_aes_crypt(struct hwcrypto_symmetric *symmetric_ctx, struct hwcrypto_symmetric_info *symmetric_info)
{
    uint32_t u32AESOpMode;
    uint32_t u32AESKeySize;
    unsigned char *in, *out;
    unsigned char in_align_flag = 0;
    unsigned char out_align_flag = 0;
    unsigned char iv_temp[16];
    RT_ASSERT(symmetric_ctx != RT_NULL);
    RT_ASSERT(symmetric_info != RT_NULL);

    if ((symmetric_info->length % 4) != 0)
    {
        return -RT_EINVAL;
    }

    /* Check the key length. */
    if (symmetric_ctx->key_bitlen == 128)
    {
        u32AESKeySize = AES_KEY_SIZE_128;
    }
    else if (symmetric_ctx->key_bitlen == 192)
    {
        u32AESKeySize = AES_KEY_SIZE_192;
    }
    else if (symmetric_ctx->key_bitlen == 256)
    {
        u32AESKeySize = AES_KEY_SIZE_256;
    }
    else
    {
        return -RT_EINVAL;
    }

    /* Select the AES operation mode. */
    switch (symmetric_ctx->parent.type & (HWCRYPTO_MAIN_TYPE_MASK | HWCRYPTO_SUB_TYPE_MASK))
    {
    case HWCRYPTO_TYPE_AES_ECB:
        u32AESOpMode = AES_MODE_ECB;
        break;
    case HWCRYPTO_TYPE_AES_CBC:
        u32AESOpMode = AES_MODE_CBC;
        break;
    case HWCRYPTO_TYPE_AES_CFB:
        u32AESOpMode = AES_MODE_CFB;
        break;
    case HWCRYPTO_TYPE_AES_OFB:
        u32AESOpMode = AES_MODE_OFB;
        break;
    case HWCRYPTO_TYPE_AES_CTR:
        u32AESOpMode = AES_MODE_CTR;
        break;
    default:
        return -RT_ERROR;
    }

    in = (unsigned char *)symmetric_info->in;
    out = (unsigned char *)symmetric_info->out;

    /* If the in/out buffers are not word-aligned, bounce them through aligned heap buffers. */
    if (((rt_uint32_t)in % 4) != 0)
    {
        in = rt_malloc(symmetric_info->length);
        if (in == RT_NULL)
        {
            LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, symmetric_info->length);
            return -RT_ENOMEM;
        }

        rt_memcpy(in, symmetric_info->in, symmetric_info->length);
        in_align_flag = 1;
    }

    if (((rt_uint32_t)out % 4) != 0)
    {
        out = rt_malloc(symmetric_info->length);
        if (out == RT_NULL)
        {
            if (in_align_flag)
                rt_free(in);
            LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, symmetric_info->length);
            return -RT_ENOMEM;
        }

        out_align_flag = 1;
    }

    /* For CBC decryption, keep the last ciphertext block as the next chaining IV. */
    if ((u32AESOpMode == AES_MODE_CBC) && (symmetric_info->mode == HWCRYPTO_MODE_DECRYPT))
    {
        uint32_t loop;

        loop = (symmetric_info->length - 1) / 16;
        rt_memcpy(iv_temp, in + (loop * 16), 16);
    }

    nu_aes_crypt_run(symmetric_info->mode == HWCRYPTO_MODE_ENCRYPT ? TRUE : FALSE, u32AESOpMode, symmetric_ctx->key, u32AESKeySize, symmetric_ctx->iv, in, out, symmetric_info->length);

    /* Update the chaining IV: the last ciphertext block becomes the IV for the next request. */
    if (u32AESOpMode == AES_MODE_CBC)
    {
        if (symmetric_info->mode == HWCRYPTO_MODE_DECRYPT)
        {
            rt_memcpy(symmetric_ctx->iv, iv_temp, 16);
        }
        else
        {
            uint32_t loop;

            loop = (symmetric_info->length - 1) / 16;
            rt_memcpy(symmetric_ctx->iv, out + (loop * 16), 16);
        }
    }

    if (out_align_flag)
    {
        rt_memcpy(symmetric_info->out, out, symmetric_info->length);
        rt_free(out);
    }

    if (in_align_flag)
    {
        rt_free(in);
    }

    return RT_EOK;
}

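/* Feed one chunk to the SHA/HMAC engine via DMA and wait for completion.
 * u32Mode selects the DMA cascade stage (first/continue/last/one-shot). */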
static void SHABlockUpdate(uint32_t u32OpMode, uint32_t u32SrcAddr, uint32_t u32Len, uint32_t u32Mode)
{
    SHA_Open(CRPT, u32OpMode, SHA_IN_OUT_SWAP, 0);

    /* Setup SHA DMA */
    SHA_SetDMATransfer(CRPT, u32SrcAddr, u32Len);

    if (u32Mode == CRYPTO_DMA_FIRST)
    {
        u32Mode = CRYPTO_DMA_CONTINUE;
    }

#if defined(BSP_USING_MMU)
    /* Writeback data in dcache to memory before transferring. */
    {
        /* Flush Src buffer into memory. */
        if (u32SrcAddr)
            mmu_clean_invalidated_dcache(u32SrcAddr, u32Len);
    }
#endif

    /* Start SHA */
    SHA_CLR_INT_FLAG(CRPT);
    SHA_Start(CRPT, u32Mode);

    /* Wait done */
    while (!(CRPT->INTSTS & CRPT_INTSTS_HMACIF_Msk)) {};

    if (CRPT->INTSTS & (CRPT_INTSTS_HMACEIF_Msk) || (CRPT->HMAC_STS & (CRPT_HMAC_STS_DMAERR_Msk)))
        rt_kprintf("SHA ERROR - CRPT->INTSTS-%08x, CRPT->HMAC_STS-%08x\n", CRPT->INTSTS, CRPT->HMAC_STS);

    /* Clear SHA interrupt status */
    SHA_CLR_INT_FLAG(CRPT);
}

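/* Accumulate hash input. Data is fed to the engine one u32BlockSize chunk at a
 * time; a leftover partial block (or data from an unaligned source address) is
 * staged in the word-aligned pu8SHATempBuf until enough data arrives or the
 * digest is finalized in nu_sha_finish(). Protected by s_SHA_mutex. */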
static rt_err_t nu_sha_hash_run(
    S_SHA_CONTEXT *psSHACtx,
    uint32_t u32OpMode,
    uint8_t *pu8InData,
    uint32_t u32DataLen
)
{
    rt_err_t result;

    RT_ASSERT(psSHACtx != RT_NULL);
    RT_ASSERT(pu8InData != RT_NULL);

    result = rt_mutex_take(&s_SHA_mutex, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    uint8_t *pu8SrcAddr = (uint8_t *)pu8InData;
    uint32_t u32CopyLen = 0;

    while ((psSHACtx->u32SHATempBufLen + u32DataLen) > psSHACtx->u32BlockSize)
    {
        if (psSHACtx->pu8SHATempBuf)
        {
            if (psSHACtx->u32SHATempBufLen == psSHACtx->u32BlockSize)
            {
                /* Trigger a SHA block update from the staging buffer. */
                SHABlockUpdate(u32OpMode, (uint32_t)psSHACtx->pu8SHATempBuf, psSHACtx->u32BlockSize, psSHACtx->u32DMAMode);
                psSHACtx->u32DMAMode = CRYPTO_DMA_CONTINUE;
                /* Free the staging buffer. */
                rt_free(psSHACtx->pu8SHATempBuf);
                psSHACtx->pu8SHATempBuf = NULL;
                psSHACtx->u32SHATempBufLen = 0;
                continue;
            }
            else
            {
                /* Top up the staging buffer to a full block. */
                u32CopyLen = psSHACtx->u32BlockSize - psSHACtx->u32SHATempBufLen;
                if (u32DataLen < u32CopyLen)
                    u32CopyLen = u32DataLen;
                rt_memcpy(psSHACtx->pu8SHATempBuf + psSHACtx->u32SHATempBufLen, pu8SrcAddr, u32CopyLen);
                psSHACtx->u32SHATempBufLen += u32CopyLen;
                pu8SrcAddr += u32CopyLen;
                u32DataLen -= u32CopyLen;
                continue;
            }
        }

        if ((uint32_t)pu8SrcAddr & 3) /* Source address is not word-aligned. */
        {
            psSHACtx->pu8SHATempBuf = rt_malloc(psSHACtx->u32BlockSize);

            if (psSHACtx->pu8SHATempBuf == RT_NULL)
            {
                LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, psSHACtx->u32BlockSize);
                result = rt_mutex_release(&s_SHA_mutex);
                RT_ASSERT(result == RT_EOK);
                return -RT_ENOMEM;
            }

            rt_memcpy(psSHACtx->pu8SHATempBuf, pu8SrcAddr, psSHACtx->u32BlockSize);
            psSHACtx->u32SHATempBufLen = psSHACtx->u32BlockSize;
            pu8SrcAddr += psSHACtx->u32BlockSize;
            u32DataLen -= psSHACtx->u32BlockSize;
            continue;
        }

        /* Trigger a SHA block update directly from the (aligned) source. */
        SHABlockUpdate(u32OpMode, (uint32_t)pu8SrcAddr, psSHACtx->u32BlockSize, psSHACtx->u32DMAMode);
        psSHACtx->u32DMAMode = CRYPTO_DMA_CONTINUE;

        pu8SrcAddr += psSHACtx->u32BlockSize;
        u32DataLen -= psSHACtx->u32BlockSize;
    }

    /* Stage the remaining partial block until more data arrives or the hash is finalized. */
    if (u32DataLen)
    {
        if (psSHACtx->pu8SHATempBuf == NULL)
        {
            psSHACtx->pu8SHATempBuf = rt_malloc(psSHACtx->u32BlockSize);

            if (psSHACtx->pu8SHATempBuf == RT_NULL)
            {
                LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, psSHACtx->u32BlockSize);
                result = rt_mutex_release(&s_SHA_mutex);
                RT_ASSERT(result == RT_EOK);
                return -RT_ENOMEM;
            }

            psSHACtx->u32SHATempBufLen = 0;
        }

        /* Append to any already-staged bytes. */
        rt_memcpy(psSHACtx->pu8SHATempBuf + psSHACtx->u32SHATempBufLen, pu8SrcAddr, u32DataLen);
        psSHACtx->u32SHATempBufLen += u32DataLen;
    }

    result = rt_mutex_release(&s_SHA_mutex);
    RT_ASSERT(result == RT_EOK);

    return RT_EOK;
}

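/* hwcrypto hash .update callback: bounce unaligned input through an aligned
 * heap buffer, then hand it to nu_sha_hash_run(). */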
static rt_err_t nu_sha_update(struct hwcrypto_hash *hash_ctx, const rt_uint8_t *in, rt_size_t length)
{
    rt_err_t ret;
    uint32_t u32SHAOpMode;
    unsigned char *nu_in;
    unsigned char in_align_flag = 0;
    RT_ASSERT(hash_ctx != RT_NULL);
    RT_ASSERT(in != RT_NULL);

    /* Select the SHA operation mode. */
    switch (hash_ctx->parent.type & (HWCRYPTO_MAIN_TYPE_MASK | HWCRYPTO_SUB_TYPE_MASK))
    {
    case HWCRYPTO_TYPE_SHA1:
        u32SHAOpMode = SHA_MODE_SHA1;
        break;
    case HWCRYPTO_TYPE_SHA224:
        u32SHAOpMode = SHA_MODE_SHA224;
        break;
    case HWCRYPTO_TYPE_SHA256:
        u32SHAOpMode = SHA_MODE_SHA256;
        break;
    case HWCRYPTO_TYPE_SHA384:
        u32SHAOpMode = SHA_MODE_SHA384;
        break;
    case HWCRYPTO_TYPE_SHA512:
        u32SHAOpMode = SHA_MODE_SHA512;
        break;
    default:
        return -RT_ERROR;
    }

    nu_in = (unsigned char *)in;

    /* If the input buffer is not word-aligned, bounce it through an aligned heap buffer. */
    if (((rt_uint32_t)nu_in % 4) != 0)
    {
        nu_in = rt_malloc(length);
        if (nu_in == RT_NULL)
        {
            LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, length);
            return -RT_ENOMEM;
        }

        rt_memcpy(nu_in, in, length);
        in_align_flag = 1;
    }

    ret = nu_sha_hash_run(hash_ctx->parent.contex, u32SHAOpMode, nu_in, length);

    if (in_align_flag)
    {
        rt_free(nu_in);
    }

    return ret;
}

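/* hwcrypto hash .finish callback: flush any staged partial block as the last
 * DMA descriptor, then read the digest. The caller's buffer must be at least
 * the digest size (20/28/32/48/64 bytes, depending on the SHA variant). */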
static rt_err_t nu_sha_finish(struct hwcrypto_hash *hash_ctx, rt_uint8_t *out, rt_size_t length)
{
    unsigned char *nu_out;
    unsigned char out_align_flag = 0;
    uint32_t u32SHAOpMode;
    S_SHA_CONTEXT *psSHACtx = RT_NULL;
    RT_ASSERT(hash_ctx != RT_NULL);
    RT_ASSERT(out != RT_NULL);

    psSHACtx = hash_ctx->parent.contex;

    /* Check that the output buffer can hold the digest (in bytes). */
    switch (hash_ctx->parent.type & (HWCRYPTO_MAIN_TYPE_MASK | HWCRYPTO_SUB_TYPE_MASK))
    {
    case HWCRYPTO_TYPE_SHA1:
        u32SHAOpMode = SHA_MODE_SHA1;
        if (length < 20UL)
        {
            return -RT_EINVAL;
        }
        break;
    case HWCRYPTO_TYPE_SHA224:
        u32SHAOpMode = SHA_MODE_SHA224;
        if (length < 28UL)
        {
            return -RT_EINVAL;
        }
        break;
    case HWCRYPTO_TYPE_SHA256:
        u32SHAOpMode = SHA_MODE_SHA256;
        if (length < 32UL)
        {
            return -RT_EINVAL;
        }
        break;
    case HWCRYPTO_TYPE_SHA384:
        u32SHAOpMode = SHA_MODE_SHA384;
        if (length < 48UL)
        {
            return -RT_EINVAL;
        }
        break;
    case HWCRYPTO_TYPE_SHA512:
        u32SHAOpMode = SHA_MODE_SHA512;
        if (length < 64UL)
        {
            return -RT_EINVAL;
        }
        break;
    default:
        return -RT_ERROR;
    }

    nu_out = (unsigned char *)out;

    /* If the output buffer is not word-aligned, bounce it through an aligned heap buffer. */
    if (((rt_uint32_t)nu_out % 4) != 0)
    {
        nu_out = rt_malloc(length);
        if (nu_out == RT_NULL)
        {
            LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, length);
            return -RT_ENOMEM;
        }

        out_align_flag = 1;
    }

    /* Flush any staged data as the final DMA descriptor. */
    if (psSHACtx->pu8SHATempBuf)
    {
        if (psSHACtx->u32DMAMode == CRYPTO_DMA_FIRST)
            SHABlockUpdate(u32SHAOpMode, (uint32_t)psSHACtx->pu8SHATempBuf, psSHACtx->u32SHATempBufLen, CRYPTO_DMA_ONE_SHOT);
        else
            SHABlockUpdate(u32SHAOpMode, (uint32_t)psSHACtx->pu8SHATempBuf, psSHACtx->u32SHATempBufLen, CRYPTO_DMA_LAST);

        /* Free the staging buffer. */
        rt_free(psSHACtx->pu8SHATempBuf);
        psSHACtx->pu8SHATempBuf = RT_NULL;
        psSHACtx->u32SHATempBufLen = 0;
    }
    else
    {
        SHABlockUpdate(u32SHAOpMode, (uint32_t)NULL, 0, CRYPTO_DMA_LAST);
    }

    SHA_Read(CRPT, (uint32_t *)nu_out);

    if (out_align_flag)
    {
        rt_memcpy(out, nu_out, length);
        rt_free(nu_out);
    }

    return RT_EOK;
}

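/* hwcrypto rng .update callback: returns one 32-bit value from the hardware PRNG. */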
static rt_uint32_t nu_prng_rand(struct hwcrypto_rng *ctx)
{
    return nu_prng_run();
}

static const struct hwcrypto_symmetric_ops nu_aes_ops =
{
    .crypt = nu_aes_crypt,
};

static const struct hwcrypto_hash_ops nu_sha_ops =
{
    .update = nu_sha_update,
    .finish = nu_sha_finish,
};

/* PRNG operation ------------------------------------------------------------*/
static const struct hwcrypto_rng_ops nu_rng_ops =
{
    .update = nu_prng_rand,
};

/* Register crypto interface ----------------------------------------------------------*/
static rt_err_t nu_hwcrypto_create(struct rt_hwcrypto_ctx *ctx)
{
    rt_err_t res = RT_EOK;
    RT_ASSERT(ctx != RT_NULL);

    switch (ctx->type & HWCRYPTO_MAIN_TYPE_MASK)
    {
    case HWCRYPTO_TYPE_AES:
    {
        ctx->contex = RT_NULL;
        /* Set up the AES operations. */
        ((struct hwcrypto_symmetric *)ctx)->ops = &nu_aes_ops;
        break;
    }

    case HWCRYPTO_TYPE_SHA1:
    case HWCRYPTO_TYPE_SHA2:
    {
        ctx->contex = rt_malloc(sizeof(S_SHA_CONTEXT));

        if (ctx->contex == RT_NULL)
            return -RT_ERROR;

        rt_memset(ctx->contex, 0, sizeof(S_SHA_CONTEXT));
        /* Set up the SHA operations. */
        ((struct hwcrypto_hash *)ctx)->ops = &nu_sha_ops;
        break;
    }

    case HWCRYPTO_TYPE_RNG:
    {
        ctx->contex = RT_NULL;
        ((struct hwcrypto_rng *)ctx)->ops = &nu_rng_ops;
#if defined(NU_PRNG_USE_SEED)
        nu_prng_open(NU_PRNG_SEED_VALUE);
#else
        nu_prng_open(rt_tick_get());
#endif
        break;
    }

    default:
        res = -RT_ERROR;
        break;
    }

    nu_hwcrypto_reset(ctx);

    return res;
}

static void nu_hwcrypto_destroy(struct rt_hwcrypto_ctx *ctx)
{
    RT_ASSERT(ctx != RT_NULL);

    if (ctx->contex)
        rt_free(ctx->contex);
}

static rt_err_t nu_hwcrypto_clone(struct rt_hwcrypto_ctx *des, const struct rt_hwcrypto_ctx *src)
{
    rt_err_t res = RT_EOK;
    RT_ASSERT(des != RT_NULL);
    RT_ASSERT(src != RT_NULL);

    /* Only SHA contexts carry a private context to copy. */
    if (des->contex && src->contex)
    {
        rt_memcpy(des->contex, src->contex, sizeof(S_SHA_CONTEXT));
    }
    else
        return -RT_EINVAL;
    return res;
}

static void nu_hwcrypto_reset(struct rt_hwcrypto_ctx *ctx)
{
    switch (ctx->type & HWCRYPTO_MAIN_TYPE_MASK)
    {
    case HWCRYPTO_TYPE_RNG:
    {
#if defined(NU_PRNG_USE_SEED)
        nu_prng_open(NU_PRNG_SEED_VALUE);
#else
        nu_prng_open(rt_tick_get());
#endif
        break;
    }
    case HWCRYPTO_TYPE_SHA1:
    case HWCRYPTO_TYPE_SHA2:
    {
        S_SHA_CONTEXT *psSHACtx = (S_SHA_CONTEXT *)ctx->contex;

        if (psSHACtx->pu8SHATempBuf)
        {
            rt_free(psSHACtx->pu8SHATempBuf);
        }

        psSHACtx->pu8SHATempBuf = RT_NULL;
        psSHACtx->u32SHATempBufLen = 0;
        psSHACtx->u32DMAMode = CRYPTO_DMA_FIRST;

        if ((ctx->type == HWCRYPTO_TYPE_SHA384) || (ctx->type == HWCRYPTO_TYPE_SHA512))
        {
            psSHACtx->u32BlockSize = 128;
        }
        else
        {
            psSHACtx->u32BlockSize = 64;
        }
        break;
    }

    default:
        break;
    }
}

734 /* Init and register nu_hwcrypto_dev */
735
nu_hwcrypto_device_init(void)736 int nu_hwcrypto_device_init(void)
737 {
738 rt_err_t result;
739 static struct rt_hwcrypto_device nu_hwcrypto_dev;
740
741 nu_hwcrypto_dev.ops = &nu_hwcrypto_ops;
742 nu_hwcrypto_dev.id = 0;
743 nu_hwcrypto_dev.user_data = &nu_hwcrypto_dev;
744
745 nu_sys_ipclk_enable(CRYPTOCKEN);
746 nu_sys_ip_reset(CRYPTORST);
747
748 /* init cipher mutex */
749 #if defined(RT_HWCRYPTO_USING_AES)
750 result = rt_mutex_init(&s_AES_mutex, NU_HWCRYPTO_AES_NAME, RT_IPC_FLAG_PRIO);
751 RT_ASSERT(result == RT_EOK);
752 AES_ENABLE_INT(CRPT);
753 #endif
754
755 #if defined(RT_HWCRYPTO_USING_SHA1) || defined(RT_HWCRYPTO_USING_SHA2)
756 result = rt_mutex_init(&s_SHA_mutex, NU_HWCRYPTO_SHA_NAME, RT_IPC_FLAG_PRIO);
757 RT_ASSERT(result == RT_EOK);
758 SHA_ENABLE_INT(CRPT);
759 #endif
760
761 #if defined(RT_HWCRYPTO_USING_RNG)
762 result = rt_mutex_init(&s_PRNG_mutex, NU_HWCRYPTO_PRNG_NAME, RT_IPC_FLAG_PRIO);
763 RT_ASSERT(result == RT_EOK);
764 #endif
765
766 /* register hwcrypto operation */
767 result = rt_hwcrypto_register(&nu_hwcrypto_dev, RT_HWCRYPTO_DEFAULT_NAME);
768 RT_ASSERT(result == RT_EOK);
769
770 return 0;
771 }
772 INIT_DEVICE_EXPORT(nu_hwcrypto_device_init);
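
/* A minimal application-level usage sketch (not part of this driver), assuming
 * the generic RT-Thread hwcrypto user APIs (hwcrypto.h / hw_hash.h) are enabled
 * in the build:
 *
 *     static void sha256_demo(void)
 *     {
 *         rt_uint8_t digest[32] = {0};
 *         const rt_uint8_t msg[] = "hello";
 *         struct rt_hwcrypto_ctx *ctx;
 *
 *         // Create a SHA256 context on the default (registered) hwcrypto device.
 *         ctx = rt_hwcrypto_hash_create(rt_hwcrypto_dev_default(), HWCRYPTO_TYPE_SHA256);
 *         if (ctx)
 *         {
 *             // Feed data (may be called repeatedly), then read back the 32-byte digest.
 *             rt_hwcrypto_hash_update(ctx, msg, sizeof(msg) - 1);
 *             rt_hwcrypto_hash_finish(ctx, digest, sizeof(digest));
 *             rt_hwcrypto_hash_destroy(ctx);
 *         }
 *     }
 */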

#endif //#if defined(BSP_USING_CRYPTO) && defined(RT_USING_HWCRYPTO)