1 // SPDX-License-Identifier: BSD-2-Clause
2 /*
3  * Copyright 2018-2021, 2024 NXP
4  *
5  * Implementation of Cipher functions
6  */
7 #include <caam_cipher.h>
8 #include <caam_common.h>
9 #include <caam_io.h>
10 #include <caam_jr.h>
11 #include <caam_utils_mem.h>
12 #include <caam_utils_status.h>
13 #include <mm/core_memprot.h>
14 #include <string.h>
15 #include <tee/cache.h>
16 #include <tee/tee_cryp_utl.h>
17 #include <utee_defines.h>
18 
19 #include "local.h"
20 
21 /* Local Function declaration */
22 static TEE_Result do_update_streaming(struct drvcrypt_cipher_update *dupdate);
23 static TEE_Result do_update_cipher(struct drvcrypt_cipher_update *dupdate);
24 
25 /*
26  * Constants definition of the AES algorithm
27  */
static const struct cipheralg aes_alg[] = {
	[TEE_CHAIN_MODE_ECB_NOPAD] = {
		/* AES-ECB: stateless, no context register needed */
		.type = OP_ALGO(AES) | ALGO_AAI(AES_ECB),
		.size_block = TEE_AES_BLOCK_SIZE,
		.size_ctx = 0,
		.ctx_offset = 0,
		.require_key = NEED_KEY1,
		.def_key = { .min = 16, .max = 32, .mod = 8 },
		.update = do_update_cipher,
	},
	[TEE_CHAIN_MODE_CBC_NOPAD] = {
		/* AES-CBC: 16-byte IV kept in the CAAM context register */
		.type = OP_ALGO(AES) | ALGO_AAI(AES_CBC),
		.size_block = TEE_AES_BLOCK_SIZE,
		.size_ctx = 2 * sizeof(uint64_t),
		.ctx_offset = 0,
		.require_key = NEED_KEY1 | NEED_IV,
		.def_key = { .min = 16, .max = 32, .mod = 8 },
		.update = do_update_cipher,
	},
	[TEE_CHAIN_MODE_CTR] = {
		/*
		 * AES-CTR: streaming update handler allows partial
		 * blocks; counter held at offset 16 of the context
		 */
		.type = OP_ALGO(AES) | ALGO_AAI(AES_CTR_MOD128),
		.size_block = TEE_AES_BLOCK_SIZE,
		.size_ctx = 2 * sizeof(uint64_t),
		.ctx_offset = 16,
		.require_key = NEED_KEY1 | NEED_IV,
		.def_key = { .min = 16, .max = 32, .mod = 8 },
		.update = do_update_streaming,
	},
	[TEE_CHAIN_MODE_CTS] = {
		/* CTS not supported: type 0 is rejected by get_cipheralgo() */
		.type = 0,
	},
	[TEE_CHAIN_MODE_XTS] = {
		/*
		 * AES-XTS: handled by the dedicated XTS update entry
		 * point built on the AES-ECB primitive; needs both keys
		 * and a tweak
		 */
		.type = OP_ALGO(AES) | ALGO_AAI(AES_ECB),
		.size_block = TEE_AES_BLOCK_SIZE,
		.size_ctx = 0,
		.ctx_offset = 0,
		.require_key = NEED_KEY1 | NEED_KEY2 | NEED_TWEAK,
		.def_key = { .min = 16, .max = 32, .mod = 8 },
		.update = caam_cipher_update_xts,
	},
};
69 
70 /*
71  * Constants definition of the DES algorithm
72  */
static const struct cipheralg des_alg[] = {
	[TEE_CHAIN_MODE_ECB_NOPAD] = {
		/* DES-ECB: stateless, single 8-byte key */
		.type = OP_ALGO(DES) | ALGO_AAI(DES_ECB),
		.size_block = TEE_DES_BLOCK_SIZE,
		.size_ctx = 0,
		.ctx_offset = 0,
		.require_key = NEED_KEY1,
		.def_key = { .min = 8, .max = 8, .mod = 8 },
		.update = do_update_cipher,
	},
	[TEE_CHAIN_MODE_CBC_NOPAD] = {
		/* DES-CBC: 8-byte IV kept in the CAAM context register */
		.type = OP_ALGO(DES) | ALGO_AAI(DES_CBC),
		.size_block = TEE_DES_BLOCK_SIZE,
		.size_ctx = sizeof(uint64_t),
		.ctx_offset = 0,
		.require_key = NEED_KEY1 | NEED_IV,
		.def_key = { .min = 8, .max = 8, .mod = 8 },
		.update = do_update_cipher,
	},
};
93 
94 /*
95  * Constants definition of the DES3 algorithm
96  */
static const struct cipheralg des3_alg[] = {
	[TEE_CHAIN_MODE_ECB_NOPAD] = {
		/* Triple-DES ECB: stateless; 2-key or 3-key (16/24 bytes) */
		.type = OP_ALGO(3DES) | ALGO_AAI(DES_ECB),
		.size_block = TEE_DES_BLOCK_SIZE,
		.size_ctx = 0,
		.ctx_offset = 0,
		.require_key = NEED_KEY1,
		.def_key = { .min = 16, .max = 24, .mod = 8 },
		.update = do_update_cipher,
	},
	[TEE_CHAIN_MODE_CBC_NOPAD] = {
		/* Triple-DES CBC No Pad: 8-byte IV in context register */
		.type = OP_ALGO(3DES) | ALGO_AAI(DES_CBC),
		.size_block = TEE_DES_BLOCK_SIZE,
		.size_ctx = sizeof(uint64_t),
		.ctx_offset = 0,
		.require_key = NEED_KEY1 | NEED_IV,
		.def_key = { .min = 16, .max = 24, .mod = 8 },
		.update = do_update_cipher,
	},
};
118 
119 /*
120  * Verify the input key size with the requirements
121  *
122  * @def  Key requirements
123  * @size Key size to verify
124  */
static enum caam_status do_check_keysize(const struct caamdefkey *def,
					 size_t size)
{
	/* Reject sizes outside [min, max] or not a multiple of mod */
	if (size < def->min || size > def->max || size % def->mod)
		return CAAM_BAD_PARAM;

	return CAAM_NO_ERROR;
}
133 
/*
 * Build and run one cipher job descriptor on the CAAM Job Ring.
 *
 * @ctx      Cipher operation context (descriptor, keys, context register)
 * @savectx  Store the CAAM context register back to memory after the job
 * @keyid    Which key to load (NEED_KEY1 or NEED_KEY2)
 * @encrypt  True to encrypt, false to decrypt
 * @src      Source DMA object, or NULL if no input data
 * @dst      Destination DMA object, or NULL if no output data
 */
enum caam_status caam_cipher_block(struct cipherdata *ctx, bool savectx,
				   uint8_t keyid, bool encrypt,
				   struct caamdmaobj *src,
				   struct caamdmaobj *dst)
{
	enum caam_status retstatus = CAAM_FAILURE;
	struct caam_jobctx jobctx = { };
	uint32_t *desc = ctx->descriptor;

	caam_desc_init(desc);
	caam_desc_add_word(desc, DESC_HEADER(0));

	if (keyid == NEED_KEY1) {
		/* Load key 1 as a plain key into the Class 1 key register */
		caam_desc_add_word(desc, LD_KEY_PLAIN(CLASS_1, REG,
						      ctx->key1.length));
		caam_desc_add_ptr(desc, ctx->key1.paddr);
	} else if (keyid == NEED_KEY2) {
		/* Load key 2 as a plain key into the Class 1 key register */
		caam_desc_add_word(desc, LD_KEY_PLAIN(CLASS_1, REG,
						      ctx->key2.length));
		caam_desc_add_ptr(desc, ctx->key2.paddr);
	}

	/* If there is a context register load it */
	if (ctx->ctx.length && ctx->alg->size_ctx) {
		caam_desc_add_word(desc, LD_NOIMM_OFF(CLASS_1, REG_CTX,
						      ctx->ctx.length,
						      ctx->alg->ctx_offset));
		caam_desc_add_ptr(desc, ctx->ctx.paddr);
		/*
		 * Operation with the direction; INIT (no FINAL) since the
		 * chain continues across update calls via the context
		 */
		caam_desc_add_word(desc, CIPHER_INIT(ctx->alg->type, encrypt));
	} else {
		/* Operation with the direction, complete in one shot */
		caam_desc_add_word(desc,
				   CIPHER_INITFINAL(ctx->alg->type, encrypt));
	}

	/* Load the source data if any */
	if (src) {
		caam_desc_fifo_load(desc, src, CLASS_1, MSG, LAST_C1);
		caam_dmaobj_cache_push(src);
	}

	/* Store the output data if any */
	if (dst) {
		caam_desc_fifo_store(desc, dst, MSG_DATA);
		caam_dmaobj_cache_push(dst);
	}

	if (ctx->ctx.length && ctx->alg->size_ctx) {
		if (savectx) {
			/* Store the context for the next update call */
			caam_desc_add_word(desc,
					   ST_NOIMM_OFF(CLASS_1, REG_CTX,
							ctx->ctx.length,
							ctx->alg->ctx_offset));
			caam_desc_add_ptr(desc, ctx->ctx.paddr);
		}

		/*
		 * Ensure Context register data are not in cache so the CPU
		 * reads what the CAAM wrote by DMA
		 */
		cache_operation(TEE_CACHEINVALIDATE, ctx->ctx.data,
				ctx->ctx.length);
	}

	CIPHER_DUMPDESC(desc);

	jobctx.desc = desc;
	retstatus = caam_jr_enqueue(&jobctx, NULL);

	if (retstatus != CAAM_NO_ERROR) {
		/* Trace the detailed job status, return a generic failure */
		CIPHER_TRACE("CAAM return 0x%08x Status 0x%08" PRIx32,
			     retstatus, jobctx.status);
		retstatus = CAAM_FAILURE;
	}

	return retstatus;
}
212 
213 /*
214  * Checks if the algorithm @algo is supported and returns the
215  * local algorithm entry in the corresponding cipher array
216  */
static const struct cipheralg *get_cipheralgo(uint32_t algo)
{
	unsigned int algo_id = TEE_ALG_GET_MAIN_ALG(algo);
	unsigned int algo_md = TEE_ALG_GET_CHAIN_MODE(algo);
	const struct cipheralg *table = NULL;
	size_t nb_entries = 0;

	CIPHER_TRACE("Algo id:%" PRId32 " md:%" PRId32, algo_id, algo_md);

	/* Select the algorithm table matching the main algorithm */
	switch (algo_id) {
	case TEE_MAIN_ALGO_AES:
		table = aes_alg;
		nb_entries = ARRAY_SIZE(aes_alg);
		break;

	case TEE_MAIN_ALGO_DES:
		table = des_alg;
		nb_entries = ARRAY_SIZE(des_alg);
		break;

	case TEE_MAIN_ALGO_DES3:
		table = des3_alg;
		nb_entries = ARRAY_SIZE(des3_alg);
		break;

	default:
		return NULL;
	}

	/* Reject out-of-range chain modes and unsupported entries (type 0) */
	if (algo_md >= nb_entries || !table[algo_md].type)
		return NULL;

	return &table[algo_md];
}
250 
251 /*
252  * Allocate the SW cipher data context
253  *
254  * @ctx   [out] Caller context variable
255  * @algo  Algorithm ID of the context
256  */
do_allocate(void ** ctx,uint32_t algo)257 static TEE_Result do_allocate(void **ctx, uint32_t algo)
258 {
259 	TEE_Result ret = TEE_ERROR_NOT_IMPLEMENTED;
260 	struct cipherdata *cipherdata = NULL;
261 	const struct cipheralg *alg = NULL;
262 
263 	CIPHER_TRACE("Allocate Algo 0x%" PRIX32 " Context (%p)", algo, ctx);
264 
265 	alg = get_cipheralgo(algo);
266 	if (!alg) {
267 		CIPHER_TRACE("Algorithm not supported");
268 		return TEE_ERROR_NOT_IMPLEMENTED;
269 	}
270 
271 	cipherdata = caam_calloc(sizeof(*cipherdata));
272 	if (!cipherdata) {
273 		CIPHER_TRACE("Allocation Cipher data error");
274 		return TEE_ERROR_OUT_OF_MEMORY;
275 	}
276 
277 	/* Allocate the descriptor */
278 	cipherdata->descriptor = caam_calloc_desc(MAX_DESC_ENTRIES);
279 	if (!cipherdata->descriptor) {
280 		CIPHER_TRACE("Allocation descriptor error");
281 		ret = TEE_ERROR_OUT_OF_MEMORY;
282 		goto out;
283 	}
284 
285 	/* Setup the Algorithm pointer */
286 	cipherdata->alg = alg;
287 
288 	/* Initialize the block buffer */
289 	cipherdata->blockbuf.max = cipherdata->alg->size_block;
290 
291 	*ctx = cipherdata;
292 
293 	return TEE_SUCCESS;
294 
295 out:
296 	caam_free_desc(&cipherdata->descriptor);
297 	caam_free(cipherdata);
298 
299 	return ret;
300 }
301 
302 /*
303  * Free the internal cipher data context
304  *
305  * @ctx    Caller context variable or NULL
306  */
do_free_intern(struct cipherdata * ctx)307 static void do_free_intern(struct cipherdata *ctx)
308 {
309 	CIPHER_TRACE("Free Context (%p)", ctx);
310 
311 	if (ctx) {
312 		/* Free the descriptor */
313 		caam_free_desc(&ctx->descriptor);
314 
315 		/* Free the Key 1  */
316 		caam_free_buf(&ctx->key1);
317 
318 		/* Free the Key 2  */
319 		caam_free_buf(&ctx->key2);
320 
321 		/* Free the Tweak */
322 		caam_free_buf(&ctx->tweak);
323 
324 		/* Free the Context Register */
325 		caam_free_buf(&ctx->ctx);
326 
327 		/* Free Temporary buffer */
328 		caam_free_buf(&ctx->blockbuf.buf);
329 	}
330 }
331 
void caam_cipher_free(void *ctx)
{
	CIPHER_TRACE("Free Context (%p)", ctx);

	if (!ctx)
		return;

	/* Free the internal buffers then the context itself */
	do_free_intern(ctx);
	caam_free(ctx);
}
341 
caam_cipher_copy_state(void * dst_ctx,void * src_ctx)342 void caam_cipher_copy_state(void *dst_ctx, void *src_ctx)
343 {
344 	struct cipherdata *dst = dst_ctx;
345 	struct cipherdata *src = src_ctx;
346 
347 	CIPHER_TRACE("Copy State context (%p) to (%p)", src_ctx, dst_ctx);
348 
349 	dst->alg = src->alg;
350 	dst->encrypt = src->encrypt;
351 
352 	if (src->blockbuf.filled) {
353 		struct caambuf srcdata = {
354 			.data = src->blockbuf.buf.data,
355 			.length = src->blockbuf.filled
356 		};
357 		caam_cpy_block_src(&dst->blockbuf, &srcdata, 0);
358 	}
359 
360 	if (src->key1.length)
361 		caam_cpy_buf(&dst->key1, src->key1.data, src->key1.length);
362 
363 	if (src->key2.length)
364 		caam_cpy_buf(&dst->key2, src->key2.data, src->key2.length);
365 
366 	if (src->ctx.length) {
367 		cache_operation(TEE_CACHEINVALIDATE, src->ctx.data,
368 				src->ctx.length);
369 		caam_cpy_buf(&dst->ctx, src->ctx.data, src->ctx.length);
370 	}
371 
372 	if (src->tweak.length)
373 		caam_cpy_buf(&dst->tweak, src->tweak.data, src->tweak.length);
374 }
375 
caam_cipher_initialize(struct drvcrypt_cipher_init * dinit)376 TEE_Result caam_cipher_initialize(struct drvcrypt_cipher_init *dinit)
377 {
378 	TEE_Result ret = TEE_ERROR_BAD_PARAMETERS;
379 	enum caam_status retstatus = CAAM_FAILURE;
380 	struct cipherdata *cipherdata = dinit->ctx;
381 	const struct cipheralg *alg = NULL;
382 
383 	CIPHER_TRACE("Action %s", dinit->encrypt ? "Encrypt" : "Decrypt");
384 
385 	if (!cipherdata)
386 		return ret;
387 
388 	alg = cipherdata->alg;
389 
390 	/* Check if all required keys are defined */
391 	if (alg->require_key & NEED_KEY1) {
392 		if (!dinit->key1.data || !dinit->key1.length)
393 			goto out;
394 
395 		retstatus = do_check_keysize(&alg->def_key, dinit->key1.length);
396 		if (retstatus != CAAM_NO_ERROR) {
397 			CIPHER_TRACE("Bad Key 1 size");
398 			goto out;
399 		}
400 
401 		/* Copy the key 1 */
402 		retstatus = caam_cpy_buf(&cipherdata->key1, dinit->key1.data,
403 					 dinit->key1.length);
404 		CIPHER_TRACE("Copy Key 1 returned 0x%" PRIx32, retstatus);
405 
406 		if (retstatus != CAAM_NO_ERROR) {
407 			ret = TEE_ERROR_OUT_OF_MEMORY;
408 			goto out;
409 		}
410 	}
411 
412 	if (alg->require_key & NEED_KEY2) {
413 		if (!dinit->key2.data || !dinit->key2.length)
414 			goto out;
415 
416 		retstatus = do_check_keysize(&alg->def_key, dinit->key2.length);
417 		if (retstatus != CAAM_NO_ERROR) {
418 			CIPHER_TRACE("Bad Key 2 size");
419 			goto out;
420 		}
421 
422 		/* Copy the key 2 */
423 		retstatus = caam_cpy_buf(&cipherdata->key2, dinit->key2.data,
424 					 dinit->key2.length);
425 		CIPHER_TRACE("Copy Key 2 returned 0x%" PRIx32, retstatus);
426 
427 		if (retstatus != CAAM_NO_ERROR) {
428 			ret = TEE_ERROR_OUT_OF_MEMORY;
429 			goto out;
430 		}
431 	}
432 
433 	if (alg->require_key & NEED_IV) {
434 		if (!dinit->iv.data || !dinit->iv.length)
435 			goto out;
436 
437 		if (dinit->iv.length != alg->size_ctx) {
438 			CIPHER_TRACE("Bad IV size %zu (expected %" PRId32 ")",
439 				     dinit->iv.length, alg->size_ctx);
440 			goto out;
441 		}
442 
443 		CIPHER_TRACE("Allocate CAAM Context Register (%" PRId32
444 			     " bytes)",
445 			     alg->size_ctx);
446 
447 		/* Copy the IV into the context register */
448 		retstatus = caam_cpy_buf(&cipherdata->ctx, dinit->iv.data,
449 					 dinit->iv.length);
450 		CIPHER_TRACE("Copy IV returned 0x%" PRIx32, retstatus);
451 
452 		if (retstatus != CAAM_NO_ERROR) {
453 			ret = TEE_ERROR_OUT_OF_MEMORY;
454 			goto out;
455 		}
456 	}
457 
458 	if (alg->require_key & NEED_TWEAK) {
459 		/* This is accepted to start with a NULL Tweak */
460 		if (dinit->iv.length) {
461 			if (dinit->iv.length != alg->size_block) {
462 				CIPHER_TRACE("Bad tweak 2 size");
463 				goto out;
464 			}
465 
466 			/* Copy the tweak */
467 			retstatus = caam_cpy_buf(&cipherdata->tweak,
468 						 dinit->iv.data,
469 						 dinit->iv.length);
470 			CIPHER_TRACE("Copy Tweak returned 0x%" PRIx32,
471 				     retstatus);
472 
473 			if (retstatus != CAAM_NO_ERROR) {
474 				ret = TEE_ERROR_OUT_OF_MEMORY;
475 				goto out;
476 			}
477 		} else {
478 			/* Create tweak 0's */
479 			if (!cipherdata->tweak.data) {
480 				/*
481 				 * Allocate the destination buffer and
482 				 * fill it with 0's
483 				 */
484 				ret = caam_calloc_align_buf(&cipherdata->tweak,
485 							    alg->size_block);
486 				if (ret != CAAM_NO_ERROR)
487 					goto out;
488 			} else {
489 				/* Fill it with 0's */
490 				memset(cipherdata->tweak.data, 0,
491 				       cipherdata->tweak.length);
492 			}
493 
494 			/* Push data to physical memory */
495 			cache_operation(TEE_CACHEFLUSH, cipherdata->tweak.data,
496 					cipherdata->tweak.length);
497 		}
498 	}
499 
500 	/* Save the operation direction */
501 	cipherdata->encrypt = dinit->encrypt;
502 	cipherdata->blockbuf.filled = 0;
503 
504 	ret = TEE_SUCCESS;
505 
506 out:
507 	/* Free the internal context in case of error */
508 	if (ret != TEE_SUCCESS)
509 		do_free_intern(cipherdata);
510 
511 	return ret;
512 }
513 
514 /*
515  * Update of the cipher operation in streaming mode, meaning
516  * doing partial intermediate block.
517  * If there is a context, the context is saved only when a
518  * full block is done.
519  * The partial block (if not the last block) is encrypted or
520  * decrypted to return the result and it's saved to be concatened
521  * to next data to rebuild a full block.
522  *
523  * @dupdate  Data update object
524  */
do_update_streaming(struct drvcrypt_cipher_update * dupdate)525 static TEE_Result do_update_streaming(struct drvcrypt_cipher_update *dupdate)
526 {
527 	TEE_Result ret = TEE_ERROR_GENERIC;
528 	enum caam_status retstatus = CAAM_FAILURE;
529 	struct cipherdata *ctx = dupdate->ctx;
530 	struct caamdmaobj src = { };
531 	struct caamdmaobj dst = { };
532 	struct caamblock trash_bck = { };
533 	size_t fullsize = 0;
534 	size_t size_topost = 0;
535 	size_t size_todo = 0;
536 	size_t size_inmade = 0;
537 	size_t size_done = 0;
538 	size_t offset = 0;
539 
540 	CIPHER_TRACE("Length=%zu - %s", dupdate->src.length,
541 		     ctx->encrypt ? "Encrypt" : "Decrypt");
542 
543 	/* Calculate the total data to be handled */
544 	fullsize = ctx->blockbuf.filled + dupdate->src.length;
545 	CIPHER_TRACE("Fullsize %zu", fullsize);
546 	if (fullsize < ctx->alg->size_block) {
547 		size_topost = dupdate->src.length;
548 		goto end_streaming_post;
549 	} else {
550 		size_topost = fullsize % ctx->alg->size_block;
551 		/* Total size that is a cipher block multiple */
552 		size_todo = fullsize - size_topost;
553 		size_inmade = size_todo - ctx->blockbuf.filled;
554 	}
555 
556 	CIPHER_TRACE("FullSize %zu - posted %zu - todo %zu", fullsize,
557 		     size_topost, size_todo);
558 
559 	if (size_todo) {
560 		ret = caam_dmaobj_init_input(&src, dupdate->src.data,
561 					     dupdate->src.length);
562 		if (ret)
563 			goto end_streaming;
564 
565 		ret = caam_dmaobj_init_output(&dst, dupdate->dst.data,
566 					      dupdate->dst.length,
567 					      dupdate->dst.length);
568 		if (ret)
569 			goto end_streaming;
570 
571 		ret = caam_dmaobj_prepare(&src, &dst, ctx->alg->size_block);
572 		if (ret)
573 			goto end_streaming;
574 	}
575 
576 	/*
577 	 * Check first if there is some data saved to complete the
578 	 * buffer.
579 	 */
580 	if (ctx->blockbuf.filled) {
581 		ret = caam_dmaobj_add_first_block(&src, &ctx->blockbuf);
582 		if (ret)
583 			goto end_streaming;
584 
585 		ret = caam_dmaobj_add_first_block(&dst, &ctx->blockbuf);
586 		if (ret)
587 			goto end_streaming;
588 
589 		ctx->blockbuf.filled = 0;
590 	}
591 
592 	size_done = size_todo;
593 	dupdate->dst.length = 0;
594 	for (offset = 0; size_todo;
595 	     offset += size_done, size_todo -= size_done) {
596 		CIPHER_TRACE("Do input %zu bytes (%zu), offset %zu", size_done,
597 			     size_todo, offset);
598 
599 		size_done = size_todo;
600 		ret = caam_dmaobj_sgtbuf_inout_build(&src, &dst, &size_done,
601 						     offset,
602 						     ctx->alg->size_block);
603 		if (ret)
604 			goto end_streaming;
605 
606 		retstatus = caam_cipher_block(ctx, true, NEED_KEY1,
607 					      ctx->encrypt, &src, &dst);
608 
609 		if (retstatus != CAAM_NO_ERROR) {
610 			ret = caam_status_to_tee_result(retstatus);
611 			goto end_streaming;
612 		}
613 
614 		dupdate->dst.length += caam_dmaobj_copy_to_orig(&dst);
615 	}
616 
617 	CIPHER_DUMPBUF("Source", dupdate->src.data, dupdate->src.length);
618 	CIPHER_DUMPBUF("Result", dupdate->dst.data, dupdate->dst.length);
619 
620 end_streaming_post:
621 	if (size_topost) {
622 		/*
623 		 * Save the input data in the block buffer for next operation
624 		 * and prepare the source DMA Object with the overall saved
625 		 * data to generate destination bytes.
626 		 */
627 		struct caambuf cpysrc = {
628 			.data = dupdate->src.data,
629 			.length = dupdate->src.length
630 		};
631 
632 		caam_dmaobj_free(&src);
633 		caam_dmaobj_free(&dst);
634 		CIPHER_TRACE("Save input data %zu bytes (done %zu) - off %zu",
635 			     size_topost, size_inmade, offset);
636 
637 		size_todo = size_topost + ctx->blockbuf.filled;
638 
639 		/*
640 		 * Prepare the destination DMA Object:
641 		 *  - Use given destination parameter bytes to return
642 		 *  - If the previous operation saved data, use a trash
643 		 *    buffer to do the operation but not use unneeded data.
644 		 */
645 		ret = caam_dmaobj_init_output(&dst,
646 					      dupdate->dst.data + size_inmade,
647 					      size_topost, size_topost);
648 		if (ret)
649 			goto end_streaming;
650 
651 		ret = caam_dmaobj_prepare(NULL, &dst, ctx->alg->size_block);
652 		if (ret)
653 			goto end_streaming;
654 
655 		if (ctx->blockbuf.filled) {
656 			/*
657 			 * Because there are some bytes to trash, use
658 			 * a block buffer that will be added to the
659 			 * destination SGT/Buffer structure to do the
660 			 * cipher operation.
661 			 */
662 			ret = caam_alloc_align_buf(&trash_bck.buf,
663 						   ctx->blockbuf.filled);
664 			if (ret != CAAM_NO_ERROR) {
665 				CIPHER_TRACE("Allocation Trash Block error");
666 				goto end_streaming;
667 			}
668 			trash_bck.filled = ctx->blockbuf.filled;
669 
670 			ret = caam_dmaobj_add_first_block(&dst, &trash_bck);
671 			if (ret)
672 				goto end_streaming;
673 		}
674 
675 		retstatus = caam_cpy_block_src(&ctx->blockbuf, &cpysrc,
676 					       size_inmade);
677 		if (retstatus != CAAM_NO_ERROR) {
678 			ret = caam_status_to_tee_result(retstatus);
679 			goto end_streaming;
680 		}
681 
682 		ret = caam_dmaobj_init_input(&src, ctx->blockbuf.buf.data,
683 					     ctx->blockbuf.filled);
684 		if (ret)
685 			goto end_streaming;
686 
687 		ret = caam_dmaobj_prepare(&src, NULL, ctx->alg->size_block);
688 		if (ret)
689 			goto end_streaming;
690 
691 		/*
692 		 * Build input and output DMA Object with the same size.
693 		 */
694 		size_done = size_todo;
695 		ret = caam_dmaobj_sgtbuf_inout_build(&src, &dst, &size_done, 0,
696 						     size_todo);
697 		if (ret)
698 			goto end_streaming;
699 
700 		if (size_todo != size_done) {
701 			CIPHER_TRACE("Invalid end streaming size %zu vs %zu",
702 				     size_done, size_todo);
703 			ret = TEE_ERROR_GENERIC;
704 			goto end_streaming;
705 		}
706 
707 		retstatus = caam_cipher_block(ctx, false, NEED_KEY1,
708 					      ctx->encrypt, &src, &dst);
709 
710 		if (retstatus != CAAM_NO_ERROR) {
711 			ret = caam_status_to_tee_result(retstatus);
712 			goto end_streaming;
713 		}
714 
715 		dupdate->dst.length += caam_dmaobj_copy_to_orig(&dst);
716 
717 		CIPHER_DUMPBUF("Source", ctx->blockbuf.buf.data,
718 			       ctx->blockbuf.filled);
719 		CIPHER_DUMPBUF("Result", dupdate->dst.data + size_inmade,
720 			       size_topost);
721 	}
722 
723 	ret = TEE_SUCCESS;
724 
725 end_streaming:
726 	caam_dmaobj_free(&src);
727 	caam_dmaobj_free(&dst);
728 
729 	/* Free Trash block buffer */
730 	caam_free_buf(&trash_bck.buf);
731 
732 	return ret;
733 }
734 
735 /*
736  * Update of the cipher operation with complete block except
737  * if last block. Last block can be partial block.
738  *
739  * @dupdate  Data update object
740  */
static TEE_Result do_update_cipher(struct drvcrypt_cipher_update *dupdate)
{
	TEE_Result ret = TEE_ERROR_GENERIC;
	enum caam_status retstatus = CAAM_FAILURE;
	struct cipherdata *ctx = dupdate->ctx;
	struct caamdmaobj src = { };
	struct caamdmaobj dst = { };
	size_t offset = 0;
	size_t size_todo = 0;
	size_t size_done = 0;

	CIPHER_TRACE("Length=%zu - %s", dupdate->src.length,
		     (ctx->encrypt ? "Encrypt" : "Decrypt"));

	/*
	 * Check the length of the payload/cipher to be at least
	 * one or n cipher block.
	 */
	if (dupdate->src.length < ctx->alg->size_block ||
	    dupdate->src.length % ctx->alg->size_block) {
		CIPHER_TRACE("Bad payload/cipher size %zu bytes",
			     dupdate->src.length);
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Map the source buffer for CAAM DMA */
	ret = caam_dmaobj_init_input(&src, dupdate->src.data,
				     dupdate->src.length);
	if (ret)
		goto end_cipher;

	/* Map the destination buffer for CAAM DMA */
	ret = caam_dmaobj_init_output(&dst, dupdate->dst.data,
				      dupdate->dst.length, dupdate->dst.length);
	if (ret)
		goto end_cipher;

	ret = caam_dmaobj_prepare(&src, &dst, ctx->alg->size_block);
	if (ret)
		goto end_cipher;

	/*
	 * Process the input chunk by chunk; each pass runs as many
	 * whole blocks as the in/out SGT build allows (size_done).
	 */
	size_todo = dupdate->src.length;
	dupdate->dst.length = 0;
	for (offset = 0; size_todo;
	     offset += size_done, size_todo -= size_done) {
		size_done = size_todo;
		CIPHER_TRACE("Do input %zu bytes, offset %zu", size_done,
			     offset);
		ret = caam_dmaobj_sgtbuf_inout_build(&src, &dst, &size_done,
						     offset,
						     ctx->alg->size_block);
		if (ret)
			goto end_cipher;

		/* Save the context so chaining continues on the next call */
		retstatus = caam_cipher_block(ctx, true, NEED_KEY1,
					      ctx->encrypt, &src, &dst);

		if (retstatus != CAAM_NO_ERROR) {
			ret = caam_status_to_tee_result(retstatus);
			goto end_cipher;
		}

		/* Accumulate the number of destination bytes produced */
		dupdate->dst.length += caam_dmaobj_copy_to_orig(&dst);
	}

	ret = TEE_SUCCESS;

end_cipher:
	caam_dmaobj_free(&src);
	caam_dmaobj_free(&dst);

	return ret;
}
812 
813 /*
814  * Update of the cipher operation. Call the algorithm update
815  * function associated.
816  *
817  * @dupdate  Data update object
818  */
do_update(struct drvcrypt_cipher_update * dupdate)819 static TEE_Result do_update(struct drvcrypt_cipher_update *dupdate)
820 {
821 	struct cipherdata *cipherdata = dupdate->ctx;
822 
823 	return cipherdata->alg->update(dupdate);
824 }
825 
826 /*
827  * Finalize of the cipher operation
828  *
829  * @ctx    Caller context variable or NULL
830  */
static void do_final(void *ctx __unused)
{
	/* No final processing required for this driver */
}
834 
835 /*
836  * Registration of the Cipher Driver
837  */
/* Cipher operations exposed to the drvcrypt framework */
static struct drvcrypt_cipher driver_cipher = {
	.alloc_ctx = do_allocate,
	.free_ctx = caam_cipher_free,
	.init = caam_cipher_initialize,
	.update = do_update,
	.final = do_final,
	.copy_state = caam_cipher_copy_state,
};
846 
847 /*
848  * Initialize the Cipher module
849  *
850  * @ctrl_addr   Controller base address
851  */
enum caam_status caam_cipher_init(vaddr_t ctrl_addr __unused)
{
	/* Register the cipher driver into the drvcrypt framework */
	if (drvcrypt_register_cipher(&driver_cipher) != TEE_SUCCESS)
		return CAAM_FAILURE;

	return CAAM_NO_ERROR;
}
861