1 // SPDX-License-Identifier: BSD-2-Clause
2 /*
3  * Copyright (c) 2022, Aspeed Technology Inc.
4  */
#include <drvcrypt_hash.h>
#include <io.h>
#include <kernel/delay.h>
#include <mm/core_memprot.h>
#include <mm/core_mmu.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <tee/cache.h>
#include <util.h>
15 
16 #include "hace_ast2600.h"
17 
18 #define HACE_BASE	0x1e6d0000
19 
20 /* register offsets and bit fields */
21 #define HACE_STS		0x1C
22 #define   HACE_STS_HASH_INT		BIT(9)
23 #define   HACE_STS_HASH_BUSY		BIT(0)
24 #define HACE_HASH_DATA		0x20
25 #define HACE_HASH_DIGEST	0x24
26 #define HACE_HASH_HMAC_KEY	0x28
27 #define HACE_HASH_DATA_LEN	0x2C
28 #define HACE_HASH_CMD		0x30
29 #define   HACE_HASH_CMD_ACCUM		BIT(8)
30 #define   HACE_HASH_CMD_ALG_SHA1	BIT(5)
31 #define   HACE_HASH_CMD_ALG_SHA256	(BIT(6) | BIT(4))
32 #define   HACE_HASH_CMD_ALG_SHA384	(BIT(10) | BIT(6) | BIT(5))
33 #define   HACE_HASH_CMD_ALG_SHA512	(BIT(6) | BIT(5))
34 #define   HACE_HASH_CMD_SHA_BE		BIT(3)
35 
36 /* buffer size based on SHA-512 need */
37 #define HASH_BLK_BUFSZ	128
38 #define HASH_DGT_BUFSZ	64
39 
40 register_phys_mem(MEM_AREA_IO_SEC, HACE_BASE, SMALL_PAGE_SIZE);
41 
/*
 * Per-session hash context, embedded around the generic crypto_hash_ctx
 * so container_of() can recover it from the ops callbacks.
 */
struct ast2600_hace_ctx {
	struct crypto_hash_ctx hash_ctx;	/* must stay first-class for container_of */
	uint32_t cmd;		/* precomputed HACE_HASH_CMD value for this algo */
	uint32_t algo;		/* TEE_ALG_SHA{1,256,384,512} */
	uint32_t dgt_size;	/* digest length in bytes */
	uint32_t blk_size;	/* hash block size in bytes (64 or 128) */
	uint32_t pad_size;	/* length-field size of the padding (8 or 16) */
	uint64_t total[2];	/* 128-bit running byte count of input data */

	/* DMA memory to interact with HACE */
	uint8_t *buf;		/* one-block staging buffer for partial input */
	uint8_t *digest;	/* running digest, read/written by the engine */
};
55 
/* Virtual base of the HACE register bank, set at driver registration. */
static vaddr_t hace_virt;
/* Serializes access to the single HACE engine across sessions. */
struct mutex hace_mtx = MUTEX_INITIALIZER;

/*
 * Initial digest values, pre-arranged in the byte/word order the HACE
 * engine expects to read them from the digest DMA buffer (presumably
 * byte-swapped FIPS 180-4 IVs — TODO confirm against the datasheet).
 */
static const uint32_t iv_sha1[8] = {
	0x01234567, 0x89abcdef, 0xfedcba98, 0x76543210,
	0xf0e1d2c3, 0, 0, 0
};

static const uint32_t iv_sha256[8] = {
	0x67e6096a, 0x85ae67bb, 0x72f36e3c, 0x3af54fa5,
	0x7f520e51, 0x8c68059b, 0xabd9831f, 0x19cde05b
};

static const uint32_t iv_sha384[16] = {
	0x5d9dbbcb, 0xd89e05c1, 0x2a299a62, 0x07d57c36,
	0x5a015991, 0x17dd7030, 0xd8ec2f15, 0x39590ef7,
	0x67263367, 0x310bc0ff, 0x874ab48e, 0x11155868,
	0x0d2e0cdb, 0xa78ff964, 0x1d48b547, 0xa44ffabe
};

static const uint32_t iv_sha512[16] = {
	0x67e6096a, 0x08c9bcf3, 0x85ae67bb, 0x3ba7ca84,
	0x72f36e3c, 0x2bf894fe, 0x3af54fa5, 0xf1361d5f,
	0x7f520e51, 0xd182e6ad, 0x8c68059b, 0x1f6c3e2b,
	0xabd9831f, 0x6bbd41fb, 0x19cde05b, 0x79217e13
};
82 
ast2600_hace_process(struct crypto_hash_ctx * ctx,const uint8_t * data,size_t len)83 static TEE_Result ast2600_hace_process(struct crypto_hash_ctx *ctx,
84 				       const uint8_t *data, size_t len)
85 {
86 	TEE_Result rc = TEE_ERROR_GENERIC;
87 	uint32_t sts = 0;
88 	uint64_t tref = 0;
89 	paddr_t data_phys = 0;
90 	paddr_t digest_phys = 0;
91 	struct ast2600_hace_ctx *hctx = NULL;
92 
93 	mutex_lock(&hace_mtx);
94 
95 	hctx = container_of(ctx, struct ast2600_hace_ctx, hash_ctx);
96 
97 	sts = io_read32(hace_virt + HACE_STS);
98 	if (sts & HACE_STS_HASH_BUSY) {
99 		rc = TEE_ERROR_BUSY;
100 		goto out;
101 	}
102 
103 	cache_operation(TEE_CACHEFLUSH, (void *)data, len);
104 
105 	data_phys = virt_to_phys((void *)data);
106 	digest_phys = virt_to_phys(hctx->digest);
107 
108 	io_write32(hace_virt + HACE_HASH_DATA, (uint32_t)data_phys);
109 	io_write32(hace_virt + HACE_HASH_DIGEST, (uint32_t)digest_phys);
110 	io_write32(hace_virt + HACE_HASH_HMAC_KEY, (uint32_t)digest_phys);
111 
112 	io_write32(hace_virt + HACE_HASH_DATA_LEN, len);
113 	io_write32(hace_virt + HACE_HASH_CMD, hctx->cmd);
114 
115 	/* poll for completion */
116 	tref = timeout_init_us(1000 + (len >> 3));
117 
118 	do {
119 		sts = io_read32(hace_virt + HACE_STS);
120 		if (timeout_elapsed(tref)) {
121 			rc = TEE_ERROR_TARGET_DEAD;
122 			goto out;
123 		}
124 	} while (!(sts & HACE_STS_HASH_INT));
125 
126 	io_write32(hace_virt + HACE_STS, HACE_STS_HASH_INT);
127 
128 	rc = TEE_SUCCESS;
129 out:
130 	mutex_unlock(&hace_mtx);
131 
132 	return rc;
133 }
134 
ast2600_hace_init(struct crypto_hash_ctx * ctx)135 static TEE_Result ast2600_hace_init(struct crypto_hash_ctx *ctx)
136 {
137 	struct ast2600_hace_ctx *hctx = NULL;
138 
139 	hctx = container_of(ctx, struct ast2600_hace_ctx, hash_ctx);
140 
141 	switch (hctx->algo) {
142 	case TEE_ALG_SHA1:
143 		memcpy(hctx->digest, iv_sha1, sizeof(iv_sha1));
144 		break;
145 	case TEE_ALG_SHA256:
146 		memcpy(hctx->digest, iv_sha256, sizeof(iv_sha256));
147 		break;
148 	case TEE_ALG_SHA384:
149 		memcpy(hctx->digest, iv_sha384, sizeof(iv_sha384));
150 		break;
151 	case TEE_ALG_SHA512:
152 		memcpy(hctx->digest, iv_sha512, sizeof(iv_sha512));
153 		break;
154 	default:
155 		return TEE_ERROR_NOT_SUPPORTED;
156 	}
157 
158 	hctx->total[0] = 0;
159 	hctx->total[1] = 0;
160 
161 	cache_operation(TEE_CACHEFLUSH, hctx->digest, HASH_DGT_BUFSZ);
162 
163 	return TEE_SUCCESS;
164 }
165 
ast2600_hace_update(struct crypto_hash_ctx * ctx,const uint8_t * data,size_t len)166 static TEE_Result ast2600_hace_update(struct crypto_hash_ctx *ctx,
167 				      const uint8_t *data, size_t len)
168 {
169 	TEE_Result rc = TEE_ERROR_GENERIC;
170 	uint32_t left = 0;
171 	uint32_t fill = 0;
172 	size_t blk_size = 0;
173 	struct ast2600_hace_ctx *hctx = NULL;
174 
175 	if (!ctx || !data || !len)
176 		return TEE_ERROR_BAD_PARAMETERS;
177 
178 	hctx = container_of(ctx, struct ast2600_hace_ctx, hash_ctx);
179 
180 	blk_size = hctx->blk_size;
181 
182 	left = hctx->total[0] & (blk_size - 1);
183 	fill = blk_size - left;
184 
185 	hctx->total[0] += len;
186 	if (hctx->total[0] < len)
187 		hctx->total[1]++;
188 
189 	if (left && len >= fill) {
190 		memcpy(hctx->buf + left, data, fill);
191 		rc = ast2600_hace_process(ctx, hctx->buf, blk_size);
192 		if (rc)
193 			return rc;
194 
195 		data += fill;
196 		len -= fill;
197 		left = 0;
198 	}
199 
200 	while (len >= blk_size) {
201 		memcpy(hctx->buf, data, blk_size);
202 		rc = ast2600_hace_process(ctx, hctx->buf, blk_size);
203 		if (rc)
204 			return rc;
205 
206 		data += blk_size;
207 		len -= blk_size;
208 	}
209 
210 	if (len)
211 		memcpy(hctx->buf + left, data, len);
212 
213 	return TEE_SUCCESS;
214 }
215 
/*
 * Finish the hash: append FIPS 180-4 padding (0x80, zero fill, then the
 * big-endian message length in bits) through the normal update path,
 * then copy the engine's digest out to the caller.
 *
 * @digest  output buffer; at most hctx->dgt_size bytes are written
 * @len     caller's buffer size; output is truncated to MIN(len, dgt_size)
 */
static TEE_Result ast2600_hace_final(struct crypto_hash_ctx *ctx,
				     uint8_t *digest, size_t len)
{
	TEE_Result rc = TEE_ERROR_GENERIC;
	uint32_t last = 0;
	uint32_t padn = 0;
	uint8_t pad[HASH_BLK_BUFSZ * 2] = { };
	uint64_t dbits[2] = { };
	uint64_t dbits_be[2] = { };
	struct ast2600_hace_ctx *hctx = NULL;
	size_t length = 0;

	hctx = container_of(ctx, struct ast2600_hace_ctx, hash_ctx);
	length = MIN(len, hctx->dgt_size);

	memset(pad, 0, sizeof(pad));
	pad[0] = 0x80;

	/*
	 * Capture the 128-bit message length in bits NOW: the padding
	 * updates below also advance hctx->total[], so these must be
	 * computed before any further ast2600_hace_update() call.
	 */
	dbits[0] = (hctx->total[0] << 3);
	dbits_be[0] = get_be64(&dbits[0]);

	dbits[1] = (hctx->total[0] >> 61) | (hctx->total[1] << 3);
	dbits_be[1] = get_be64(&dbits[1]);

	/* bytes already sitting in the current (partial) block */
	last = hctx->total[0] & (hctx->blk_size - 1);

	switch (hctx->algo) {
	case TEE_ALG_SHA1:
	case TEE_ALG_SHA256:
		/* pad to 56 mod 64, leaving 8 bytes for the bit length */
		if (last < 56)
			padn = 56 - last;
		else
			padn = 120 - last;

		rc = ast2600_hace_update(ctx, pad, padn);
		if (rc)
			return rc;

		rc = ast2600_hace_update(ctx, (uint8_t *)&dbits_be[0],
					 sizeof(dbits_be[0]));
		if (rc)
			return rc;
		break;
	case TEE_ALG_SHA384:
	case TEE_ALG_SHA512:
		/* pad to 112 mod 128, leaving 16 bytes for the bit length */
		if (last < 112)
			padn = 112 - last;
		else
			padn = 240 - last;

		rc = ast2600_hace_update(ctx, pad, padn);
		if (rc)
			return rc;

		/* 128-bit length: high 64 bits first, then low 64 bits */
		rc = ast2600_hace_update(ctx, (uint8_t *)&dbits_be[1],
					 sizeof(dbits_be[1]));
		if (rc)
			return rc;

		rc = ast2600_hace_update(ctx, (uint8_t *)&dbits_be[0],
					 sizeof(dbits_be[0]));
		if (rc)
			return rc;
		break;
	default:
		return TEE_ERROR_NOT_SUPPORTED;
	}

	/* discard stale cache lines so we read the DMA-written digest */
	cache_operation(TEE_CACHEINVALIDATE, hctx->digest, HASH_DGT_BUFSZ);

	memcpy(digest, hctx->digest, length);

	return TEE_SUCCESS;
}
290 
ast2600_hace_free(struct crypto_hash_ctx * ctx)291 static void ast2600_hace_free(struct crypto_hash_ctx *ctx)
292 {
293 	struct ast2600_hace_ctx *hctx = NULL;
294 
295 	hctx = container_of(ctx, struct ast2600_hace_ctx, hash_ctx);
296 
297 	free(hctx->buf);
298 	free(hctx->digest);
299 	free(hctx);
300 }
301 
ast2600_hace_copy_state(struct crypto_hash_ctx * dst_ctx,struct crypto_hash_ctx * src_ctx)302 static void ast2600_hace_copy_state(struct crypto_hash_ctx *dst_ctx,
303 				    struct crypto_hash_ctx *src_ctx)
304 {
305 	struct ast2600_hace_ctx *src_hctx = NULL;
306 	struct ast2600_hace_ctx *dst_hctx = NULL;
307 
308 	src_hctx = container_of(src_ctx, struct ast2600_hace_ctx, hash_ctx);
309 	dst_hctx = container_of(dst_ctx, struct ast2600_hace_ctx, hash_ctx);
310 
311 	dst_hctx->hash_ctx = src_hctx->hash_ctx;
312 	dst_hctx->cmd = src_hctx->cmd;
313 	dst_hctx->dgt_size = src_hctx->dgt_size;
314 	dst_hctx->blk_size = src_hctx->blk_size;
315 	dst_hctx->pad_size = src_hctx->pad_size;
316 	dst_hctx->total[0] = src_hctx->total[0];
317 	dst_hctx->total[1] = src_hctx->total[1];
318 
319 	cache_operation(TEE_CACHEINVALIDATE, src_hctx->buf, HASH_BLK_BUFSZ);
320 	memcpy(dst_hctx->buf, src_hctx->buf, HASH_BLK_BUFSZ);
321 	cache_operation(TEE_CACHEFLUSH,	dst_hctx->buf, HASH_BLK_BUFSZ);
322 
323 	cache_operation(TEE_CACHEINVALIDATE, src_hctx->digest, HASH_DGT_BUFSZ);
324 	memcpy(dst_hctx->digest, src_hctx->digest, HASH_DGT_BUFSZ);
325 	cache_operation(TEE_CACHEFLUSH,	dst_hctx->digest, HASH_DGT_BUFSZ);
326 }
327 
/* Hash operations exposed to the drvcrypt framework. */
static const struct crypto_hash_ops ast2600_hace_ops = {
	.init = ast2600_hace_init,
	.update = ast2600_hace_update,
	.final = ast2600_hace_final,
	.free_ctx = ast2600_hace_free,
	.copy_state = ast2600_hace_copy_state,
};
335 
ast2600_hace_alloc(struct crypto_hash_ctx ** pctx,uint32_t algo)336 static TEE_Result ast2600_hace_alloc(struct crypto_hash_ctx **pctx,
337 				     uint32_t algo)
338 {
339 	struct ast2600_hace_ctx *hctx = calloc(1, sizeof(*hctx));
340 
341 	if (!hctx)
342 		return TEE_ERROR_OUT_OF_MEMORY;
343 	hctx->buf = memalign(HASH_BLK_BUFSZ, HASH_BLK_BUFSZ);
344 	if (!hctx->buf)
345 		return TEE_ERROR_OUT_OF_MEMORY;
346 
347 	hctx->digest = memalign(HASH_DGT_BUFSZ, HASH_DGT_BUFSZ);
348 	if (!hctx->digest)
349 		return TEE_ERROR_OUT_OF_MEMORY;
350 
351 	hctx->hash_ctx.ops = &ast2600_hace_ops;
352 	hctx->algo = algo;
353 	hctx->cmd = HACE_HASH_CMD_ACCUM | HACE_HASH_CMD_SHA_BE;
354 
355 	switch (algo) {
356 	case TEE_ALG_SHA1:
357 		hctx->dgt_size = 20;
358 		hctx->blk_size = 64;
359 		hctx->pad_size = 8;
360 		hctx->cmd |= HACE_HASH_CMD_ALG_SHA1;
361 		break;
362 	case TEE_ALG_SHA256:
363 		hctx->dgt_size = 32;
364 		hctx->blk_size = 64;
365 		hctx->pad_size = 8;
366 		hctx->cmd |= HACE_HASH_CMD_ALG_SHA256;
367 		break;
368 	case TEE_ALG_SHA384:
369 		hctx->dgt_size = 48;
370 		hctx->blk_size = 128;
371 		hctx->pad_size = 16;
372 		hctx->cmd |= HACE_HASH_CMD_ALG_SHA384;
373 		break;
374 	case TEE_ALG_SHA512:
375 		hctx->dgt_size = 64;
376 		hctx->blk_size = 128;
377 		hctx->pad_size = 16;
378 		hctx->cmd |= HACE_HASH_CMD_ALG_SHA512;
379 		break;
380 	default:
381 		free(hctx);
382 		return TEE_ERROR_NOT_IMPLEMENTED;
383 	}
384 
385 	*pctx = &hctx->hash_ctx;
386 
387 	return TEE_SUCCESS;
388 }
389 
ast2600_drvcrypt_register_hash(void)390 TEE_Result ast2600_drvcrypt_register_hash(void)
391 {
392 	hace_virt = core_mmu_get_va(HACE_BASE, MEM_AREA_IO_SEC,
393 				    SMALL_PAGE_SIZE);
394 	if (!hace_virt) {
395 		EMSG("cannot get HACE virtual address");
396 		return TEE_ERROR_GENERIC;
397 	}
398 
399 	return drvcrypt_register_hash(ast2600_hace_alloc);
400 }
401