// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2017-2020, Linaro Limited
 */

#include <assert.h>
#include <crypto/crypto.h>
#include <crypto/crypto_impl.h>
#include <crypto/internal_aes-gcm.h>
#include <io.h>
#include <string_ext.h>
#include <string.h>
#include <tee_api_types.h>
#include <types_ext.h>
#include <utee_defines.h>
#include <util.h>

static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
{
	size_t n;

	for (n = 0; n < len; n++)
		dst[n] ^= src[n];
}

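/*
 * Feeds @len bytes of @data into the GHASH state. Full blocks are hashed
 * directly, through a bounce buffer when @data is not block aligned, and a
 * trailing partial block is zero padded to a full block since GHASH only
 * operates on 16-byte blocks.
 */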
static void ghash_update_pad_zero(struct internal_aes_gcm_state *state,
				  const uint8_t *data, size_t len)
{
	size_t n = len / TEE_AES_BLOCK_SIZE;
	uint64_t block[2];

	if (n) {
		if (internal_aes_gcm_ptr_is_block_aligned(data)) {
			internal_aes_gcm_ghash_update(state, NULL, data, n);
		} else {
			size_t m;

			for (m = 0; m < n; m++) {
				memcpy(block, data + m * sizeof(block),
				       sizeof(block));
				internal_aes_gcm_ghash_update(state, NULL,
							      (void *)block, 1);
			}
		}
	}

	if (len - n * TEE_AES_BLOCK_SIZE) {
		memset(block, 0, sizeof(block));
		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
		       len - n * TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_ghash_update(state, (void *)block, NULL, 0);
	}
}

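/*
 * GHASH is finalized with a block holding the bit lengths of the two
 * inputs as big-endian 64-bit values. @l1 and @l2 are byte counts and are
 * converted to bit counts here.
 */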
static void ghash_update_lengths(struct internal_aes_gcm_state *state,
				 uint32_t l1, uint32_t l2)
{
	uint64_t len_fields[2] = {
		TEE_U64_TO_BIG_ENDIAN(l1 * 8),
		TEE_U64_TO_BIG_ENDIAN(l2 * 8)
	};

	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
	internal_aes_gcm_ghash_update(state, (uint8_t *)len_fields, NULL, 0);
}

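/*
 * Sets up the GCM state for one operation. A 96-bit nonce is used directly
 * as the upper part of the counter block, any other nonce length is run
 * through GHASH together with its bit length to form the initial counter
 * block. The encryption of the initial counter block is kept in buf_tag
 * and is xored with the final GHASH value to produce the tag.
 */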
static TEE_Result __gcm_init(struct internal_aes_gcm_state *state,
			     const struct internal_aes_gcm_key *ek,
			     TEE_OperationMode mode, const void *nonce,
			     size_t nonce_len, size_t tag_len)
{
	COMPILE_TIME_ASSERT(sizeof(state->ctr) == TEE_AES_BLOCK_SIZE);

	if (tag_len > sizeof(state->buf_tag))
		return TEE_ERROR_BAD_PARAMETERS;

	memset(state, 0, sizeof(*state));

	state->tag_len = tag_len;
	internal_aes_gcm_set_key(state, ek);

	if (nonce_len == (96 / 8)) {
		memcpy(state->ctr, nonce, nonce_len);
		internal_aes_gcm_inc_ctr(state);
	} else {
		ghash_update_pad_zero(state, nonce, nonce_len);
		ghash_update_lengths(state, 0, nonce_len);

		memcpy(state->ctr, state->hash_state, sizeof(state->ctr));
		memset(state->hash_state, 0, sizeof(state->hash_state));
	}

	crypto_aes_enc_block(ek->data, sizeof(ek->data), ek->rounds,
			     state->ctr, state->buf_tag);
	internal_aes_gcm_inc_ctr(state);
	if (mode == TEE_MODE_ENCRYPT) {
		/*
		 * Encryption uses the pre-encrypted xor-buffer to encrypt
		 * while decryption encrypts the xor-buffer when needed
		 * instead.
		 *
		 * The reason for this is that the combined encryption and
		 * ghash implementation does both operations intertwined.
		 * In the decrypt case the xor-buffer is needed at the end
		 * of processing each block, while the encryption case
		 * needs the xor-buffer before processing each block.
		 *
		 * In a pure software implementation we wouldn't have any
		 * use for this kind of optimization, but since this
		 * AES-GCM implementation is aimed at being combined with
		 * accelerated routines it's more convenient to always have
		 * this optimization activated.
		 */
		crypto_aes_enc_block(ek->data, sizeof(ek->data), ek->rounds,
				     state->ctr, state->buf_cryp);
		internal_aes_gcm_inc_ctr(state);
	}

	return TEE_SUCCESS;
}

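/*
 * Expands the AES encryption key schedule into the context before
 * initializing the GCM state.
 */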
TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
				 TEE_OperationMode mode, const void *key,
				 size_t key_len, const void *nonce,
				 size_t nonce_len, size_t tag_len)
{
	TEE_Result res = TEE_SUCCESS;
	struct internal_aes_gcm_key *ek = &ctx->key;

	res = crypto_aes_expand_enc_key(key, key_len, ek->data,
					sizeof(ek->data), &ek->rounds);
	if (res)
		return res;

	return __gcm_init(&ctx->state, ek, mode, nonce, nonce_len, tag_len);
}

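/*
 * Absorbs additional authenticated data into GHASH. Data is buffered in
 * buf_hash until a full block is available; block aligned input is hashed
 * directly without copying. AAD must be supplied before any payload.
 */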
static TEE_Result __gcm_update_aad(struct internal_aes_gcm_state *state,
				   const void *data, size_t len)
{
	const uint8_t *d = data;
	size_t l = len;
	const uint8_t *head = NULL;
	size_t n;

	if (state->payload_bytes)
		return TEE_ERROR_BAD_PARAMETERS;

	state->aad_bytes += len;

	while (l) {
		if (state->buf_pos ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);
			memcpy(state->buf_hash + state->buf_pos, d, n);
			state->buf_pos += n;

			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			state->buf_pos = 0;
			head = state->buf_hash;
			d += n;
			l -= n;
		}

		if (internal_aes_gcm_ptr_is_block_aligned(d))
			n = l / TEE_AES_BLOCK_SIZE;
		else
			n = 0;

		internal_aes_gcm_ghash_update(state, head, d, n);
		l -= n * TEE_AES_BLOCK_SIZE;
		d += n * TEE_AES_BLOCK_SIZE;
	}

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
				       const void *data, size_t len)
{
	return __gcm_update_aad(&ctx->state, data, len);
}

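/*
 * Encrypts or decrypts payload in CTR mode while feeding the ciphertext
 * into GHASH. A partial trailing AAD block, if any, is first zero padded
 * and hashed. Unaligned or partial payload is staged through buf_cryp and
 * buf_hash; full blocks from aligned input go through
 * internal_aes_gcm_update_payload_blocks(), which may be provided by an
 * accelerated implementation.
 */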
static TEE_Result
__gcm_update_payload(struct internal_aes_gcm_state *state,
		     const struct internal_aes_gcm_key *ek,
		     TEE_OperationMode mode, const void *src,
		     size_t len, void *dst)
{
	size_t n;
	const uint8_t *s = src;
	uint8_t *d = dst;
	size_t l = len;

	if (!state->payload_bytes && state->buf_pos) {
		/* AAD part done, finish up the last bits. */
		memset(state->buf_hash + state->buf_pos, 0,
		       TEE_AES_BLOCK_SIZE - state->buf_pos);
		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
		state->buf_pos = 0;
	}

	state->payload_bytes += len;

	while (l) {
		if (state->buf_pos || l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);

			if (!state->buf_pos && mode == TEE_MODE_DECRYPT)
				crypto_aes_enc_block(ek->data, sizeof(ek->data),
						     ek->rounds, state->ctr,
						     state->buf_cryp);

			xor_buf(state->buf_cryp + state->buf_pos, s, n);
			memcpy(d, state->buf_cryp + state->buf_pos, n);
			if (mode == TEE_MODE_ENCRYPT)
				memcpy(state->buf_hash + state->buf_pos,
				       state->buf_cryp + state->buf_pos, n);
			else
				memcpy(state->buf_hash + state->buf_pos, s, n);

			state->buf_pos += n;

			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			internal_aes_gcm_ghash_update(state, state->buf_hash,
						      NULL, 0);
			state->buf_pos = 0;
			d += n;
			s += n;
			l -= n;

			if (mode == TEE_MODE_ENCRYPT)
				crypto_aes_enc_block(ek->data, sizeof(ek->data),
						     ek->rounds, state->ctr,
						     state->buf_cryp);
			internal_aes_gcm_inc_ctr(state);
		} else {
			n = l / TEE_AES_BLOCK_SIZE;
			internal_aes_gcm_update_payload_blocks(state, ek, mode,
							       s, n, d);
			s += n * TEE_AES_BLOCK_SIZE;
			d += n * TEE_AES_BLOCK_SIZE;
			l -= n * TEE_AES_BLOCK_SIZE;
		}
	}

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
					   TEE_OperationMode mode,
					   const void *src, size_t len,
					   void *dst)
{
	return __gcm_update_payload(&ctx->state, &ctx->key, mode, src, len,
				    dst);
}

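/*
 * Processes any remaining payload, hashes the final partial block and the
 * length block, and computes the tag as the xor of the encrypted initial
 * counter block (buf_tag) and the GHASH value.
 */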
static TEE_Result operation_final(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *enc_key,
				  TEE_OperationMode m, const uint8_t *src,
				  size_t len, uint8_t *dst)
{
	TEE_Result res;

	res = __gcm_update_payload(state, enc_key, m, src, len, dst);
	if (res)
		return res;

	if (state->buf_pos) {
		memset(state->buf_hash + state->buf_pos, 0,
		       sizeof(state->buf_hash) - state->buf_pos);
		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
	}

	ghash_update_lengths(state, state->aad_bytes, state->payload_bytes);
	/*
	 * buf_tag was filled in with the encrypted initial counter block
	 * in __gcm_init().
	 */
	xor_buf(state->buf_tag, state->hash_state, state->tag_len);

	return TEE_SUCCESS;
}

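/*
 * @tag_len is an in/out parameter: the caller supplies the size of the tag
 * buffer and gets back the number of tag bytes written.
 */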
static TEE_Result __gcm_enc_final(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *enc_key,
				  const void *src, size_t len, void *dst,
				  void *tag, size_t *tag_len)
{
	TEE_Result res;

	if (*tag_len < state->tag_len)
		return TEE_ERROR_SHORT_BUFFER;

	res = operation_final(state, enc_key, TEE_MODE_ENCRYPT, src, len, dst);
	if (res)
		return res;

	memcpy(tag, state->buf_tag, state->tag_len);
	*tag_len = state->tag_len;

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      void *tag, size_t *tag_len)
{
	return __gcm_enc_final(&ctx->state, &ctx->key, src, len, dst, tag,
			       tag_len);
}

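/*
 * The supplied tag must match the configured tag length exactly and is
 * compared against the computed tag in constant time.
 */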
static TEE_Result __gcm_dec_final(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *enc_key,
				  const void *src, size_t len, void *dst,
				  const void *tag, size_t tag_len)
{
	TEE_Result res;

	if (tag_len != state->tag_len)
		return TEE_ERROR_MAC_INVALID;

	res = operation_final(state, enc_key, TEE_MODE_DECRYPT, src, len, dst);
	if (res)
		return res;

	if (consttime_memcmp(state->buf_tag, tag, tag_len))
		return TEE_ERROR_MAC_INVALID;

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      const void *tag, size_t tag_len)
{
	return __gcm_dec_final(&ctx->state, &ctx->key, src, len, dst, tag,
			       tag_len);
}

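/*
 * The counter block is stored as two big-endian 64-bit words and is
 * incremented as a 128-bit big-endian integer, carrying from the low word
 * into the high word on overflow.
 */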
void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_state *state)
{
	uint64_t c = 0;

	c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[1]) + 1;
	state->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
	if (!c) {
		c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[0]) + 1;
		state->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
	}
}

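/* The inverse of internal_aes_gcm_inc_ctr(), borrowing on underflow. */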
void internal_aes_gcm_dec_ctr(struct internal_aes_gcm_state *state)
{
	uint64_t c = 0;

	c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[1]) - 1;
	state->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
	if (c == UINT64_MAX) {
		c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[0]) - 1;
		state->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
	}
}

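/*
 * One-shot encryption with a caller supplied pre-expanded key: init, AAD
 * and final in a single call, using a GCM state on the stack.
 *
 * For illustration only (the buffers key, key_len, nonce, aad, aad_len,
 * plain, plain_len and ciph below are assumed caller variables, not taken
 * from this file), a caller could use it roughly like this:
 *
 *	struct internal_aes_gcm_key ek = { };
 *	uint8_t tag[16] = { };
 *	size_t tag_len = sizeof(tag);
 *	TEE_Result res = TEE_SUCCESS;
 *
 *	res = crypto_aes_expand_enc_key(key, key_len, ek.data,
 *					sizeof(ek.data), &ek.rounds);
 *	if (res)
 *		return res;
 *	return internal_aes_gcm_enc(&ek, nonce, 12, aad, aad_len,
 *				    plain, plain_len, ciph, tag, &tag_len);
 */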
TEE_Result internal_aes_gcm_enc(const struct internal_aes_gcm_key *enc_key,
				const void *nonce, size_t nonce_len,
				const void *aad, size_t aad_len,
				const void *src, size_t len, void *dst,
				void *tag, size_t *tag_len)
{
	TEE_Result res;
	struct internal_aes_gcm_state state;

	res = __gcm_init(&state, enc_key, TEE_MODE_ENCRYPT, nonce, nonce_len,
			 *tag_len);
	if (res)
		return res;

	if (aad) {
		res = __gcm_update_aad(&state, aad, aad_len);
		if (res)
			return res;
	}

	return __gcm_enc_final(&state, enc_key, src, len, dst, tag, tag_len);
}

TEE_Result internal_aes_gcm_dec(const struct internal_aes_gcm_key *enc_key,
				const void *nonce, size_t nonce_len,
				const void *aad, size_t aad_len,
				const void *src, size_t len, void *dst,
				const void *tag, size_t tag_len)
{
	TEE_Result res;
	struct internal_aes_gcm_state state;

	res = __gcm_init(&state, enc_key, TEE_MODE_DECRYPT, nonce, nonce_len,
			 tag_len);
	if (res)
		return res;

	if (aad) {
		res = __gcm_update_aad(&state, aad, aad_len);
		if (res)
			return res;
	}

	return __gcm_dec_final(&state, enc_key, src, len, dst, tag, tag_len);
}

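/*
 * When the AES-GCM implementation is not provided by the configured crypto
 * library, glue the implementation above into the generic crypto_authenc
 * interface.
 */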
#ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
#include <stdlib.h>
#include <crypto/crypto.h>

struct aes_gcm_ctx {
	struct crypto_authenc_ctx aec;
	struct internal_aes_gcm_ctx ctx;
};

static const struct crypto_authenc_ops aes_gcm_ops;

static struct aes_gcm_ctx *
to_aes_gcm_ctx(struct crypto_authenc_ctx *aec)
{
	assert(aec->ops == &aes_gcm_ops);

	return container_of(aec, struct aes_gcm_ctx, aec);
}

TEE_Result crypto_aes_gcm_alloc_ctx(struct crypto_authenc_ctx **ctx_ret)
{
	struct aes_gcm_ctx *ctx = calloc(1, sizeof(*ctx));

	if (!ctx)
		return TEE_ERROR_OUT_OF_MEMORY;
	ctx->aec.ops = &aes_gcm_ops;

	*ctx_ret = &ctx->aec;

	return TEE_SUCCESS;
}

static void aes_gcm_free_ctx(struct crypto_authenc_ctx *aec)
{
	free(to_aes_gcm_ctx(aec));
}

static void aes_gcm_copy_state(struct crypto_authenc_ctx *dst_ctx,
			       struct crypto_authenc_ctx *src_ctx)
{
	to_aes_gcm_ctx(dst_ctx)->ctx = to_aes_gcm_ctx(src_ctx)->ctx;
}

static TEE_Result aes_gcm_init(struct crypto_authenc_ctx *aec,
			       TEE_OperationMode mode,
			       const uint8_t *key, size_t key_len,
			       const uint8_t *nonce, size_t nonce_len,
			       size_t tag_len, size_t aad_len __unused,
			       size_t payload_len __unused)
{
	return internal_aes_gcm_init(&to_aes_gcm_ctx(aec)->ctx, mode, key,
				     key_len, nonce, nonce_len, tag_len);
}

static TEE_Result aes_gcm_update_aad(struct crypto_authenc_ctx *aec,
				     const uint8_t *data, size_t len)
{
	return internal_aes_gcm_update_aad(&to_aes_gcm_ctx(aec)->ctx, data,
					   len);
}

static TEE_Result aes_gcm_update_payload(struct crypto_authenc_ctx *aec,
					 TEE_OperationMode m,
					 const uint8_t *src, size_t len,
					 uint8_t *dst)
{
	return internal_aes_gcm_update_payload(&to_aes_gcm_ctx(aec)->ctx,
					       m, src, len, dst);
}

static TEE_Result aes_gcm_enc_final(struct crypto_authenc_ctx *aec,
				    const uint8_t *src, size_t len,
				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
{
	return internal_aes_gcm_enc_final(&to_aes_gcm_ctx(aec)->ctx, src, len,
					  dst, tag, tag_len);
}

static TEE_Result aes_gcm_dec_final(struct crypto_authenc_ctx *aec,
				    const uint8_t *src, size_t len,
				    uint8_t *dst, const uint8_t *tag,
				    size_t tag_len)
{
	return internal_aes_gcm_dec_final(&to_aes_gcm_ctx(aec)->ctx, src, len,
					  dst, tag, tag_len);
}

static void aes_gcm_final(struct crypto_authenc_ctx *aec __unused)
{
}

static const struct crypto_authenc_ops aes_gcm_ops = {
	.init = aes_gcm_init,
	.update_aad = aes_gcm_update_aad,
	.update_payload = aes_gcm_update_payload,
	.enc_final = aes_gcm_enc_final,
	.dec_final = aes_gcm_dec_final,
	.final = aes_gcm_final,
	.free_ctx = aes_gcm_free_ctx,
	.copy_state = aes_gcm_copy_state,
};

/*
 * internal_aes_gcm_gfmul() is based on ghash_gfmul() from
 * https://github.com/openbsd/src/blob/master/sys/crypto/gmac.c
 */
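/*
 * Bit-serial multiplication in GF(2^128) as used by GHASH: for each bit of
 * X the current Y is conditionally xored into the result, then Y is shifted
 * right one bit with the field reduction applied via the 0xe1.. constant.
 */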
void internal_aes_gcm_gfmul(const uint64_t X[2], const uint64_t Y[2],
			    uint64_t product[2])
{
	uint64_t y[2] = { 0 };
	uint64_t z[2] = { 0 };
	const uint8_t *x = (const uint8_t *)X;
	uint32_t mul = 0;
	size_t n = 0;

	y[0] = TEE_U64_FROM_BIG_ENDIAN(Y[0]);
	y[1] = TEE_U64_FROM_BIG_ENDIAN(Y[1]);

	for (n = 0; n < TEE_AES_BLOCK_SIZE * 8; n++) {
		/* update Z */
		if (x[n >> 3] & (1 << (~n & 7)))
			internal_aes_gcm_xor_block(z, y);

		/* update Y */
		mul = y[1] & 1;
		y[1] = (y[0] << 63) | (y[1] >> 1);
		y[0] = (y[0] >> 1) ^ (0xe100000000000000 * mul);
	}

	product[0] = TEE_U64_TO_BIG_ENDIAN(z[0]);
	product[1] = TEE_U64_TO_BIG_ENDIAN(z[1]);
}
#endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/