// Copyright 1995-2016 The OpenSSL Project Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
14 
15 #include <openssl/ripemd.h>
16 
17 #include <string.h>
18 
19 #include "../../crypto/fipsmodule/digest/md32_common.h"
20 #include "../../crypto/internal.h"
21 
22 
23 #define RIPEMD160_A 0x67452301L
24 #define RIPEMD160_B 0xEFCDAB89L
25 #define RIPEMD160_C 0x98BADCFEL
26 #define RIPEMD160_D 0x10325476L
27 #define RIPEMD160_E 0xC3D2E1F0L
28 
RIPEMD160_Init(RIPEMD160_CTX * ctx)29 int RIPEMD160_Init(RIPEMD160_CTX *ctx) {
30   OPENSSL_memset(ctx, 0, sizeof(*ctx));
31   ctx->h[0] = RIPEMD160_A;
32   ctx->h[1] = RIPEMD160_B;
33   ctx->h[2] = RIPEMD160_C;
34   ctx->h[3] = RIPEMD160_D;
35   ctx->h[4] = RIPEMD160_E;
36   return 1;
37 }
38 
39 static void ripemd160_block_data_order(uint32_t h[5], const uint8_t *data,
40                                        size_t num);
41 
RIPEMD160_Transform(RIPEMD160_CTX * c,const uint8_t data[RIPEMD160_CBLOCK])42 void RIPEMD160_Transform(RIPEMD160_CTX *c,
43                          const uint8_t data[RIPEMD160_CBLOCK]) {
44   ripemd160_block_data_order(c->h, data, 1);
45 }
46 
47 namespace {
48 struct RIPEMD160Traits {
49   using HashContext = RIPEMD160_CTX;
50   static constexpr size_t kBlockSize = RIPEMD160_CBLOCK;
51   static constexpr bool kLengthIsBigEndian = false;
HashBlocks__anon70a8848d0111::RIPEMD160Traits52   static void HashBlocks(uint32_t *state, const uint8_t *data,
53                          size_t num_blocks) {
54     ripemd160_block_data_order(state, data, num_blocks);
55   }
56 };
57 }  // namespace
58 
RIPEMD160_Update(RIPEMD160_CTX * c,const void * data,size_t len)59 int RIPEMD160_Update(RIPEMD160_CTX *c, const void *data, size_t len) {
60   bssl::crypto_md32_update<RIPEMD160Traits>(
61       c, bssl::Span(static_cast<const uint8_t *>(data), len));
62   return 1;
63 }
64 
RIPEMD160_Final(uint8_t out[RIPEMD160_DIGEST_LENGTH],RIPEMD160_CTX * c)65 int RIPEMD160_Final(uint8_t out[RIPEMD160_DIGEST_LENGTH], RIPEMD160_CTX *c) {
66   bssl::crypto_md32_final<RIPEMD160Traits>(c);
67   CRYPTO_store_u32_le(out, c->h[0]);
68   CRYPTO_store_u32_le(out + 4, c->h[1]);
69   CRYPTO_store_u32_le(out + 8, c->h[2]);
70   CRYPTO_store_u32_le(out + 12, c->h[3]);
71   CRYPTO_store_u32_le(out + 16, c->h[4]);
72   return 1;
73 }
74 
// The five per-round boolean functions of RIPEMD-160.
// Transformed F2 and F4 are courtesy of Wei Dai <weidai@eskimo.com>
#define F1(x, y, z) ((x) ^ (y) ^ (z))
#define F2(x, y, z) ((((y) ^ (z)) & (x)) ^ (z))
#define F3(x, y, z) (((~(y)) | (x)) ^ (z))
#define F4(x, y, z) ((((x) ^ (y)) & (z)) ^ (y))
#define F5(x, y, z) (((~(z)) | (y)) ^ (x))

// One RIPEMD-160 step: a += F(b,c,d) + X[w] (+ K); a = rotl(a, s) + e;
// c = rotl(c, 10). RIP1 has no additive constant (K == 0 for that round).
#define RIP1(a, b, c, d, e, w, s)  \
  {                                \
    a += F1(b, c, d) + X(w);       \
    a = CRYPTO_rotl_u32(a, s) + e; \
    c = CRYPTO_rotl_u32(c, 10);    \
  }

#define RIP2(a, b, c, d, e, w, s, K) \
  {                                  \
    a += F2(b, c, d) + X(w) + K;     \
    a = CRYPTO_rotl_u32(a, s) + e;   \
    c = CRYPTO_rotl_u32(c, 10);      \
  }

#define RIP3(a, b, c, d, e, w, s, K) \
  {                                  \
    a += F3(b, c, d) + X(w) + K;     \
    a = CRYPTO_rotl_u32(a, s) + e;   \
    c = CRYPTO_rotl_u32(c, 10);      \
  }

#define RIP4(a, b, c, d, e, w, s, K) \
  {                                  \
    a += F4(b, c, d) + X(w) + K;     \
    a = CRYPTO_rotl_u32(a, s) + e;   \
    c = CRYPTO_rotl_u32(c, 10);      \
  }

#define RIP5(a, b, c, d, e, w, s, K) \
  {                                  \
    a += F5(b, c, d) + X(w) + K;     \
    a = CRYPTO_rotl_u32(a, s) + e;   \
    c = CRYPTO_rotl_u32(c, 10);      \
  }

// Round constants for the left line (KL0..KL4) and right line (KR0..KR4).
#define KL0 0x00000000L
#define KL1 0x5A827999L
#define KL2 0x6ED9EBA1L
#define KL3 0x8F1BBCDCL
#define KL4 0xA953FD4EL

#define KR0 0x50A28BE6L
#define KR1 0x5C4DD124L
#define KR2 0x6D703EF3L
#define KR3 0x7A6D76E9L
#define KR4 0x00000000L
128 
// Left line: message word index (WLnn) and rotation amount (SLnn) for each of
// the 80 steps, per the RIPEMD-160 specification.
#define WL00 0
#define SL00 11
#define WL01 1
#define SL01 14
#define WL02 2
#define SL02 15
#define WL03 3
#define SL03 12
#define WL04 4
#define SL04 5
#define WL05 5
#define SL05 8
#define WL06 6
#define SL06 7
#define WL07 7
#define SL07 9
#define WL08 8
#define SL08 11
#define WL09 9
#define SL09 13
#define WL10 10
#define SL10 14
#define WL11 11
#define SL11 15
#define WL12 12
#define SL12 6
#define WL13 13
#define SL13 7
#define WL14 14
#define SL14 9
#define WL15 15
#define SL15 8

#define WL16 7
#define SL16 7
#define WL17 4
#define SL17 6
#define WL18 13
#define SL18 8
#define WL19 1
#define SL19 13
#define WL20 10
#define SL20 11
#define WL21 6
#define SL21 9
#define WL22 15
#define SL22 7
#define WL23 3
#define SL23 15
#define WL24 12
#define SL24 7
#define WL25 0
#define SL25 12
#define WL26 9
#define SL26 15
#define WL27 5
#define SL27 9
#define WL28 2
#define SL28 11
#define WL29 14
#define SL29 7
#define WL30 11
#define SL30 13
#define WL31 8
#define SL31 12

#define WL32 3
#define SL32 11
#define WL33 10
#define SL33 13
#define WL34 14
#define SL34 6
#define WL35 4
#define SL35 7
#define WL36 9
#define SL36 14
#define WL37 15
#define SL37 9
#define WL38 8
#define SL38 13
#define WL39 1
#define SL39 15
#define WL40 2
#define SL40 14
#define WL41 7
#define SL41 8
#define WL42 0
#define SL42 13
#define WL43 6
#define SL43 6
#define WL44 13
#define SL44 5
#define WL45 11
#define SL45 12
#define WL46 5
#define SL46 7
#define WL47 12
#define SL47 5

#define WL48 1
#define SL48 11
#define WL49 9
#define SL49 12
#define WL50 11
#define SL50 14
#define WL51 10
#define SL51 15
#define WL52 0
#define SL52 14
#define WL53 8
#define SL53 15
#define WL54 12
#define SL54 9
#define WL55 4
#define SL55 8
#define WL56 13
#define SL56 9
#define WL57 3
#define SL57 14
#define WL58 7
#define SL58 5
#define WL59 15
#define SL59 6
#define WL60 14
#define SL60 8
#define WL61 5
#define SL61 6
#define WL62 6
#define SL62 5
#define WL63 2
#define SL63 12

#define WL64 4
#define SL64 9
#define WL65 0
#define SL65 15
#define WL66 5
#define SL66 5
#define WL67 9
#define SL67 11
#define WL68 7
#define SL68 6
#define WL69 12
#define SL69 8
#define WL70 2
#define SL70 13
#define WL71 10
#define SL71 12
#define WL72 14
#define SL72 5
#define WL73 1
#define SL73 12
#define WL74 3
#define SL74 13
#define WL75 8
#define SL75 14
#define WL76 11
#define SL76 11
#define WL77 6
#define SL77 8
#define WL78 15
#define SL78 5
#define WL79 13
#define SL79 6
293 
// Right line: message word index (WRnn) and rotation amount (SRnn) for each
// of the 80 steps, per the RIPEMD-160 specification.
#define WR00 5
#define SR00 8
#define WR01 14
#define SR01 9
#define WR02 7
#define SR02 9
#define WR03 0
#define SR03 11
#define WR04 9
#define SR04 13
#define WR05 2
#define SR05 15
#define WR06 11
#define SR06 15
#define WR07 4
#define SR07 5
#define WR08 13
#define SR08 7
#define WR09 6
#define SR09 7
#define WR10 15
#define SR10 8
#define WR11 8
#define SR11 11
#define WR12 1
#define SR12 14
#define WR13 10
#define SR13 14
#define WR14 3
#define SR14 12
#define WR15 12
#define SR15 6

#define WR16 6
#define SR16 9
#define WR17 11
#define SR17 13
#define WR18 3
#define SR18 15
#define WR19 7
#define SR19 7
#define WR20 0
#define SR20 12
#define WR21 13
#define SR21 8
#define WR22 5
#define SR22 9
#define WR23 10
#define SR23 11
#define WR24 14
#define SR24 7
#define WR25 15
#define SR25 7
#define WR26 8
#define SR26 12
#define WR27 12
#define SR27 7
#define WR28 4
#define SR28 6
#define WR29 9
#define SR29 15
#define WR30 1
#define SR30 13
#define WR31 2
#define SR31 11

#define WR32 15
#define SR32 9
#define WR33 5
#define SR33 7
#define WR34 1
#define SR34 15
#define WR35 3
#define SR35 11
#define WR36 7
#define SR36 8
#define WR37 14
#define SR37 6
#define WR38 6
#define SR38 6
#define WR39 9
#define SR39 14
#define WR40 11
#define SR40 12
#define WR41 8
#define SR41 13
#define WR42 12
#define SR42 5
#define WR43 2
#define SR43 14
#define WR44 10
#define SR44 13
#define WR45 0
#define SR45 13
#define WR46 4
#define SR46 7
#define WR47 13
#define SR47 5

#define WR48 8
#define SR48 15
#define WR49 6
#define SR49 5
#define WR50 4
#define SR50 8
#define WR51 1
#define SR51 11
#define WR52 3
#define SR52 14
#define WR53 11
#define SR53 14
#define WR54 15
#define SR54 6
#define WR55 0
#define SR55 14
#define WR56 5
#define SR56 6
#define WR57 12
#define SR57 9
#define WR58 2
#define SR58 12
#define WR59 13
#define SR59 9
#define WR60 9
#define SR60 12
#define WR61 7
#define SR61 5
#define WR62 10
#define SR62 15
#define WR63 14
#define SR63 8

#define WR64 12
#define SR64 8
#define WR65 15
#define SR65 5
#define WR66 10
#define SR66 12
#define WR67 4
#define SR67 9
#define WR68 1
#define SR68 12
#define WR69 5
#define SR69 5
#define WR70 8
#define SR70 14
#define WR71 7
#define SR71 6
#define WR72 6
#define SR72 8
#define WR73 2
#define SR73 13
#define WR74 13
#define SR74 6
#define WR75 14
#define SR75 5
#define WR76 0
#define SR76 15
#define WR77 3
#define SR77 13
#define WR78 9
#define SR78 11
#define WR79 11
#define SR79 11
458 
ripemd160_block_data_order(uint32_t h[5],const uint8_t * data,size_t num)459 static void ripemd160_block_data_order(uint32_t h[5], const uint8_t *data,
460                                        size_t num) {
461   uint32_t A, B, C, D, E;
462   uint32_t a, b, c, d, e;
463   uint32_t XX0, XX1, XX2, XX3, XX4, XX5, XX6, XX7, XX8, XX9, XX10, XX11, XX12,
464       XX13, XX14, XX15;
465 #define X(i) XX##i
466 
467   for (; num--;) {
468     A = h[0];
469     B = h[1];
470     C = h[2];
471     D = h[3];
472     E = h[4];
473 
474     X(0) = CRYPTO_load_u32_le(data);
475     data += 4;
476     X(1) = CRYPTO_load_u32_le(data);
477     data += 4;
478     RIP1(A, B, C, D, E, WL00, SL00);
479     X(2) = CRYPTO_load_u32_le(data);
480     data += 4;
481     RIP1(E, A, B, C, D, WL01, SL01);
482     X(3) = CRYPTO_load_u32_le(data);
483     data += 4;
484     RIP1(D, E, A, B, C, WL02, SL02);
485     X(4) = CRYPTO_load_u32_le(data);
486     data += 4;
487     RIP1(C, D, E, A, B, WL03, SL03);
488     X(5) = CRYPTO_load_u32_le(data);
489     data += 4;
490     RIP1(B, C, D, E, A, WL04, SL04);
491     X(6) = CRYPTO_load_u32_le(data);
492     data += 4;
493     RIP1(A, B, C, D, E, WL05, SL05);
494     X(7) = CRYPTO_load_u32_le(data);
495     data += 4;
496     RIP1(E, A, B, C, D, WL06, SL06);
497     X(8) = CRYPTO_load_u32_le(data);
498     data += 4;
499     RIP1(D, E, A, B, C, WL07, SL07);
500     X(9) = CRYPTO_load_u32_le(data);
501     data += 4;
502     RIP1(C, D, E, A, B, WL08, SL08);
503     X(10) = CRYPTO_load_u32_le(data);
504     data += 4;
505     RIP1(B, C, D, E, A, WL09, SL09);
506     X(11) = CRYPTO_load_u32_le(data);
507     data += 4;
508     RIP1(A, B, C, D, E, WL10, SL10);
509     X(12) = CRYPTO_load_u32_le(data);
510     data += 4;
511     RIP1(E, A, B, C, D, WL11, SL11);
512     X(13) = CRYPTO_load_u32_le(data);
513     data += 4;
514     RIP1(D, E, A, B, C, WL12, SL12);
515     X(14) = CRYPTO_load_u32_le(data);
516     data += 4;
517     RIP1(C, D, E, A, B, WL13, SL13);
518     X(15) = CRYPTO_load_u32_le(data);
519     data += 4;
520     RIP1(B, C, D, E, A, WL14, SL14);
521     RIP1(A, B, C, D, E, WL15, SL15);
522 
523     RIP2(E, A, B, C, D, WL16, SL16, KL1);
524     RIP2(D, E, A, B, C, WL17, SL17, KL1);
525     RIP2(C, D, E, A, B, WL18, SL18, KL1);
526     RIP2(B, C, D, E, A, WL19, SL19, KL1);
527     RIP2(A, B, C, D, E, WL20, SL20, KL1);
528     RIP2(E, A, B, C, D, WL21, SL21, KL1);
529     RIP2(D, E, A, B, C, WL22, SL22, KL1);
530     RIP2(C, D, E, A, B, WL23, SL23, KL1);
531     RIP2(B, C, D, E, A, WL24, SL24, KL1);
532     RIP2(A, B, C, D, E, WL25, SL25, KL1);
533     RIP2(E, A, B, C, D, WL26, SL26, KL1);
534     RIP2(D, E, A, B, C, WL27, SL27, KL1);
535     RIP2(C, D, E, A, B, WL28, SL28, KL1);
536     RIP2(B, C, D, E, A, WL29, SL29, KL1);
537     RIP2(A, B, C, D, E, WL30, SL30, KL1);
538     RIP2(E, A, B, C, D, WL31, SL31, KL1);
539 
540     RIP3(D, E, A, B, C, WL32, SL32, KL2);
541     RIP3(C, D, E, A, B, WL33, SL33, KL2);
542     RIP3(B, C, D, E, A, WL34, SL34, KL2);
543     RIP3(A, B, C, D, E, WL35, SL35, KL2);
544     RIP3(E, A, B, C, D, WL36, SL36, KL2);
545     RIP3(D, E, A, B, C, WL37, SL37, KL2);
546     RIP3(C, D, E, A, B, WL38, SL38, KL2);
547     RIP3(B, C, D, E, A, WL39, SL39, KL2);
548     RIP3(A, B, C, D, E, WL40, SL40, KL2);
549     RIP3(E, A, B, C, D, WL41, SL41, KL2);
550     RIP3(D, E, A, B, C, WL42, SL42, KL2);
551     RIP3(C, D, E, A, B, WL43, SL43, KL2);
552     RIP3(B, C, D, E, A, WL44, SL44, KL2);
553     RIP3(A, B, C, D, E, WL45, SL45, KL2);
554     RIP3(E, A, B, C, D, WL46, SL46, KL2);
555     RIP3(D, E, A, B, C, WL47, SL47, KL2);
556 
557     RIP4(C, D, E, A, B, WL48, SL48, KL3);
558     RIP4(B, C, D, E, A, WL49, SL49, KL3);
559     RIP4(A, B, C, D, E, WL50, SL50, KL3);
560     RIP4(E, A, B, C, D, WL51, SL51, KL3);
561     RIP4(D, E, A, B, C, WL52, SL52, KL3);
562     RIP4(C, D, E, A, B, WL53, SL53, KL3);
563     RIP4(B, C, D, E, A, WL54, SL54, KL3);
564     RIP4(A, B, C, D, E, WL55, SL55, KL3);
565     RIP4(E, A, B, C, D, WL56, SL56, KL3);
566     RIP4(D, E, A, B, C, WL57, SL57, KL3);
567     RIP4(C, D, E, A, B, WL58, SL58, KL3);
568     RIP4(B, C, D, E, A, WL59, SL59, KL3);
569     RIP4(A, B, C, D, E, WL60, SL60, KL3);
570     RIP4(E, A, B, C, D, WL61, SL61, KL3);
571     RIP4(D, E, A, B, C, WL62, SL62, KL3);
572     RIP4(C, D, E, A, B, WL63, SL63, KL3);
573 
574     RIP5(B, C, D, E, A, WL64, SL64, KL4);
575     RIP5(A, B, C, D, E, WL65, SL65, KL4);
576     RIP5(E, A, B, C, D, WL66, SL66, KL4);
577     RIP5(D, E, A, B, C, WL67, SL67, KL4);
578     RIP5(C, D, E, A, B, WL68, SL68, KL4);
579     RIP5(B, C, D, E, A, WL69, SL69, KL4);
580     RIP5(A, B, C, D, E, WL70, SL70, KL4);
581     RIP5(E, A, B, C, D, WL71, SL71, KL4);
582     RIP5(D, E, A, B, C, WL72, SL72, KL4);
583     RIP5(C, D, E, A, B, WL73, SL73, KL4);
584     RIP5(B, C, D, E, A, WL74, SL74, KL4);
585     RIP5(A, B, C, D, E, WL75, SL75, KL4);
586     RIP5(E, A, B, C, D, WL76, SL76, KL4);
587     RIP5(D, E, A, B, C, WL77, SL77, KL4);
588     RIP5(C, D, E, A, B, WL78, SL78, KL4);
589     RIP5(B, C, D, E, A, WL79, SL79, KL4);
590 
591     a = A;
592     b = B;
593     c = C;
594     d = D;
595     e = E;
596     // Do other half
597     A = h[0];
598     B = h[1];
599     C = h[2];
600     D = h[3];
601     E = h[4];
602 
603     RIP5(A, B, C, D, E, WR00, SR00, KR0);
604     RIP5(E, A, B, C, D, WR01, SR01, KR0);
605     RIP5(D, E, A, B, C, WR02, SR02, KR0);
606     RIP5(C, D, E, A, B, WR03, SR03, KR0);
607     RIP5(B, C, D, E, A, WR04, SR04, KR0);
608     RIP5(A, B, C, D, E, WR05, SR05, KR0);
609     RIP5(E, A, B, C, D, WR06, SR06, KR0);
610     RIP5(D, E, A, B, C, WR07, SR07, KR0);
611     RIP5(C, D, E, A, B, WR08, SR08, KR0);
612     RIP5(B, C, D, E, A, WR09, SR09, KR0);
613     RIP5(A, B, C, D, E, WR10, SR10, KR0);
614     RIP5(E, A, B, C, D, WR11, SR11, KR0);
615     RIP5(D, E, A, B, C, WR12, SR12, KR0);
616     RIP5(C, D, E, A, B, WR13, SR13, KR0);
617     RIP5(B, C, D, E, A, WR14, SR14, KR0);
618     RIP5(A, B, C, D, E, WR15, SR15, KR0);
619 
620     RIP4(E, A, B, C, D, WR16, SR16, KR1);
621     RIP4(D, E, A, B, C, WR17, SR17, KR1);
622     RIP4(C, D, E, A, B, WR18, SR18, KR1);
623     RIP4(B, C, D, E, A, WR19, SR19, KR1);
624     RIP4(A, B, C, D, E, WR20, SR20, KR1);
625     RIP4(E, A, B, C, D, WR21, SR21, KR1);
626     RIP4(D, E, A, B, C, WR22, SR22, KR1);
627     RIP4(C, D, E, A, B, WR23, SR23, KR1);
628     RIP4(B, C, D, E, A, WR24, SR24, KR1);
629     RIP4(A, B, C, D, E, WR25, SR25, KR1);
630     RIP4(E, A, B, C, D, WR26, SR26, KR1);
631     RIP4(D, E, A, B, C, WR27, SR27, KR1);
632     RIP4(C, D, E, A, B, WR28, SR28, KR1);
633     RIP4(B, C, D, E, A, WR29, SR29, KR1);
634     RIP4(A, B, C, D, E, WR30, SR30, KR1);
635     RIP4(E, A, B, C, D, WR31, SR31, KR1);
636 
637     RIP3(D, E, A, B, C, WR32, SR32, KR2);
638     RIP3(C, D, E, A, B, WR33, SR33, KR2);
639     RIP3(B, C, D, E, A, WR34, SR34, KR2);
640     RIP3(A, B, C, D, E, WR35, SR35, KR2);
641     RIP3(E, A, B, C, D, WR36, SR36, KR2);
642     RIP3(D, E, A, B, C, WR37, SR37, KR2);
643     RIP3(C, D, E, A, B, WR38, SR38, KR2);
644     RIP3(B, C, D, E, A, WR39, SR39, KR2);
645     RIP3(A, B, C, D, E, WR40, SR40, KR2);
646     RIP3(E, A, B, C, D, WR41, SR41, KR2);
647     RIP3(D, E, A, B, C, WR42, SR42, KR2);
648     RIP3(C, D, E, A, B, WR43, SR43, KR2);
649     RIP3(B, C, D, E, A, WR44, SR44, KR2);
650     RIP3(A, B, C, D, E, WR45, SR45, KR2);
651     RIP3(E, A, B, C, D, WR46, SR46, KR2);
652     RIP3(D, E, A, B, C, WR47, SR47, KR2);
653 
654     RIP2(C, D, E, A, B, WR48, SR48, KR3);
655     RIP2(B, C, D, E, A, WR49, SR49, KR3);
656     RIP2(A, B, C, D, E, WR50, SR50, KR3);
657     RIP2(E, A, B, C, D, WR51, SR51, KR3);
658     RIP2(D, E, A, B, C, WR52, SR52, KR3);
659     RIP2(C, D, E, A, B, WR53, SR53, KR3);
660     RIP2(B, C, D, E, A, WR54, SR54, KR3);
661     RIP2(A, B, C, D, E, WR55, SR55, KR3);
662     RIP2(E, A, B, C, D, WR56, SR56, KR3);
663     RIP2(D, E, A, B, C, WR57, SR57, KR3);
664     RIP2(C, D, E, A, B, WR58, SR58, KR3);
665     RIP2(B, C, D, E, A, WR59, SR59, KR3);
666     RIP2(A, B, C, D, E, WR60, SR60, KR3);
667     RIP2(E, A, B, C, D, WR61, SR61, KR3);
668     RIP2(D, E, A, B, C, WR62, SR62, KR3);
669     RIP2(C, D, E, A, B, WR63, SR63, KR3);
670 
671     RIP1(B, C, D, E, A, WR64, SR64);
672     RIP1(A, B, C, D, E, WR65, SR65);
673     RIP1(E, A, B, C, D, WR66, SR66);
674     RIP1(D, E, A, B, C, WR67, SR67);
675     RIP1(C, D, E, A, B, WR68, SR68);
676     RIP1(B, C, D, E, A, WR69, SR69);
677     RIP1(A, B, C, D, E, WR70, SR70);
678     RIP1(E, A, B, C, D, WR71, SR71);
679     RIP1(D, E, A, B, C, WR72, SR72);
680     RIP1(C, D, E, A, B, WR73, SR73);
681     RIP1(B, C, D, E, A, WR74, SR74);
682     RIP1(A, B, C, D, E, WR75, SR75);
683     RIP1(E, A, B, C, D, WR76, SR76);
684     RIP1(D, E, A, B, C, WR77, SR77);
685     RIP1(C, D, E, A, B, WR78, SR78);
686     RIP1(B, C, D, E, A, WR79, SR79);
687 
688     D = h[1] + c + D;
689     h[1] = h[2] + d + E;
690     h[2] = h[3] + e + A;
691     h[3] = h[4] + a + B;
692     h[4] = h[0] + b + C;
693     h[0] = D;
694   }
695 
696 #undef X
697 }
698 
RIPEMD160(const uint8_t * data,size_t len,uint8_t out[RIPEMD160_DIGEST_LENGTH])699 uint8_t *RIPEMD160(const uint8_t *data, size_t len,
700                    uint8_t out[RIPEMD160_DIGEST_LENGTH]) {
701   RIPEMD160_CTX ctx;
702 
703   if (!RIPEMD160_Init(&ctx)) {
704     return NULL;
705   }
706 
707   RIPEMD160_Update(&ctx, data, len);
708   RIPEMD160_Final(out, &ctx);
709   return out;
710 }
711