
Searched refs: RA2 (results 1 – 7 of 7), sorted by relevance

/arch/x86/crypto/
sm4-aesni-avx-asm_64.S
33 #define RA2 %xmm10 (macro definition)
159 vmovdqa RA0, RA2;
165 vmovdqu 2*16(%rdx), RA2;
175 vpshufb RTMP2, RA2, RA2;
232 vpshufb RTMP2, RA2, RA2;
240 vmovdqu RA2, 2*16(%rsi);
265 vpshufb RTMP2, RA2, RA2;
349 vpshufb RTMP2, RA2, RA2;
447 vpshufb RBSWAP, RTMP0, RA2;
468 vpxor (2 * 16)(%rdx), RA2, RA2;
[all …]
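For context, the hits above follow the pattern used throughout these .S files: a C-preprocessor #define binds a logical block name (RA2) to a SIMD register, and the alias is then used directly in AVX instructions. A minimal, hypothetical sketch of that pattern, loosely based on the hits at lines 165, 175 and 240 (the mask register RTMP2 and the exact offsets are illustrative, not quoted from the file):

    /* RA2 aliases the third 128-bit state block; RTMP2 holds a byte-shuffle mask. */
    #define RA2   %xmm10
    #define RTMP2 %xmm14

        vmovdqu 2*16(%rdx), RA2;     /* load the third 16-byte block from the source */
        vpshufb RTMP2, RA2, RA2;     /* reorder the block's bytes in place */
        vmovdqu RA2, 2*16(%rsi);     /* store the processed block to the destination */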
sm4-aesni-avx2-asm_64.S
34 #define RA2 %ymm10 (macro definition)
171 vpshufb RTMP2, RA2, RA2;
179 transpose_4x4(RA0, RA1, RA2, RA3, RTMP0, RTMP1);
261 vpshufb RTMP2, RA2, RA2;
317 vpshufb RTMP3, RTMP0, RA2;
340 vpshufb RTMP3, RTMP0, RA2; /* +5 ; +4 */
369 vpxor (2 * 32)(%rdx), RA2, RA2;
378 vmovdqu RA2, (2 * 32)(%rsi);
407 vmovdqu (2 * 32)(%rdx), RA2;
420 vpxor (1 * 32 + 16)(%rdx), RA2, RA2;
[all …]
twofish-avx-x86_64-asm_64.S
42 #define RA2 %xmm4 (macro definition)
248 inpack_blocks(RA2, RB2, RC2, RD2, RK1, RX0, RY0, RK2);
267 outunpack_blocks(RC2, RD2, RA2, RB2, RK1, RX0, RY0, RK2);
288 inpack_blocks(RC2, RD2, RA2, RB2, RK1, RX0, RY0, RK2);
289 rotate_1l(RA2);
306 outunpack_blocks(RA2, RB2, RC2, RD2, RK1, RX0, RY0, RK2);
321 load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
325 store_8way(%r11, RC1, RD1, RA1, RB1, RC2, RD2, RA2, RB2);
341 load_8way(%rdx, RC1, RD1, RA1, RB1, RC2, RD2, RA2, RB2);
345 store_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
[all …]
cast6-avx-x86_64-asm_64.S
42 #define RA2 %xmm4 (macro definition)
270 inpack_blocks(RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM);
294 outunpack_blocks(RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM);
318 inpack_blocks(RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM);
341 outunpack_blocks(RA2, RB2, RC2, RD2, RTMP, RX, RKRF, RKM);
358 load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
362 store_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
381 load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
385 store_8way(%r11, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
406 load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
[all …]
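In the twofish and cast6 hits above, RA2 is one of eight xmm registers that together hold eight 16-byte blocks; load_8way/store_8way move all eight blocks between memory and that register set. A hedged sketch of what such an 8-way load macro might look like (the macro body and register assignments are assumptions, not quoted from the tree):

    #define load_8way(src, x0, x1, x2, x3, x4, x5, x6, x7) \
        vmovdqu (0*16)(src), x0; \
        vmovdqu (1*16)(src), x1; \
        vmovdqu (2*16)(src), x2; \
        vmovdqu (3*16)(src), x3; \
        vmovdqu (4*16)(src), x4; \
        vmovdqu (5*16)(src), x5; \
        vmovdqu (6*16)(src), x6; \
        vmovdqu (7*16)(src), x7;

        /* as in the twofish hit at line 321: pull eight blocks into the RA/RB/RC/RD pairs */
        load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);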
serpent-avx-x86_64-asm_64.S
38 #define RA2 %xmm6 (macro definition)
565 read_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
602 write_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
618 read_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
668 load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
672 store_8way(%rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
686 load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
704 load_8way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
serpent-avx2-asm_64.S
31 #define RA2 %ymm3 (macro definition)
564 read_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
601 write_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
617 read_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
669 load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
673 store_16way(%rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
691 load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
713 load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);
serpent-sse2-x86_64-asm_64.S
28 #define RA2 %xmm5 (macro definition)
634 read_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);
676 write_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);
682 xor_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);
698 read_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2);
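
Note that the same logical name maps to different register widths across the implementations listed above: %xmm10 in sm4-aesni-avx versus %ymm10 in the AVX2 variant, %xmm6 in serpent-avx versus %ymm3 in serpent-avx2. The AVX2 files consequently use 32-byte memory strides, as in the hits above. Two illustrative fragments (assumed for contrast, not quoted verbatim):

    /* AVX-style file: 128-bit registers, 16-byte stride */
    #define RA2 %xmm10
        vmovdqu 2*16(%rdx), RA2;        /* third block, byte offset 32 */

    #undef RA2

    /* AVX2-style file: 256-bit registers, 32-byte stride (two blocks per register) */
    #define RA2 %ymm10
        vmovdqu (2 * 32)(%rdx), RA2;    /* third register load, byte offset 64 */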
