| /linux/arch/arm64/crypto/ |
| sm4-ce-asm.h |
     20  rev64 b0.4s, b0.4s; \
     21  ext b0.16b, b0.16b, b0.16b, #8; \
     22  rev32 b0.16b, b0.16b;
     25  rev32 b0.16b, b0.16b; \
     45  rev64 b0.4s, b0.4s; \
     47  ext b0.16b, b0.16b, b0.16b, #8; \
     49  rev32 b0.16b, b0.16b; \
     53  rev32 b0.16b, b0.16b; \
     90  rev64 b0.4s, b0.4s; \
     94  ext b0.16b, b0.16b, b0.16b, #8; \
     [all …]
|
| sm4-ce-gcm-core.S |
    110  rev32 b0.16b, b0.16b; \
    112  sm4e b0.4s, v24.4s; \
    114  sm4e b0.4s, v25.4s; \
    116  sm4e b0.4s, v26.4s; \
    128  rev64 b0.4s, b0.4s; \
    130  ext b0.16b, b0.16b, b0.16b, #8; \
    131  rev32 b0.16b, b0.16b;
    137  rev32 b0.16b, b0.16b; \
    191  rev64 b0.4s, b0.4s; \
    197  ext b0.16b, b0.16b, b0.16b, #8; \
    [all …]
|
| sm4-neon-core.S |
    137  ROUND4(0, b0, b1, b2, b3); \
    138  ROUND4(1, b1, b2, b3, b0); \
    139  ROUND4(2, b2, b3, b0, b1); \
    140  ROUND4(3, b3, b0, b1, b2); \
    144  rev32 b0.16b, b0.16b; \
    149  rotate_clockwise_4x4(b0, b1, b2, b3); \
    155  rev32 b0.16b, b0.16b; \
    159  SM4_CRYPT_BLK4_BE(b0, b1, b2, b3);
    221  rev32 b0.16b, b0.16b; \
    235  ROUND8(0, b0, b1, b2, b3, b4, b5, b6, b7); \
    [all …]
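
The rev32/rev64/ext choreography in these three SM4 files converts between the memory byte order of a 16-byte block and the big-endian 32-bit words the SM4 rounds expect. As a point of reference, a minimal C model of what `rev32 Vd.16b, Vn.16b` does (the function name is mine, not the kernel's): reverse the bytes within each 32-bit lane.

    #include <stdint.h>
    #include <stddef.h>

    /* Model of AArch64 "rev32 Vd.16b, Vn.16b": byte-swap every 32-bit lane. */
    static void rev32_16b(uint8_t b[16])
    {
            for (size_t i = 0; i < 16; i += 4) {
                    uint8_t t;

                    t = b[i];     b[i]     = b[i + 3]; b[i + 3] = t;
                    t = b[i + 1]; b[i + 1] = b[i + 2]; b[i + 2] = t;
            }
    }

On a little-endian load this is exactly a per-word endian swap, which is why each block is wrapped in rev32 before and after the sm4e rounds.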
|
| aes-neonbs-core.S |
     26  .macro in_bs_ch, b0, b1, b2, b3, b4, b5, b6, b7
     29  eor \b3, \b3, \b0
     31  eor \b5, \b5, \b0
     43  eor \b0, \b0, \b6
     46  eor \b2, \b2, \b0
     64  eor \b2, \b2, \b0
     67  eor \b0, \b0, \b6
     78  eor \b5, \b5, \b0
     83  eor \b3, \b3, \b0
    343  ushr \t0\().2d, \b0\().2d, #\n
    [all …]
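
Here b0..b7 are not bytes but bit planes: in the bit-sliced AES representation, register i holds bit i of every byte being processed (several blocks' worth), so a single eor mixes that bit position across all of them, and macros like in_bs_ch are basis-change XOR networks over those planes. A hedged sketch of the representation (the type and helper below are illustrative, not the kernel's):

    #include <stdint.h>

    /* One bit plane: bit i of all processed bytes, packed into 128 bits. */
    struct plane128 {
            uint64_t lo, hi;
    };

    /* The asm's "eor \bX, \bX, \bY" on bit planes is just this XOR. */
    static void plane_eor(struct plane128 *dst, const struct plane128 *src)
    {
            dst->lo ^= src->lo;
            dst->hi ^= src->hi;
    }

The 32-bit ARM copy of the same macro, using veor, appears further down this list under arch/arm/crypto.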
|
| /linux/crypto/ |
| aes_generic.c |
    1179  u32 b0[4], b1[4]; in crypto_aes_encrypt() local
    1198  f_nround(b1, b0, kp); in crypto_aes_encrypt()
    1199  f_nround(b0, b1, kp); in crypto_aes_encrypt()
    1200  f_nround(b1, b0, kp); in crypto_aes_encrypt()
    1201  f_nround(b0, b1, kp); in crypto_aes_encrypt()
    1202  f_nround(b1, b0, kp); in crypto_aes_encrypt()
    1203  f_nround(b0, b1, kp); in crypto_aes_encrypt()
    1204  f_nround(b1, b0, kp); in crypto_aes_encrypt()
    1205  f_nround(b0, b1, kp); in crypto_aes_encrypt()
    1206  f_nround(b1, b0, kp); in crypto_aes_encrypt()
    [all …]
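
The alternating f_nround(b1, b0, ...) / f_nround(b0, b1, ...) calls are a ping-pong between two state buffers: each round reads one and writes the other, so no copying is needed between rounds. A minimal sketch of the pattern with a stand-in round function (round_fn and run_rounds are hypothetical names, not the kernel's):

    #include <stdint.h>

    typedef void round_fn(uint32_t out[4], const uint32_t in[4],
                          const uint32_t **rk);

    /* Run an even number of rounds, bouncing between b0 and b1 so the
     * result lands back in b0 without any intermediate copies. */
    static void run_rounds(round_fn *f_round, const uint32_t *rk,
                           uint32_t b0[4], int nrounds)
    {
            uint32_t b1[4];
            int i;

            for (i = 0; i < nrounds; i += 2) {
                    f_round(b1, b0, &rk);   /* b0 -> b1 */
                    f_round(b0, b1, &rk);   /* b1 -> b0 */
            }
    }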
|
| /linux/fs/f2fs/ |
| hash.c |
    28  __u32 b0 = buf[0], b1 = buf[1]; in TEA_transform() local
    34  b0 += ((b1 << 4)+a) ^ (b1+sum) ^ ((b1 >> 5)+b); in TEA_transform()
    35  b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d); in TEA_transform()
    38  buf[0] += b0; in TEA_transform()
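
f2fs, reiserfs, and ext4 (further down this list) all carry the same TEA-based transform for directory-name hashing. A self-contained sketch of the round shown above; the 16-round count and the final feed-forward additions follow the usual kernel TEA_transform shape, and TEA_DELTA is the standard golden-ratio constant:

    #include <stdint.h>

    #define TEA_DELTA 0x9E3779B9u

    static void tea_transform(uint32_t buf[2], const uint32_t in[4])
    {
            uint32_t sum = 0;
            uint32_t b0 = buf[0], b1 = buf[1];
            uint32_t a = in[0], b = in[1], c = in[2], d = in[3];
            int n = 16;

            do {
                    sum += TEA_DELTA;
                    b0 += ((b1 << 4) + a) ^ (b1 + sum) ^ ((b1 >> 5) + b);
                    b1 += ((b0 << 4) + c) ^ (b0 + sum) ^ ((b0 >> 5) + d);
            } while (--n);

            buf[0] += b0;   /* feed-forward into the running hash state */
            buf[1] += b1;
    }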
|
| /linux/fs/reiserfs/ |
| hashes.c |
    28  u32 b0, b1; \
    30  b0 = h0; \
    36  b0 += ((b1 << 4)+a) ^ (b1+sum) ^ ((b1 >> 5)+b); \
    37  b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d); \
    40  h0 += b0; \
|
| /linux/arch/arm/nwfpe/ |
| softfloat-macros |
    370  bits64 b0,
    385  z0 = a0 + b0;
    428  bits64 b0,
    443  z0 = a0 - b0;
    524  bits64 b0,
    562  bits64 b0, b1;
    566  b0 = b>>32; /* hence b0 is 32 bits wide now */
    567  if ( b0<<32 <= a0 ) {
    571  do_div( z, b0 );
    723  return ( a0 < b0 ) || ( ( a0 == b0 ) && ( a1 <= b1 ) );
    [all …]
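
These macros, and the sh4 copy in softfloat.c further down, build 128-bit arithmetic from 64-bit halves; the only subtlety is propagating the carry or borrow from the low half, visible in the sh4 hits as `a0 + b0 + (z1 < a1)` and `a0 - b0 - (a1 < b1)`. A standalone C version of that idiom:

    #include <stdint.h>

    /* z = a + b, each a 128-bit value split into (hi=·0, lo=·1) halves. */
    static void add128(uint64_t a0, uint64_t a1, uint64_t b0, uint64_t b1,
                       uint64_t *z0, uint64_t *z1)
    {
            uint64_t z1v = a1 + b1;

            *z1 = z1v;
            *z0 = a0 + b0 + (z1v < a1);     /* carry out of the low half */
    }

    /* z = a - b, same layout. */
    static void sub128(uint64_t a0, uint64_t a1, uint64_t b0, uint64_t b1,
                       uint64_t *z0, uint64_t *z1)
    {
            *z1 = a1 - b1;
            *z0 = a0 - b0 - (a1 < b1);      /* borrow from the low half */
    }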
|
| /linux/drivers/crypto/nx/ |
| nx-aes-ccm.c |
    134  unsigned int cryptlen, u8 *b0) in generate_b0() argument
    138  memcpy(b0, iv, 16); in generate_b0()
    140  lp = b0[0]; in generate_b0()
    144  *b0 |= (8 * ((m - 2) / 2)); in generate_b0()
    148  *b0 |= 64; in generate_b0()
    150  return set_msg_len(b0 + 16 - l, cryptlen, l); in generate_b0()
    186  b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0; in generate_pat()
    191  b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0; in generate_pat()
    198  b0 = nx_ctx->csbcpb_aead->cpb.aes_cca.b0; in generate_pat()
    202  b0 = nx_ctx->csbcpb_aead->cpb.aes_cca.b0; in generate_pat()
    [all …]
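
generate_b0() here and keembay's ocs_aes_ccm_write_b0() further down both build the 16-byte B0 block from RFC 3610: a flags byte (bit 6 set when associated data is present, bits 3..5 encoding the MAC length M, bits 0..2 holding L - 1), then the nonce, then the message length big-endian in the last L bytes. A hedged sketch built from scratch (the helper name and argument set are mine; the kernel versions start from the caller's IV instead):

    #include <stdint.h>
    #include <string.h>

    static void ccm_build_b0(uint8_t b0[16], const uint8_t *nonce, int l,
                             int mac_len, int have_aad, uint64_t cryptlen)
    {
            int i;

            memset(b0, 0, 16);
            b0[0] = (have_aad ? 64 : 0) |           /* Adata flag (bit 6) */
                    (((mac_len - 2) / 2) << 3) |    /* encoded MAC length */
                    (l - 1);                        /* length-field size  */
            memcpy(&b0[1], nonce, 15 - l);          /* nonce in bytes 1..15-l */
            for (i = 0; i < l; i++)                 /* big-endian msg length  */
                    b0[15 - i] = (uint8_t)(cryptlen >> (8 * i));
    }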
|
| /linux/lib/crypto/ |
| curve25519-hacl64.c |
     44  u64 b0 = b[0]; in modulo_carry_top() local
    129  u64 b0; in fmul_shift_reduce() local
    190  u128 b0; in fmul_fmul() local
    201  b0 = t[0]; in fmul_fmul()
    248  u128 b0; in fsquare_fsquare_() local
    258  b0 = tmp[0]; in fsquare_fsquare_()
    314  fmul_fmul(b0, t00, b0); in crecip_crecip()
    352  u64 b0; in fdifference() local
    358  b0 = tmp[0]; in fdifference()
    399  u128 b0; in fscalar() local
    [all …]
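
The many b0 temporaries here are field-element limbs: this HACL-derived code keeps GF(2^255 - 19) elements in five 64-bit limbs of 51 bits each, and modulo_carry_top() folds overflow of the top limb back into limb 0. A sketch of that reduction step (the limb layout is the standard one for this curve; the function body is illustrative):

    #include <stdint.h>

    /* Fold bits >= 2^51 of the top limb back into limb 0: since
     * 2^255 = 19 (mod 2^255 - 19), each carried unit is worth 19. */
    static void carry_top(uint64_t f[5])
    {
            uint64_t carry = f[4] >> 51;

            f[4] &= (1ULL << 51) - 1;
            f[0] += 19 * carry;
    }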
|
| /linux/arch/riscv/crypto/ |
| chacha-riscv64-zvkb.S |
     76  .macro chacha_round a0, b0, c0, d0, a1, b1, c1, d1, \
     79  vadd.vv \a0, \a0, \b0
     97  vxor.vv \b0, \b0, \c0
    101  vror.vi \b0, \b0, 32 - 12
    107  vadd.vv \a0, \a0, \b0
    125  vxor.vv \b0, \b0, \c0
    129  vror.vi \b0, \b0, 32 - 7
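
The vadd/vxor/vror triples are the ChaCha quarter-round; `vror.vi ... 32 - 12` is a rotate-right by 20, which is the spec's rotate-left by 12. In plain C the quarter-round is:

    #include <stdint.h>

    static uint32_t rol32(uint32_t x, int n)
    {
            return (x << n) | (x >> (32 - n));
    }

    /* ChaCha quarter-round: add / xor / rotate-left by 16, 12, 8, 7. */
    static void chacha_quarter_round(uint32_t *a, uint32_t *b,
                                     uint32_t *c, uint32_t *d)
    {
            *a += *b; *d ^= *a; *d = rol32(*d, 16);
            *c += *d; *b ^= *c; *b = rol32(*b, 12);
            *a += *b; *d ^= *a; *d = rol32(*d, 8);
            *c += *d; *b ^= *c; *b = rol32(*b, 7);
    }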
|
| /linux/drivers/media/usb/dvb-usb/ |
| pctv452e.c |
    520  u8 *b0, *rx; in pctv452e_power_ctrl() local
    532  if (!b0) in pctv452e_power_ctrl()
    535  rx = b0 + 5; in pctv452e_power_ctrl()
    544  b0[0] = 0xaa; in pctv452e_power_ctrl()
    545  b0[1] = state->c++; in pctv452e_power_ctrl()
    546  b0[2] = PCTV_CMD_RESET; in pctv452e_power_ctrl()
    547  b0[3] = 1; in pctv452e_power_ctrl()
    548  b0[4] = 0; in pctv452e_power_ctrl()
    554  b0[1] = state->c++; in pctv452e_power_ctrl()
    555  b0[4] = 1; in pctv452e_power_ctrl()
    [all …]
|
| /linux/drivers/mtd/nand/ |
| ecc-sw-hamming.c |
    378  unsigned char b0, b1, b2, bit_addr; in ecc_sw_hamming_correct() local
    387  b0 = read_ecc[0] ^ calc_ecc[0]; in ecc_sw_hamming_correct()
    390  b0 = read_ecc[1] ^ calc_ecc[1]; in ecc_sw_hamming_correct()
    401  if ((b0 | b1 | b2) == 0) in ecc_sw_hamming_correct()
    404  if ((((b0 ^ (b0 >> 1)) & 0x55) == 0x55) && in ecc_sw_hamming_correct()
    426  byte_addr = (addressbits[b1] << 4) + addressbits[b0]; in ecc_sw_hamming_correct()
    429  (addressbits[b1] << 4) + addressbits[b0]; in ecc_sw_hamming_correct()
    437  if ((bitsperbyte[b0] + bitsperbyte[b1] + bitsperbyte[b2]) == 1) in ecc_sw_hamming_correct()
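
The decision logic here is the classic single-bit Hamming scheme for 256-byte blocks: XOR the stored and freshly computed ECC bytes; an all-zero syndrome means no error, a syndrome whose adjacent bit pairs are all complementary pinpoints exactly one flipped data bit, and a single set bit overall means the ECC bytes themselves took the hit. A condensed sketch of just that classification (correction of the located bit, and the kernel's lookup tables, are omitted; the mask for the third byte is my recollection of the usual layout):

    /* Returns 0 = no error, 1 = correctable single-bit error,
     * 2 = error in the ECC bytes themselves, -1 = uncorrectable. */
    static int hamming_classify(unsigned char b0, unsigned char b1,
                                unsigned char b2)
    {
            if ((b0 | b1 | b2) == 0)
                    return 0;

            /* Every (even, odd) parity-bit pair must disagree for a
             * single flipped data bit. */
            if ((((b0 ^ (b0 >> 1)) & 0x55) == 0x55) &&
                (((b1 ^ (b1 >> 1)) & 0x55) == 0x55) &&
                (((b2 ^ (b2 >> 1)) & 0x54) == 0x54))
                    return 1;

            /* Compiler builtin in place of the kernel's bitsperbyte[]. */
            if (__builtin_popcount(b0) + __builtin_popcount(b1) +
                __builtin_popcount(b2) == 1)
                    return 2;

            return -1;
    }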
|
| /linux/arch/alpha/include/asm/ |
| bitops.h |
    460  unsigned long b0, b1, ofs, tmp; in sched_find_first_bit() local
    462  b0 = b[0]; in sched_find_first_bit()
    464  ofs = (b0 ? 0 : 64); in sched_find_first_bit()
    465  tmp = (b0 ? b0 : b1); in sched_find_first_bit()
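
The alpha helper scans a two-word bitmap: take b[0] if it is non-zero, otherwise b[1] with an offset of 64, then locate the lowest set bit. A portable sketch using a compiler builtin in place of alpha's count-trailing-zeros instruction (like the kernel's version, it assumes at least one bit is set):

    /* First set bit in a 128-bit bitmap held as two 64-bit words. */
    static unsigned long find_first_bit128(const unsigned long b[2])
    {
            unsigned long b0 = b[0], b1 = b[1];
            unsigned long ofs = b0 ? 0 : 64;
            unsigned long tmp = b0 ? b0 : b1;

            return ofs + __builtin_ctzl(tmp);
    }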
|
| /linux/include/linux/ |
| math64.h |
    204  } rl, rm, rn, rh, a0, b0; in mul_u64_u64_shr() local
    208  b0.ll = b; in mul_u64_u64_shr()
    210  rl.ll = mul_u32_u32(a0.l.low, b0.l.low); in mul_u64_u64_shr()
    211  rm.ll = mul_u32_u32(a0.l.low, b0.l.high); in mul_u64_u64_shr()
    212  rn.ll = mul_u32_u32(a0.l.high, b0.l.low); in mul_u64_u64_shr()
    213  rh.ll = mul_u32_u32(a0.l.high, b0.l.high); in mul_u64_u64_shr()
|
| /linux/tools/testing/selftests/kvm/x86_64/ |
| hyperv_clock.c |
    27  } rm, rn, rh, a0, b0; in mul_u64_u64_shr64() local
    31  b0.ll = b; in mul_u64_u64_shr64()
    33  rm.ll = (u64)a0.l.low * b0.l.high; in mul_u64_u64_shr64()
    34  rn.ll = (u64)a0.l.high * b0.l.low; in mul_u64_u64_shr64()
    35  rh.ll = (u64)a0.l.high * b0.l.high; in mul_u64_u64_shr64()
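
Both math64.h's mul_u64_u64_shr() and this selftest's mul_u64_u64_shr64() use the same schoolbook decomposition: split each 64-bit operand into 32-bit halves, form up to four 32x32 partial products, and recombine (judging by its locals, the selftest variant omits the low x low product, whose only effect on the top half is a possible carry). A sketch returning just the high 64 bits of the full product:

    #include <stdint.h>

    /* High 64 bits of the 128-bit product a * b, from four 32x32->64
     * partial products. */
    static uint64_t mul_u64_u64_hi(uint64_t a, uint64_t b)
    {
            uint64_t al = (uint32_t)a, ah = a >> 32;
            uint64_t bl = (uint32_t)b, bh = b >> 32;
            uint64_t rl = al * bl;          /* low  x low  */
            uint64_t rm = al * bh;          /* low  x high */
            uint64_t rn = ah * bl;          /* high x low  */
            uint64_t rh = ah * bh;          /* high x high */
            /* Carry out of the middle 32-bit column. */
            uint64_t mid = (rl >> 32) + (uint32_t)rm + (uint32_t)rn;

            return rh + (rm >> 32) + (rn >> 32) + (mid >> 32);
    }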
|
| /linux/arch/arm/crypto/ |
| blake2s-core.S |
     68  .macro _blake2s_quarterround a0, b0, c0, d0, a1, b1, c1, d1, s0, s1, s2, s3
     74  add \a0, \a0, \b0, ror #brot
     88  eor \b0, \c0, \b0, ror #brot
     95  add \a0, \a0, \b0, ror #12
    109  eor \b0, \c0, \b0, ror#12
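
_blake2s_quarterround interleaves two G functions and defers rotations: rather than rotating b immediately after each step, it carries a pending rotation (the `ror #brot` operand) into the next instruction's shifted-operand field, which costs nothing extra on ARM. The underlying G mixing in plain C:

    #include <stdint.h>

    static uint32_t ror32(uint32_t x, int n)
    {
            return (x >> n) | (x << (32 - n));
    }

    /* BLAKE2s G: mix (a, b, c, d) with message words m0, m1;
     * the rotation amounts are 16, 12, 8, 7. */
    static void blake2s_g(uint32_t *a, uint32_t *b, uint32_t *c, uint32_t *d,
                          uint32_t m0, uint32_t m1)
    {
            *a += *b + m0; *d = ror32(*d ^ *a, 16);
            *c += *d;      *b = ror32(*b ^ *c, 12);
            *a += *b + m1; *d = ror32(*d ^ *a, 8);
            *c += *d;      *b = ror32(*b ^ *c, 7);
    }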
|
| aes-neonbs-core.S |
     80  .macro in_bs_ch, b0, b1, b2, b3, b4, b5, b6, b7
     83  veor \b3, \b3, \b0
     85  veor \b5, \b5, \b0
     97  veor \b0, \b0, \b6
    100  veor \b2, \b2, \b0
    118  veor \b2, \b2, \b0
    121  veor \b0, \b0, \b6
    132  veor \b5, \b5, \b0
    137  veor \b3, \b3, \b0
    402  vshr.u64 \t0, \b0, #\n
    [all …]
|
| /linux/drivers/media/usb/ttusb-dec/ |
| ttusb_dec.c |
     969  u8 b0[] = { 0x05 }; in ttusb_dec_start_ts_feed() local
    1025  result = ttusb_dec_send_command(dec, 0x80, sizeof(b0), b0, NULL, NULL); in ttusb_dec_start_ts_feed()
    1055  memcpy(&b0[0], &pid, 2); in ttusb_dec_start_sec_feed()
    1056  memcpy(&b0[4], &x, 1); in ttusb_dec_start_sec_feed()
    1059  result = ttusb_dec_send_command(dec, 0x60, sizeof(b0), b0, in ttusb_dec_start_sec_feed()
    1117  u8 b0[] = { 0x00 }; in ttusb_dec_stop_ts_feed() local
    1119  ttusb_dec_send_command(dec, 0x81, sizeof(b0), b0, NULL, NULL); in ttusb_dec_stop_ts_feed()
    1131  u8 b0[] = { 0x00, 0x00 }; in ttusb_dec_stop_sec_feed() local
    1135  b0[1] = finfo->stream_id; in ttusb_dec_stop_sec_feed()
    1140  ttusb_dec_send_command(dec, 0x62, sizeof(b0), b0, NULL, NULL); in ttusb_dec_stop_sec_feed()
    [all …]
|
| /linux/arch/sh/kernel/cpu/sh4/ |
| softfloat.c |
     90  void add128(bits64 a0, bits64 a1, bits64 b0, bits64 b1, bits64 * z0Ptr,
    645  *z0Ptr = a0 + b0 + (z1 < a1); in add128()
    649  sub128(bits64 a0, bits64 a1, bits64 b0, bits64 b1, bits64 * z0Ptr, in sub128() argument
    653  *z0Ptr = a0 - b0 - (a1 < b1); in sub128()
    658  bits64 b0, b1; in estimateDiv128To64() local
    663  b0 = b >> 32; in estimateDiv128To64()
    665  do_div(tmp, b0); in estimateDiv128To64()
    667  z = (b0 << 32 <= a0) ? LIT64(0xFFFFFFFF00000000) : tmp << 32; in estimateDiv128To64()
    673  add128(rem0, rem1, b0, b1, &rem0, &rem1); in estimateDiv128To64()
    677  do_div(tmp, b0); in estimateDiv128To64()
    [all …]
|
| /linux/fs/ext4/ |
| hash.c |
    19  __u32 b0 = buf[0], b1 = buf[1]; in TEA_transform() local
    25  b0 += ((b1 << 4)+a) ^ (b1+sum) ^ ((b1 >> 5)+b); in TEA_transform()
    26  b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d); in TEA_transform()
    29  buf[0] += b0; in TEA_transform()
|
| /linux/arch/x86/crypto/ |
| ghash-clmulni-intel_asm.S |
    53  pclmulqdq $0x00, SHASH, DATA # DATA = a0 * b0
    55  pclmulqdq $0x00, T3, T2 # T2 = (a1 + a0) * (b1 + b0)
    57  pxor T1, T2 # T2 = a0 * b1 + a1 * b0
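
The three pclmulqdq comments spell out Karatsuba over GF(2): with addition being XOR, a0*b1 ^ a1*b0 = (a1 ^ a0)*(b1 ^ b0) ^ a1*b1 ^ a0*b0, so a 128-bit carry-less product costs three multiplies instead of four. A toy model at 32-bit width (clmul32 is a software stand-in for the hardware instruction):

    #include <stdint.h>

    /* Software carry-less 32x32 -> 64 multiply. */
    static uint64_t clmul32(uint32_t a, uint32_t b)
    {
            uint64_t r = 0;
            int i;

            for (i = 0; i < 32; i++)
                    if (b & (1u << i))
                            r ^= (uint64_t)a << i;
            return r;
    }

    /* 64x64 -> 128 carry-less multiply via Karatsuba: three clmul32 calls
     * where the schoolbook method needs four. */
    static void clmul64(uint64_t a, uint64_t b, uint64_t *hi, uint64_t *lo)
    {
            uint32_t a0 = (uint32_t)a, a1 = a >> 32;
            uint32_t b0 = (uint32_t)b, b1 = b >> 32;
            uint64_t lolo = clmul32(a0, b0);
            uint64_t hihi = clmul32(a1, b1);
            uint64_t mid  = clmul32(a0 ^ a1, b0 ^ b1) ^ lolo ^ hihi;

            *lo = lolo ^ (mid << 32);
            *hi = hihi ^ (mid >> 32);
    }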
|
| aria-gfni-avx512-asm_64.S |
     66  #define byteslice_16x16b(a0, b0, c0, d0, \ argument
     74  transpose_4x4(b0, b1, b2, b3, d2, d3); \
     87  vpshufb a0, b0, b0; \
    105  transpose_4x4(a0, b0, c0, d0, d2, d3); \
    110  vmovdqu64 b0, st0; \
    112  transpose_4x4(a2, b2, c2, d2, b0, b1); \
    113  transpose_4x4(a3, b3, c3, d3, b0, b1); \
    114  vmovdqu64 st0, b0; \
    139  vpshufb a0, b0, b0; \
    162  vmovdqu64 b0, st0; \
    [all …]
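
byteslice_16x16b rearranges sixteen 16-byte states so that equal byte positions line up in the same register (SIMD byte slicing); its transpose_4x4 building block is, in scalar terms, an ordinary 4x4 transpose across four registers. A scalar sketch of that building block:

    #include <stdint.h>

    /* In-place 4x4 transpose: element (i, j) swaps with (j, i). */
    static void transpose_4x4(uint32_t m[4][4])
    {
            int i, j;

            for (i = 0; i < 4; i++)
                    for (j = i + 1; j < 4; j++) {
                            uint32_t t = m[i][j];

                            m[i][j] = m[j][i];
                            m[j][i] = t;
                    }
    }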
|
| /linux/drivers/crypto/intel/keembay/ |
| ocs-aes.c |
    1058  u8 b0[16]; /* CCM B0 block is 16 bytes long. */ in ocs_aes_ccm_write_b0() local
    1062  memset(b0, 0, sizeof(b0)); in ocs_aes_ccm_write_b0()
    1073  b0[0] |= BIT(6); in ocs_aes_ccm_write_b0()
    1079  b0[0] |= (((tag_size - 2) / 2) & 0x7) << 3; in ocs_aes_ccm_write_b0()
    1085  b0[0] |= iv[0] & 0x7; in ocs_aes_ccm_write_b0()
    1093  b0[i] = iv[i]; in ocs_aes_ccm_write_b0()
    1099  i = sizeof(b0) - 1; in ocs_aes_ccm_write_b0()
    1101  b0[i] = cryptlen & 0xff; in ocs_aes_ccm_write_b0()
    1113  for (i = 0; i < sizeof(b0); i++) in ocs_aes_ccm_write_b0()
    1114  iowrite8(b0[i], aes_dev->base_reg + in ocs_aes_ccm_write_b0()
|
| /linux/Documentation/userspace-api/media/v4l/ |
| subdev-formats.rst |
    8382  - b0\ :sub:`7`
    8383  - b0\ :sub:`6`
    8384  - b0\ :sub:`5`
    8385  - b0\ :sub:`4`
    8386  - b0\ :sub:`3`
    8387  - b0\ :sub:`2`
    8388  - b0\ :sub:`1`
    8389  - b0\ :sub:`0`
    8409  - b0\ :sub:`7`
    8410  - b0\ :sub:`6`
    [all …]
|