/kernel/linux/linux-6.6/arch/arm64/crypto/

  sm4-ce-asm.h
     11  #define SM4_CRYPT_BLK_BE(b0)    \
     12          sm4e b0.4s, v24.4s;     \
     13          sm4e b0.4s, v25.4s;     \
     14          sm4e b0.4s, v26.4s;     \
     15          sm4e b0.4s, v27.4s;     \
     16          sm4e b0.4s, v28.4s;     \
     17          sm4e b0.4s, v29.4s;     \
     18          sm4e b0.4s, v30.4s;     \
     19          sm4e b0.4s, v31.4s;     \
     20          rev64 b0 [all...]

  sm4-ce-gcm-core.S
    109  #define SM4_CRYPT_PMUL_128x128_BLK(b0, r0, r1, m0, m1, T0, T1)  \
    110          rev32 b0.16b, b0.16b;   \
    112          sm4e b0.4s, v24.4s;     \
    114          sm4e b0.4s, v25.4s;     \
    116          sm4e b0.4s, v26.4s;     \
    118          sm4e b0.4s, v27.4s;     \
    120          sm4e b0.4s, v28.4s;     \
    122          sm4e b0.4s, v29.4s;     \
    124          sm4e b0 [all...]

  sm4-neon-core.S
    131  #define SM4_CRYPT_BLK4_BE(b0, b1, b2, b3)       \
    137          ROUND4(0, b0, b1, b2, b3);              \
    138          ROUND4(1, b1, b2, b3, b0);              \
    139          ROUND4(2, b2, b3, b0, b1);              \
    140          ROUND4(3, b3, b0, b1, b2);              \
    144          rev32 b0.16b, b0.16b;                   \
    149          rotate_clockwise_4x4(b0, b1, b2, b3);   \
    154  #define SM4_CRYPT_BLK4(b0, b1, b2, b3)          \
    155          rev32 b0 [all...]
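SM4 consumes each 128-bit block as four big-endian 32-bit words, which is why every macro above brackets the sm4e rounds with rev32/rev64 byte swaps on little-endian arm64. A minimal C sketch of the same big-endian word load (the helper name is illustrative, not from these files):

#include <stdint.h>

/* Load a 16-byte SM4 block as four big-endian 32-bit words, the same
 * byte order the rev32 instructions establish before sm4e runs. */
static void sm4_load_be_block(uint32_t w[4], const uint8_t in[16])
{
	for (int i = 0; i < 4; i++)
		w[i] = ((uint32_t)in[4 * i] << 24) |
		       ((uint32_t)in[4 * i + 1] << 16) |
		       ((uint32_t)in[4 * i + 2] << 8) |
		       (uint32_t)in[4 * i + 3];
}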
/kernel/linux/linux-5.10/fs/reiserfs/

  hashes.c
     28          u32 b0, b1;                                             \
     30          b0 = h0;                                                \
     36          b0 += ((b1 << 4)+a) ^ (b1+sum) ^ ((b1 >> 5)+b);         \
     37          b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d);         \
     40          h0 += b0;                                               \
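This hashes.c hit (with its identical linux-6.6 twin just below, and the f2fs and ext4 TEA_transform entries further down) is the classic TEA mixing round over a two-word state b0/b1. A self-contained sketch of the transform, using the standard TEA delta; the 16-round count matches ext4/f2fs, while reiserfs picks its own round constants:

#include <stdint.h>

#define TEA_DELTA 0x9E3779B9u

/* One TEA-style transform as in the excerpts: mix the key words a..d
 * into the two-word state, then fold the result back into the buffer. */
static void tea_transform(uint32_t buf[2], const uint32_t in[4])
{
	uint32_t sum = 0;
	uint32_t b0 = buf[0], b1 = buf[1];
	uint32_t a = in[0], b = in[1], c = in[2], d = in[3];
	int n = 16;		/* ext4/f2fs round count */

	do {
		sum += TEA_DELTA;
		b0 += ((b1 << 4) + a) ^ (b1 + sum) ^ ((b1 >> 5) + b);
		b1 += ((b0 << 4) + c) ^ (b0 + sum) ^ ((b0 >> 5) + d);
	} while (--n);

	buf[0] += b0;
	buf[1] += b1;
}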
/kernel/linux/linux-6.6/fs/reiserfs/

  hashes.c
     28          u32 b0, b1;                                             \
     30          b0 = h0;                                                \
     36          b0 += ((b1 << 4)+a) ^ (b1+sum) ^ ((b1 >> 5)+b);         \
     37          b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d);         \
     40          h0 += b0;                                               \
/kernel/linux/linux-5.10/drivers/crypto/nx/

  nx-aes-ccm.c
    134          unsigned int cryptlen, u8 *b0)                          in generate_b0()
    139          memcpy(b0, iv, 16);                                     in generate_b0()
    141          lp = b0[0];                                             in generate_b0()
    145          *b0 |= (8 * ((m - 2) / 2));                             in generate_b0()
    149          *b0 |= 64;                                              in generate_b0()
    151          rc = set_msg_len(b0 + 16 - l, cryptlen, l);             in generate_b0()
    167          u8 tmp[16], *b1 = NULL, *b0 = NULL, *result = NULL;     in generate_pat() local
    189          b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0;          in generate_pat()
    194          b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0;          in generate_pat()
    201          b0                                                      in generate_pat()
    133  generate_b0(u8 *iv, unsigned int assoclen, unsigned int authsize, unsigned int cryptlen, u8 *b0)   generate_b0() argument
    [all...]
/kernel/linux/linux-6.6/drivers/crypto/nx/

  nx-aes-ccm.c
    134          unsigned int cryptlen, u8 *b0)                          in generate_b0()
    138          memcpy(b0, iv, 16);                                     in generate_b0()
    140          lp = b0[0];                                             in generate_b0()
    144          *b0 |= (8 * ((m - 2) / 2));                             in generate_b0()
    148          *b0 |= 64;                                              in generate_b0()
    150          return set_msg_len(b0 + 16 - l, cryptlen, l);           in generate_b0()
    164          u8 tmp[16], *b1 = NULL, *b0 = NULL, *result = NULL;     in generate_pat() local
    186          b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0;          in generate_pat()
    191          b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0;          in generate_pat()
    198          b0                                                      in generate_pat()
    133  generate_b0(u8 *iv, unsigned int assoclen, unsigned int authsize, unsigned int cryptlen, u8 *b0)   generate_b0() argument
    [all...]
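generate_b0() builds the CCM B0 block per RFC 3610: the first byte packs an Adata flag (bit 6, the `|= 64` above), the encoded tag length ((M-2)/2 in bits 3..5, the `8 * ((m - 2) / 2)` above), and L-1 in the low bits; the nonce and encoded message length follow. A hedged sketch of just the flags byte:

#include <stdint.h>

/* Compose the CCM B0 flags byte (RFC 3610, section 2.2).
 * m = tag length in bytes, l = length-field width in bytes,
 * have_aad = whether associated data is present. */
static uint8_t ccm_b0_flags(unsigned int m, unsigned int l, int have_aad)
{
	uint8_t flags = l - 1;		/* low 3 bits: L - 1 */

	flags |= 8 * ((m - 2) / 2);	/* bits 3..5: (M - 2) / 2 */
	if (have_aad)
		flags |= 64;		/* bit 6: Adata present */
	return flags;
}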
/kernel/linux/linux-5.10/fs/f2fs/

  hash.c
     28          __u32 b0 = buf[0], b1 = buf[1];                         in TEA_transform() local
     34          b0 += ((b1 << 4)+a) ^ (b1+sum) ^ ((b1 >> 5)+b);         in TEA_transform()
     35          b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d);         in TEA_transform()
     38          buf[0] += b0;                                           in TEA_transform()
/kernel/linux/linux-6.6/fs/f2fs/

  hash.c
     28          __u32 b0 = buf[0], b1 = buf[1];                         in TEA_transform() local
     34          b0 += ((b1 << 4)+a) ^ (b1+sum) ^ ((b1 >> 5)+b);         in TEA_transform()
     35          b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d);         in TEA_transform()
     38          buf[0] += b0;                                           in TEA_transform()
/kernel/linux/linux-5.10/arch/ia64/lib/

  ip_fast_csum.S
     77          br.ret.sptk.many b0
     85          mov r34=b0
     90          br.call.sptk.many b0=do_csum
     94          mov b0=r34
     95          br.ret.sptk.many b0
    146          br.ret.sptk.many b0
/kernel/linux/linux-6.6/arch/ia64/lib/

  ip_fast_csum.S
     77          br.ret.sptk.many b0
     85          mov r34=b0
     90          br.call.sptk.many b0=do_csum
     94          mov b0=r34
     95          br.ret.sptk.many b0
    146          br.ret.sptk.many b0
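In these ia64 hits b0 is the branch-return register, not a data byte: the file saves it around the do_csum call and returns through it. The routine itself computes the 16-bit one's-complement checksum of an IP header; a portable C sketch of the same fold (a reference sketch, not the kernel's generic implementation verbatim):

#include <stdint.h>
#include <stddef.h>

/* One's-complement sum over an IP header of ihl 32-bit words,
 * folded to 16 bits and inverted, as the assembly above does. */
static uint16_t ip_fast_csum_c(const void *iph, unsigned int ihl)
{
	const uint16_t *p = iph;
	uint32_t sum = 0;

	for (size_t i = 0; i < ihl * 2; i++)
		sum += p[i];
	sum = (sum & 0xffff) + (sum >> 16);	/* fold carries */
	sum = (sum & 0xffff) + (sum >> 16);	/* fold once more */
	return (uint16_t)~sum;
}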
/kernel/linux/linux-5.10/lib/crypto/

  curve25519-hacl64.c
     46          u64 b0 = b[0];                                          in modulo_carry_top() local
     48          u64 b0_ = b0 + 19 * (b4 >> 51);                         in modulo_carry_top()
    131          u64 b0;                                                 in fmul_shift_reduce() local
    153          b0 = output[0];                                         in fmul_shift_reduce()
    154          output[0] = 19 * b0;                                    in fmul_shift_reduce()
    192          u128 b0;                                                in fmul_fmul() local
    203          b0 = t[0];                                              in fmul_fmul()
    205          b0_ = ((b0) + (((u128)(19) * (((u64)(((b4) >> (51))))))));   in fmul_fmul()
    250          u128 b0;                                                in fsquare_fsquare_() local
    260          b0                                                      in fsquare_fsquare_()
    303          u64 *b0 = buf + 10;                                     crecip_crecip() local
    354          u64 b0;                                                 fdifference() local
    401          u128 b0;                                                fscalar() local
    729          u8 *b0 = output;                                        format_fcontract_store() local
    [all...]
/kernel/linux/linux-6.6/lib/crypto/

  curve25519-hacl64.c
     44          u64 b0 = b[0];                                          in modulo_carry_top() local
     46          u64 b0_ = b0 + 19 * (b4 >> 51);                         in modulo_carry_top()
    129          u64 b0;                                                 in fmul_shift_reduce() local
    151          b0 = output[0];                                         in fmul_shift_reduce()
    152          output[0] = 19 * b0;                                    in fmul_shift_reduce()
    190          u128 b0;                                                in fmul_fmul() local
    201          b0 = t[0];                                              in fmul_fmul()
    203          b0_ = ((b0) + (((u128)(19) * (((u64)(((b4) >> (51))))))));   in fmul_fmul()
    248          u128 b0;                                                in fsquare_fsquare_() local
    258          b0                                                      in fsquare_fsquare_()
    301          u64 *b0 = buf + 10;                                     crecip_crecip() local
    352          u64 b0;                                                 fdifference() local
    399          u128 b0;                                                fscalar() local
    727          u8 *b0 = output;                                        format_fcontract_store() local
    [all...]
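modulo_carry_top() is the standard radix-2^51 reduction for the field prime 2^255 - 19: since 2^255 ≡ 19 (mod p), the bits of the top limb above position 51 are multiplied by 19 and folded into limb 0, exactly the `b0 + 19 * (b4 >> 51)` seen above. A minimal sketch on a five-limb u64 representation (assuming the usual 51-bit limb mask):

#include <stdint.h>

/* Fold the top limb's overflow back into limb 0: 2^255 = 19 (mod p),
 * so the carry out of bit 51 of b[4] re-enters as 19 * carry. */
static void modulo_carry_top_sketch(uint64_t b[5])
{
	uint64_t b4 = b[4];
	uint64_t b0 = b[0];

	b[4] = b4 & 0x7ffffffffffffULL;	/* keep the low 51 bits */
	b[0] = b0 + 19 * (b4 >> 51);	/* fold the carry times 19 */
}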
/kernel/linux/linux-5.10/crypto/

  aes_generic.c
   1179          u32 b0[4], b1[4];                                       in crypto_aes_encrypt() local
   1183          b0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);       in crypto_aes_encrypt()
   1184          b0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);   in crypto_aes_encrypt()
   1185          b0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);   in crypto_aes_encrypt()
   1186          b0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);  in crypto_aes_encrypt()
   1189          f_nround(b1, b0, kp);                                   in crypto_aes_encrypt()
   1190          f_nround(b0, b1, kp);                                   in crypto_aes_encrypt()
   1194          f_nround(b1, b0, kp);                                   in crypto_aes_encrypt()
   1195          f_nround(b0, b1, kp);                                   in crypto_aes_encrypt()
   1198          f_nround(b1, b0, k                                      in crypto_aes_encrypt()
   1249          u32 b0[4], b1[4];                                       crypto_aes_decrypt() local
    [all...]
/kernel/linux/linux-6.6/crypto/

  aes_generic.c
   1179          u32 b0[4], b1[4];                                       in crypto_aes_encrypt() local
   1183          b0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);       in crypto_aes_encrypt()
   1184          b0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);   in crypto_aes_encrypt()
   1185          b0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);   in crypto_aes_encrypt()
   1186          b0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);  in crypto_aes_encrypt()
   1189          f_nround(b1, b0, kp);                                   in crypto_aes_encrypt()
   1190          f_nround(b0, b1, kp);                                   in crypto_aes_encrypt()
   1194          f_nround(b1, b0, kp);                                   in crypto_aes_encrypt()
   1195          f_nround(b0, b1, kp);                                   in crypto_aes_encrypt()
   1198          f_nround(b1, b0, k                                      in crypto_aes_encrypt()
   1249          u32 b0[4], b1[4];                                       crypto_aes_decrypt() local
    [all...]
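crypto_aes_encrypt() keeps two four-word state buffers, b0 and b1, and alternates them as source and destination on each f_nround call, so no round ever copies the state. A schematic sketch of that ping-pong pattern only (round here stands in for the real table-driven AES round, which is not reproduced):

#include <stdint.h>

typedef void (*aes_round_fn)(uint32_t dst[4], const uint32_t src[4],
			     const uint32_t *rk);

/* Alternate b0/b1 as round input and output; after an even number of
 * rounds the result lands back in b0, as in aes_generic.c. Each call
 * consumes four round-key words. */
static void run_rounds(uint32_t b0[4], uint32_t b1[4],
		       const uint32_t *rk, int rounds, aes_round_fn round)
{
	for (int i = 0; i < rounds; i += 2) {
		round(b1, b0, rk + 4 * i);		/* b0 -> b1 */
		round(b0, b1, rk + 4 * (i + 1));	/* b1 -> b0 */
	}
}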
/kernel/linux/linux-5.10/drivers/mtd/nand/raw/

  nand_ecc.c
    394          unsigned char b0, b1, b2, bit_addr;                     in __nand_correct_data() local
    400           * b0 to b2 indicate which bit is faulty (if any)       in __nand_correct_data()
    405          b0 = read_ecc[0] ^ calc_ecc[0];                         in __nand_correct_data()
    408          b0 = read_ecc[1] ^ calc_ecc[1];                         in __nand_correct_data()
    419          if ((b0 | b1 | b2) == 0)                                in __nand_correct_data()
    422          if ((((b0 ^ (b0 >> 1)) & 0x55) == 0x55) &&              in __nand_correct_data()
    434           * One as we have now (for b0), one for b2              in __nand_correct_data()
    444          byte_addr = (addressbits[b1] << 4) + addressbits[b0];   in __nand_correct_data()
    447                  (addressbits[b1] << 4) + addressbits[b0];       in __nand_correct_data()
    [all...]
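__nand_correct_data() XORs the stored and recalculated ECC bytes into syndromes b0..b2: all zero means the data is clean, and a single flipped bit makes every even/odd bit pair of each syndrome complementary, which is what the `((b0 ^ (b0 >> 1)) & 0x55) == 0x55` test above checks. A sketch of that classification (mask 0x54 on b2 assumes the 256-byte ECC block case):

#include <stdint.h>

/* Classify a Hamming ECC syndrome: 0 = clean, 1 = correctable
 * single-bit error, -1 = uncorrectable. */
static int ecc_classify(uint8_t b0, uint8_t b1, uint8_t b2)
{
	if ((b0 | b1 | b2) == 0)
		return 0;	/* data and ECC agree */

	/* For a single flipped data bit, each adjacent bit pair of
	 * every syndrome byte holds complementary values. */
	if (((b0 ^ (b0 >> 1)) & 0x55) == 0x55 &&
	    ((b1 ^ (b1 >> 1)) & 0x55) == 0x55 &&
	    ((b2 ^ (b2 >> 1)) & 0x54) == 0x54)
		return 1;

	return -1;		/* multi-bit or ECC-area error */
}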
/kernel/linux/linux-5.10/arch/alpha/include/asm/

  bitops.h
    445          unsigned long b0, b1, ofs, tmp;                         in sched_find_first_bit() local
    447          b0 = b[0];                                              in sched_find_first_bit()
    449          ofs = (b0 ? 0 : 64);                                    in sched_find_first_bit()
    450          tmp = (b0 ? b0 : b1);                                   in sched_find_first_bit()
/kernel/linux/linux-6.6/arch/alpha/include/asm/

  bitops.h
    440          unsigned long b0, b1, ofs, tmp;                         in sched_find_first_bit() local
    442          b0 = b[0];                                              in sched_find_first_bit()
    444          ofs = (b0 ? 0 : 64);                                    in sched_find_first_bit()
    445          tmp = (b0 ? b0 : b1);                                   in sched_find_first_bit()
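sched_find_first_bit() scans a two-word (128-bit) priority bitmap: if the first word is non-zero its lowest set bit wins, otherwise the search moves to the second word with an offset of 64. A portable sketch using the compiler builtin in place of alpha's count-trailing-zeros instruction:

/* Find the first set bit in a 128-bit bitmap stored as two 64-bit
 * words, mirroring the alpha sched_find_first_bit() above. The
 * caller must guarantee at least one bit is set. */
static unsigned long find_first_bit128(const unsigned long b[2])
{
	unsigned long b0 = b[0], b1 = b[1];
	unsigned long ofs = b0 ? 0 : 64;	/* which word holds a bit */
	unsigned long tmp = b0 ? b0 : b1;

	return ofs + __builtin_ctzl(tmp);	/* index of first set bit */
}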
/kernel/linux/linux-6.6/tools/testing/selftests/kvm/x86_64/

  hyperv_clock.c
     27          } rm, rn, rh, a0, b0;                                   in mul_u64_u64_shr64() local
     31          b0.ll = b;                                              in mul_u64_u64_shr64()
     33          rm.ll = (u64)a0.l.low * b0.l.high;                      in mul_u64_u64_shr64()
     34          rn.ll = (u64)a0.l.high * b0.l.low;                      in mul_u64_u64_shr64()
     35          rh.ll = (u64)a0.l.high * b0.l.high;                     in mul_u64_u64_shr64()
/kernel/linux/linux-5.10/drivers/media/usb/dvb-usb/

  pctv452e.c
    518          u8 *b0, *rx;                                            in pctv452e_power_ctrl() local
    529          b0 = kmalloc(5 + PCTV_ANSWER_LEN, GFP_KERNEL);          in pctv452e_power_ctrl()
    530          if (!b0)                                                in pctv452e_power_ctrl()
    533          rx = b0 + 5;                                            in pctv452e_power_ctrl()
    542          b0[0] = 0xaa;                                           in pctv452e_power_ctrl()
    543          b0[1] = state->c++;                                     in pctv452e_power_ctrl()
    544          b0[2] = PCTV_CMD_RESET;                                 in pctv452e_power_ctrl()
    545          b0[3] = 1;                                              in pctv452e_power_ctrl()
    546          b0[4] = 0;                                              in pctv452e_power_ctrl()
    548          ret = dvb_usb_generic_rw(d, b0,                         in pctv452e_power_ctrl()
    [all...]
/kernel/linux/linux-6.6/drivers/media/usb/dvb-usb/

  pctv452e.c
    520          u8 *b0, *rx;                                            in pctv452e_power_ctrl() local
    531          b0 = kmalloc(5 + PCTV_ANSWER_LEN, GFP_KERNEL);          in pctv452e_power_ctrl()
    532          if (!b0)                                                in pctv452e_power_ctrl()
    535          rx = b0 + 5;                                            in pctv452e_power_ctrl()
    544          b0[0] = 0xaa;                                           in pctv452e_power_ctrl()
    545          b0[1] = state->c++;                                     in pctv452e_power_ctrl()
    546          b0[2] = PCTV_CMD_RESET;                                 in pctv452e_power_ctrl()
    547          b0[3] = 1;                                              in pctv452e_power_ctrl()
    548          b0[4] = 0;                                              in pctv452e_power_ctrl()
    550          ret = dvb_usb_generic_rw(d, b0,                         in pctv452e_power_ctrl()
    [all...]
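pctv452e_power_ctrl() hand-builds a small command frame before handing it to dvb_usb_generic_rw(): a 0xaa sync byte, a rolling per-device sequence counter, the opcode, a payload length, then the payload (here a single zero byte). A hedged sketch of that framing; the struct-free helper below is illustrative, not the driver's API:

#include <stdint.h>
#include <stddef.h>

/* Fill buf with the command frame seen in the excerpt:
 * sync, sequence, command, payload length, then payload bytes. */
static size_t pctv_build_cmd(uint8_t *buf, uint8_t *seq, uint8_t cmd,
			     const uint8_t *payload, uint8_t len)
{
	buf[0] = 0xaa;		/* sync byte */
	buf[1] = (*seq)++;	/* rolling sequence counter (state->c) */
	buf[2] = cmd;		/* opcode, e.g. PCTV_CMD_RESET */
	buf[3] = len;		/* payload length */
	for (uint8_t i = 0; i < len; i++)
		buf[4 + i] = payload[i];
	return 4 + (size_t)len;
}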
/kernel/linux/linux-5.10/fs/ext4/

  hash.c
     19          __u32 b0 = buf[0], b1 = buf[1];                         in TEA_transform() local
     25          b0 += ((b1 << 4)+a) ^ (b1+sum) ^ ((b1 >> 5)+b);         in TEA_transform()
     26          b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d);         in TEA_transform()
     29          buf[0] += b0;                                           in TEA_transform()
/kernel/linux/linux-5.10/include/linux/

  math64.h
    203          } rl, rm, rn, rh, a0, b0;                               in mul_u64_u64_shr() local
    207          b0.ll = b;                                              in mul_u64_u64_shr()
    209          rl.ll = mul_u32_u32(a0.l.low, b0.l.low);                in mul_u64_u64_shr()
    210          rm.ll = mul_u32_u32(a0.l.low, b0.l.high);               in mul_u64_u64_shr()
    211          rn.ll = mul_u32_u32(a0.l.high, b0.l.low);               in mul_u64_u64_shr()
    212          rh.ll = mul_u32_u32(a0.l.high, b0.l.high);              in mul_u64_u64_shr()
/kernel/linux/linux-6.6/include/linux/

  math64.h
    208          } rl, rm, rn, rh, a0, b0;                               in mul_u64_u64_shr() local
    212          b0.ll = b;                                              in mul_u64_u64_shr()
    214          rl.ll = mul_u32_u32(a0.l.low, b0.l.low);                in mul_u64_u64_shr()
    215          rm.ll = mul_u32_u32(a0.l.low, b0.l.high);               in mul_u64_u64_shr()
    216          rn.ll = mul_u32_u32(a0.l.high, b0.l.low);               in mul_u64_u64_shr()
    217          rh.ll = mul_u32_u32(a0.l.high, b0.l.high);              in mul_u64_u64_shr()
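mul_u64_u64_shr() (and the hyperv_clock selftest's mul_u64_u64_shr64 above) forms the full 128-bit product from four 32x32->64 partial products when no 128-bit integer type is available, then shifts. A standalone sketch of the shift-by-64 case, i.e. extracting the high 64 bits:

#include <stdint.h>

/* High 64 bits of a 64x64 multiply built from 32-bit limbs: the
 * portable scheme behind mul_u64_u64_shr() without __int128. */
static uint64_t mul_u64_u64_hi(uint64_t a, uint64_t b)
{
	uint64_t a_lo = (uint32_t)a, a_hi = a >> 32;
	uint64_t b_lo = (uint32_t)b, b_hi = b >> 32;

	uint64_t rl = a_lo * b_lo;	/* low  x low  */
	uint64_t rm = a_lo * b_hi;	/* low  x high */
	uint64_t rn = a_hi * b_lo;	/* high x low  */
	uint64_t rh = a_hi * b_hi;	/* high x high */

	/* Sum the middle 32-bit columns; mid >> 32 is the carry that
	 * propagates into the top word. */
	uint64_t mid = (rl >> 32) + (uint32_t)rm + (uint32_t)rn;

	return rh + (rm >> 32) + (rn >> 32) + (mid >> 32);
}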
/kernel/linux/linux-5.10/arch/sh/kernel/cpu/sh4/

  softfloat.c
     90  void add128(bits64 a0, bits64 a1, bits64 b0, bits64 b1, bits64 * z0Ptr,
     92  void sub128(bits64 a0, bits64 a1, bits64 b0, bits64 b1, bits64 * z0Ptr,
    638  void add128(bits64 a0, bits64 a1, bits64 b0, bits64 b1, bits64 * z0Ptr,   in add128() argument
    645          *z0Ptr = a0 + b0 + (z1 < a1);                           in add128()
    649  sub128(bits64 a0, bits64 a1, bits64 b0, bits64 b1, bits64 * z0Ptr,        in sub128() argument
    653          *z0Ptr = a0 - b0 - (a1 < b1);                           in sub128()
    658          bits64 b0, b1;                                          in estimateDiv128To64() local
    663          b0 = b >> 32;                                           in estimateDiv128To64()
    665          do_div(tmp, b0);                                        in estimateDiv128To64()
    667          z = (b0 << 3                                            in estimateDiv128To64()
    [all...]
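add128()/sub128() implement 128-bit arithmetic on (high, low) 64-bit pairs, detecting the low-half carry or borrow with an unsigned comparison: after z1 = a1 + b1, a wrap-around means z1 < a1. A self-contained sketch of both:

#include <stdint.h>

/* 128-bit add/sub on (hi, lo) pairs; the carry out of the low word
 * is exactly the wrap-around test, as in softfloat.c above. */
static void add128_c(uint64_t a0, uint64_t a1, uint64_t b0, uint64_t b1,
		     uint64_t *z0, uint64_t *z1)
{
	uint64_t lo = a1 + b1;

	*z1 = lo;
	*z0 = a0 + b0 + (lo < a1);	/* carry iff the low add wrapped */
}

static void sub128_c(uint64_t a0, uint64_t a1, uint64_t b0, uint64_t b1,
		     uint64_t *z0, uint64_t *z1)
{
	*z1 = a1 - b1;
	*z0 = a0 - b0 - (a1 < b1);	/* borrow iff a1 < b1 */
}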