
Searched refs: K1 (Results 1 - 25 of 50), sorted by relevance


/kernel/linux/linux-5.10/arch/mips/kvm/
entry.c:48  #define K1 27  (macro)
225 UASM_i_ADDIU(&p, K1, SP, -(int)sizeof(struct pt_regs)); in kvm_mips_build_vcpu_run()
229 UASM_i_SW(&p, i, offsetof(struct pt_regs, regs[i]), K1); in kvm_mips_build_vcpu_run()
234 UASM_i_SW(&p, V0, offsetof(struct pt_regs, cp0_status), K1); in kvm_mips_build_vcpu_run()
237 kvm_mips_build_save_scratch(&p, V1, K1); in kvm_mips_build_vcpu_run()
243 UASM_i_ADDIU(&p, K1, A0, offsetof(struct kvm_vcpu, arch)); in kvm_mips_build_vcpu_run()
249 UASM_i_SW(&p, SP, offsetof(struct kvm_vcpu_arch, host_stack), K1); in kvm_mips_build_vcpu_run()
252 UASM_i_SW(&p, GP, offsetof(struct kvm_vcpu_arch, host_gp), K1); in kvm_mips_build_vcpu_run()
263 UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, guest_ebase), K1); in kvm_mips_build_vcpu_run()
305 UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, pc), K1); in kvm_mips_build_enter_guest()
[all...]
/kernel/linux/linux-6.6/arch/mips/kvm/
entry.c:48  #define K1 27  (macro)
219 UASM_i_ADDIU(&p, K1, SP, -(int)sizeof(struct pt_regs)); in kvm_mips_build_vcpu_run()
223 UASM_i_SW(&p, i, offsetof(struct pt_regs, regs[i]), K1); in kvm_mips_build_vcpu_run()
228 UASM_i_SW(&p, V0, offsetof(struct pt_regs, cp0_status), K1); in kvm_mips_build_vcpu_run()
231 kvm_mips_build_save_scratch(&p, V1, K1); in kvm_mips_build_vcpu_run()
237 UASM_i_ADDIU(&p, K1, A0, offsetof(struct kvm_vcpu, arch)); in kvm_mips_build_vcpu_run()
243 UASM_i_SW(&p, SP, offsetof(struct kvm_vcpu_arch, host_stack), K1); in kvm_mips_build_vcpu_run()
246 UASM_i_SW(&p, GP, offsetof(struct kvm_vcpu_arch, host_gp), K1); in kvm_mips_build_vcpu_run()
257 UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, guest_ebase), K1); in kvm_mips_build_vcpu_run()
299 UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, pc), K1); in kvm_mips_build_enter_guest()
[all...]
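In both kvm/entry.c hits above, K1 is not a key but the uasm register index of $k1 (register 27), which kvm_mips_build_vcpu_run() uses as its base register while spilling host registers into a pt_regs frame and then re-pointing at vcpu->arch. A minimal stand-alone sketch of what the number 27 refers to (the name table is standard MIPS o32 register naming, not taken from the file):

    #include <stdio.h>

    static const char *const mips_reg_name[32] = {
        "zero", "at", "v0", "v1", "a0", "a1", "a2", "a3",
        "t0",   "t1", "t2", "t3", "t4", "t5", "t6", "t7",
        "s0",   "s1", "s2", "s3", "s4", "s5", "s6", "s7",
        "t8",   "t9", "k0", "k1", "gp", "sp", "fp", "ra",
    };

    int main(void)
    {
        int K0 = 26, K1 = 27;   /* same numbers as the kvm/entry.c macros */
        printf("K0 = $%d (%s), K1 = $%d (%s)\n",
               K0, mips_reg_name[K0], K1, mips_reg_name[K1]);
        return 0;
    }

$k0/$k1 are the two registers the MIPS ABI reserves for kernel and exception use, which is why the generated trampoline can clobber them freely.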
/kernel/linux/linux-5.10/arch/x86/crypto/
nh-sse2-x86_64.S:17  #define K1 %xmm5  (define)
77 movdqu 0x10(KEY), K1
88 _nh_stride K0, K1, K2, K3, 0x00
89 _nh_stride K1, K2, K3, K0, 0x10
90 _nh_stride K2, K3, K0, K1, 0x20
91 _nh_stride K3, K0, K1, K2, 0x30
100 _nh_stride K0, K1, K2, K3, 0x00
104 _nh_stride K1, K2, K3, K0, 0x10
108 _nh_stride K2, K3, K0, K1, 0x20
nh-avx2-x86_64.S:18  #define K1 %ymm5  (define)
75 vmovdqu 0x10(KEY), K1
88 _nh_2xstride K0, K1, K2, K3
92 vmovdqu 0x30(KEY), K1
93 _nh_2xstride K2, K3, K0, K1
111 _nh_2xstride K0, K1, K2, K3
117 vmovdqa K3, K1
126 _nh_2xstride K0, K1, K2, K3
sha1_avx2_x86_64_asm.S:684  #define K1 0x5a827999  (define)
691 .long K1, K1, K1, K1
692 .long K1, K1, K1, K1
sha1_ssse3_asm.S:432  #define K1 0x5a827999  (define)
441 .long K1, K1, K1, K1
/kernel/linux/linux-6.6/arch/x86/crypto/
nh-sse2-x86_64.S:18  #define K1 %xmm5  (define)
78 movdqu 0x10(KEY), K1
89 _nh_stride K0, K1, K2, K3, 0x00
90 _nh_stride K1, K2, K3, K0, 0x10
91 _nh_stride K2, K3, K0, K1, 0x20
92 _nh_stride K3, K0, K1, K2, 0x30
101 _nh_stride K0, K1, K2, K3, 0x00
105 _nh_stride K1, K2, K3, K0, 0x10
109 _nh_stride K2, K3, K0, K1, 0x20
nh-avx2-x86_64.S:19  #define K1 %ymm5  (define)
76 vmovdqu 0x10(KEY), K1
89 _nh_2xstride K0, K1, K2, K3
93 vmovdqu 0x30(KEY), K1
94 _nh_2xstride K2, K3, K0, K1
112 _nh_2xstride K0, K1, K2, K3
118 vmovdqa K3, K1
127 _nh_2xstride K0, K1, K2, K3
sha1_avx2_x86_64_asm.S:673  #define K1 0x5a827999  (define)
680 .long K1, K1, K1, K1
681 .long K1, K1, K1, K1
sha1_ssse3_asm.S:433  #define K1 0x5a827999  (define)
442 .long K1, K1, K1, K1
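In the sha1_*_asm.S results (both trees), K1 is the first SHA-1 round constant rather than a register; the ".long K1, K1, K1, K1" directives replicate it into a constant vector so the vectorized code can add it lane-wise. A quick stand-alone check that 0x5a827999 is floor(2^30 * sqrt(2)), for orientation only:

    #include <math.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* SHA-1 round constant for rounds 0-19: floor(2^30 * sqrt(2)) */
        uint32_t k1 = (uint32_t)floor(1073741824.0 * sqrt(2.0));
        printf("K1 = 0x%08x\n", k1);   /* expected: 0x5a827999 */
        return 0;
    }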
/kernel/linux/linux-5.10/fs/ext4/
hash.c:46  #define K1 0  (macro)
58 ROUND(F, a, b, c, d, in[0] + K1, 3); in half_md4_transform()
59 ROUND(F, d, a, b, c, in[1] + K1, 7); in half_md4_transform()
60 ROUND(F, c, d, a, b, in[2] + K1, 11); in half_md4_transform()
61 ROUND(F, b, c, d, a, in[3] + K1, 19); in half_md4_transform()
62 ROUND(F, a, b, c, d, in[4] + K1, 3); in half_md4_transform()
63 ROUND(F, d, a, b, c, in[5] + K1, 7); in half_md4_transform()
64 ROUND(F, c, d, a, b, in[6] + K1, 11); in half_md4_transform()
65 ROUND(F, b, c, d, a, in[7] + K1, 19); in half_md4_transform()
95 #undef K1 macro
[all...]
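In fs/ext4/hash.c, K1 is simply 0: the first pass of the half-MD4 transform adds no constant, while the later passes use K2/K3. A hedged sketch of the round shape used above (the F() and ROUND() forms follow the classic MD4 definitions; treat this as an illustration, not a copy of hash.c):

    #include <stdint.h>
    #include <stdio.h>

    #define F(x, y, z)   ((z) ^ ((x) & ((y) ^ (z))))
    #define ROL32(v, n)  (((v) << (n)) | ((v) >> (32 - (n))))
    #define ROUND(f, a, b, c, d, x, s) \
        ((a) += f((b), (c), (d)) + (x), (a) = ROL32((a), (s)))
    #define K1 0   /* pass-1 "constant", as in hash.c */

    /* First four steps of pass 1, mirroring lines 58-61 shown above. */
    static void half_md4_pass1_head(uint32_t buf[4], const uint32_t in[8])
    {
        uint32_t a = buf[0], b = buf[1], c = buf[2], d = buf[3];

        ROUND(F, a, b, c, d, in[0] + K1, 3);
        ROUND(F, d, a, b, c, in[1] + K1, 7);
        ROUND(F, c, d, a, b, in[2] + K1, 11);
        ROUND(F, b, c, d, a, in[3] + K1, 19);

        buf[0] = a; buf[1] = b; buf[2] = c; buf[3] = d;
    }

    int main(void)
    {
        uint32_t buf[4] = { 0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476 };
        uint32_t in[8]  = { 0 };
        half_md4_pass1_head(buf, in);
        printf("%08x %08x %08x %08x\n", buf[0], buf[1], buf[2], buf[3]);
        return 0;
    }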
/kernel/linux/linux-5.10/arch/mips/mm/
tlbex.c:282  #define K1 27  (macro)
367 r.r2 = K1; in build_get_work_registers()
380 UASM_i_LA(p, K1, (long)&handler_reg_save); in build_get_work_registers()
381 UASM_i_ADDU(p, K0, K0, K1); in build_get_work_registers()
389 r.r1 = K1; in build_get_work_registers()
424 uasm_i_lui(&p, K1, uasm_rel_hi(pgdc)); /* cp0 delay */ in build_r3000_tlb_refill_handler()
425 uasm_i_lw(&p, K1, uasm_rel_lo(pgdc), K1); in build_r3000_tlb_refill_handler()
428 uasm_i_addu(&p, K1, K1, K in build_r3000_tlb_refill_handler()
[all...]
/kernel/linux/linux-6.6/arch/mips/mm/
tlbex.c:281  #define K1 27  (macro)
360 r.r2 = K1; in build_get_work_registers()
373 UASM_i_LA(p, K1, (long)&handler_reg_save); in build_get_work_registers()
374 UASM_i_ADDU(p, K0, K0, K1); in build_get_work_registers()
382 r.r1 = K1; in build_get_work_registers()
417 uasm_i_lui(&p, K1, uasm_rel_hi(pgdc)); /* cp0 delay */ in build_r3000_tlb_refill_handler()
418 uasm_i_lw(&p, K1, uasm_rel_lo(pgdc), K1); in build_r3000_tlb_refill_handler()
421 uasm_i_addu(&p, K1, K1, K in build_r3000_tlb_refill_handler()
[all...]
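In both tlbex.c copies, K1 is again register $27; the lui/lw pair shown loads the pgd_current pointer by splitting its address into the adjusted high half and sign-extended low half that uasm_rel_hi()/uasm_rel_lo() compute. A small stand-alone illustration of that split, with a made-up address (hypothetical value, not from the file):

    #include <stdint.h>
    #include <stdio.h>

    /* Adjusted %hi: chosen so that (hi << 16) + (int16_t)lo == addr. */
    static uint32_t rel_hi(uint32_t addr) { return (addr + 0x8000u) >> 16; }
    static int32_t  rel_lo(uint32_t addr) { return (int16_t)(addr & 0xffff); }

    int main(void)
    {
        uint32_t pgdc = 0x80459abc;               /* hypothetical &pgd_current */
        printf("lui  k1, 0x%04x\n", rel_hi(pgdc) & 0xffff);
        printf("lw   k1, %d(k1)\n", (int)rel_lo(pgdc));
        printf("reassembled: 0x%08x\n",
               (uint32_t)((rel_hi(pgdc) << 16) + rel_lo(pgdc)));
        return 0;
    }

The low half is sign-extended by the lw displacement, which is why the high half has to be rounded up when the low 16 bits are 0x8000 or above.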
/kernel/linux/linux-6.6/arch/s390/crypto/
chacha-s390.S:53  #define K1 %v17  (define)
90 VL K1,0,,KEY # load key
99 VREPF XB0,K1,0 # smash the key
100 VREPF XB1,K1,1
101 VREPF XB2,K1,2
102 VREPF XB3,K1,3
284 VAF XB0,XB0,K1
307 VAF XB0,XB1,K1
334 VAF XB0,XB2,K1
361 VAF XB0,XB3,K1
436 #define K1 %v24  (global define)
[all...]
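In chacha-s390.S, K1 is a vector register (%v17, and %v24 in the later block) holding part of the ChaCha key; the VREPF lines "smash" it by replicating each 32-bit key word across all four lanes so four blocks can be mixed in parallel, and the VAF lines add the key back in for the final feedforward. A plain-C stand-in for the replication step (not s390 vector code):

    #include <stdint.h>
    #include <stdio.h>

    typedef struct { uint32_t lane[4]; } v4u32;

    /* Roughly what VREPF XBn,K1,n does: broadcast word n of K1 to all lanes. */
    static v4u32 vrepf(const uint32_t k[4], int n)
    {
        v4u32 r = { { k[n], k[n], k[n], k[n] } };
        return r;
    }

    int main(void)
    {
        uint32_t K1[4] = { 0x03020100, 0x07060504, 0x0b0a0908, 0x0f0e0d0c };
        v4u32 xb0 = vrepf(K1, 0);
        printf("XB0 = %08x %08x %08x %08x\n",
               xb0.lane[0], xb0.lane[1], xb0.lane[2], xb0.lane[3]);
        return 0;
    }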
/kernel/linux/linux-5.10/crypto/
rmd160.c:26  #define K1 RMD_K1  (macro)
68 ROUND(aa, bb, cc, dd, ee, F1, K1, in[0], 11); in rmd160_transform()
69 ROUND(ee, aa, bb, cc, dd, F1, K1, in[1], 14); in rmd160_transform()
70 ROUND(dd, ee, aa, bb, cc, F1, K1, in[2], 15); in rmd160_transform()
71 ROUND(cc, dd, ee, aa, bb, F1, K1, in[3], 12); in rmd160_transform()
72 ROUND(bb, cc, dd, ee, aa, F1, K1, in[4], 5); in rmd160_transform()
73 ROUND(aa, bb, cc, dd, ee, F1, K1, in[5], 8); in rmd160_transform()
74 ROUND(ee, aa, bb, cc, dd, F1, K1, in[6], 7); in rmd160_transform()
75 ROUND(dd, ee, aa, bb, cc, F1, K1, in[7], 9); in rmd160_transform()
76 ROUND(cc, dd, ee, aa, bb, F1, K1, i in rmd160_transform()
[all...]
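In rmd160.c above, and in the rmd256/rmd128/rmd320 results below, K1 aliases RMD_K1, the round-1 constant of RIPEMD (0x00000000 on the left line), used together with the round-1 selection function F1(x, y, z) = x ^ y ^ z. A hedged sketch of one RIPEMD-160-style step (shapes follow the public RIPEMD definition, not copied from the kernel macros):

    #include <stdint.h>
    #include <stdio.h>

    #define F1(x, y, z) ((x) ^ (y) ^ (z))
    #define K1 0x00000000u                      /* RMD_K1, round-1 constant */
    #define ROL32(v, n) (((v) << (n)) | ((v) >> (32 - (n))))

    /* One step: a += F1(b,c,d) + x + K1; a = rol(a,s) + e; c = rol(c,10). */
    static void rmd_step_f1(uint32_t *a, uint32_t b, uint32_t *c, uint32_t d,
                            uint32_t e, uint32_t x, int s)
    {
        *a += F1(b, *c, d) + x + K1;
        *a = ROL32(*a, s) + e;
        *c = ROL32(*c, 10);
    }

    int main(void)
    {
        uint32_t a = 0x67452301, b = 0xefcdab89, c = 0x98badcfe,
                 d = 0x10325476, e = 0xc3d2e1f0;
        rmd_step_f1(&a, b, &c, d, e, 0, 11);    /* in[0], shift 11, as above */
        printf("a=%08x c=%08x\n", a, c);
        return 0;
    }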
rmd256.c:26  #define K1 RMD_K1  (macro)
62 ROUND(aa, bb, cc, dd, F1, K1, in[0], 11); in rmd256_transform()
63 ROUND(dd, aa, bb, cc, F1, K1, in[1], 14); in rmd256_transform()
64 ROUND(cc, dd, aa, bb, F1, K1, in[2], 15); in rmd256_transform()
65 ROUND(bb, cc, dd, aa, F1, K1, in[3], 12); in rmd256_transform()
66 ROUND(aa, bb, cc, dd, F1, K1, in[4], 5); in rmd256_transform()
67 ROUND(dd, aa, bb, cc, F1, K1, in[5], 8); in rmd256_transform()
68 ROUND(cc, dd, aa, bb, F1, K1, in[6], 7); in rmd256_transform()
69 ROUND(bb, cc, dd, aa, F1, K1, in[7], 9); in rmd256_transform()
70 ROUND(aa, bb, cc, dd, F1, K1, i in rmd256_transform()
[all...]
rmd128.c:26  #define K1 RMD_K1  (macro)
62 ROUND(aa, bb, cc, dd, F1, K1, in[0], 11); in rmd128_transform()
63 ROUND(dd, aa, bb, cc, F1, K1, in[1], 14); in rmd128_transform()
64 ROUND(cc, dd, aa, bb, F1, K1, in[2], 15); in rmd128_transform()
65 ROUND(bb, cc, dd, aa, F1, K1, in[3], 12); in rmd128_transform()
66 ROUND(aa, bb, cc, dd, F1, K1, in[4], 5); in rmd128_transform()
67 ROUND(dd, aa, bb, cc, F1, K1, in[5], 8); in rmd128_transform()
68 ROUND(cc, dd, aa, bb, F1, K1, in[6], 7); in rmd128_transform()
69 ROUND(bb, cc, dd, aa, F1, K1, in[7], 9); in rmd128_transform()
70 ROUND(aa, bb, cc, dd, F1, K1, i in rmd128_transform()
[all...]
khazad.c:763  u64 K2, K1; in khazad_setkey()  (local)
767 K1 = ((u64)be32_to_cpu(key[2]) << 32) | be32_to_cpu(key[3]); in khazad_setkey()
771 ctx->E[r] = T0[(int)(K1 >> 56) ] ^ in khazad_setkey()
772 T1[(int)(K1 >> 48) & 0xff] ^ in khazad_setkey()
773 T2[(int)(K1 >> 40) & 0xff] ^ in khazad_setkey()
774 T3[(int)(K1 >> 32) & 0xff] ^ in khazad_setkey()
775 T4[(int)(K1 >> 24) & 0xff] ^ in khazad_setkey()
776 T5[(int)(K1 >> 16) & 0xff] ^ in khazad_setkey()
777 T6[(int)(K1 >> 8) & 0xff] ^ in khazad_setkey()
778 T7[(int)(K1 ) in khazad_setkey()
[all...]
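In khazad.c, K1 and K2 are the two 64-bit halves of the 128-bit user key; K1 is built from big-endian key words 2 and 3 (as shown), and K2 presumably from words 0 and 1, before the key schedule mixes K1 through the T-boxes. A stand-alone sketch of just that split (the local be32() helper stands in for be32_to_cpu()):

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t be32(const uint8_t *p)
    {
        return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
               ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
    }

    int main(void)
    {
        const uint8_t key[16] = {
            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
            0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
        };
        uint64_t K2 = ((uint64_t)be32(key + 0) << 32) | be32(key + 4);
        uint64_t K1 = ((uint64_t)be32(key + 8) << 32) | be32(key + 12);
        printf("K2 = %016llx\nK1 = %016llx\n",
               (unsigned long long)K2, (unsigned long long)K1);
        return 0;
    }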
rmd320.c:26  #define K1 RMD_K1  (macro)
68 ROUND(aa, bb, cc, dd, ee, F1, K1, in[0], 11); in rmd320_transform()
69 ROUND(ee, aa, bb, cc, dd, F1, K1, in[1], 14); in rmd320_transform()
70 ROUND(dd, ee, aa, bb, cc, F1, K1, in[2], 15); in rmd320_transform()
71 ROUND(cc, dd, ee, aa, bb, F1, K1, in[3], 12); in rmd320_transform()
72 ROUND(bb, cc, dd, ee, aa, F1, K1, in[4], 5); in rmd320_transform()
73 ROUND(aa, bb, cc, dd, ee, F1, K1, in[5], 8); in rmd320_transform()
74 ROUND(ee, aa, bb, cc, dd, F1, K1, in[6], 7); in rmd320_transform()
75 ROUND(dd, ee, aa, bb, cc, F1, K1, in[7], 9); in rmd320_transform()
76 ROUND(cc, dd, ee, aa, bb, F1, K1, i in rmd320_transform()
[all...]
anubis.c:491  u32 K0, K1, K2, K3; in anubis_setkey()  (local)
496 K1 = T4[(kappa[N - 1] >> 16) & 0xff]; in anubis_setkey()
505 K1 = T4[(kappa[i] >> 16) & 0xff] ^ in anubis_setkey()
506 (T5[(K1 >> 24) ] & 0xff000000U) ^ in anubis_setkey()
507 (T5[(K1 >> 16) & 0xff] & 0x00ff0000U) ^ in anubis_setkey()
508 (T5[(K1 >> 8) & 0xff] & 0x0000ff00U) ^ in anubis_setkey()
509 (T5[(K1 ) & 0xff] & 0x000000ffU); in anubis_setkey()
523 ctx->E[r][1] = K1; in anubis_setkey()
/kernel/linux/linux-6.6/crypto/
rmd160.c:26  #define K1 RMD_K1  (macro)
68 ROUND(aa, bb, cc, dd, ee, F1, K1, in[0], 11); in rmd160_transform()
69 ROUND(ee, aa, bb, cc, dd, F1, K1, in[1], 14); in rmd160_transform()
70 ROUND(dd, ee, aa, bb, cc, F1, K1, in[2], 15); in rmd160_transform()
71 ROUND(cc, dd, ee, aa, bb, F1, K1, in[3], 12); in rmd160_transform()
72 ROUND(bb, cc, dd, ee, aa, F1, K1, in[4], 5); in rmd160_transform()
73 ROUND(aa, bb, cc, dd, ee, F1, K1, in[5], 8); in rmd160_transform()
74 ROUND(ee, aa, bb, cc, dd, F1, K1, in[6], 7); in rmd160_transform()
75 ROUND(dd, ee, aa, bb, cc, F1, K1, in[7], 9); in rmd160_transform()
76 ROUND(cc, dd, ee, aa, bb, F1, K1, i in rmd160_transform()
[all...]
khazad.c:763  u64 K2, K1; in khazad_setkey()  (local)
767 K1 = ((u64)be32_to_cpu(key[2]) << 32) | be32_to_cpu(key[3]); in khazad_setkey()
771 ctx->E[r] = T0[(int)(K1 >> 56) ] ^ in khazad_setkey()
772 T1[(int)(K1 >> 48) & 0xff] ^ in khazad_setkey()
773 T2[(int)(K1 >> 40) & 0xff] ^ in khazad_setkey()
774 T3[(int)(K1 >> 32) & 0xff] ^ in khazad_setkey()
775 T4[(int)(K1 >> 24) & 0xff] ^ in khazad_setkey()
776 T5[(int)(K1 >> 16) & 0xff] ^ in khazad_setkey()
777 T6[(int)(K1 >> 8) & 0xff] ^ in khazad_setkey()
778 T7[(int)(K1 ) in khazad_setkey()
[all...]
anubis.c:491  u32 K0, K1, K2, K3; in anubis_setkey()  (local)
496 K1 = T4[(kappa[N - 1] >> 16) & 0xff]; in anubis_setkey()
505 K1 = T4[(kappa[i] >> 16) & 0xff] ^ in anubis_setkey()
506 (T5[(K1 >> 24) ] & 0xff000000U) ^ in anubis_setkey()
507 (T5[(K1 >> 16) & 0xff] & 0x00ff0000U) ^ in anubis_setkey()
508 (T5[(K1 >> 8) & 0xff] & 0x0000ff00U) ^ in anubis_setkey()
509 (T5[(K1 ) & 0xff] & 0x000000ffU); in anubis_setkey()
523 ctx->E[r][1] = K1; in anubis_setkey()
/kernel/linux/linux-6.6/fs/ext4/
hash.c:46  #define K1 0  (macro)
58 ROUND(F, a, b, c, d, in[0] + K1, 3); in half_md4_transform()
59 ROUND(F, d, a, b, c, in[1] + K1, 7); in half_md4_transform()
60 ROUND(F, c, d, a, b, in[2] + K1, 11); in half_md4_transform()
61 ROUND(F, b, c, d, a, in[3] + K1, 19); in half_md4_transform()
62 ROUND(F, a, b, c, d, in[4] + K1, 3); in half_md4_transform()
63 ROUND(F, d, a, b, c, in[5] + K1, 7); in half_md4_transform()
64 ROUND(F, c, d, a, b, in[6] + K1, 11); in half_md4_transform()
65 ROUND(F, b, c, d, a, in[7] + K1, 19); in half_md4_transform()
95 #undef K1 macro
[all...]
/kernel/linux/linux-6.6/net/sunrpc/auth_gss/
gss_krb5_keys.c:433  * K1 = HMAC-SHA(key, 0x00000001 | label | 0x00 | k)
445 u32 outlen, struct xdr_netobj *K1) in krb5_hmac_K1()
469 ret = crypto_shash_final(desc, K1->data); in krb5_hmac_K1()
492 * k-truncate(K1)
507 struct xdr_netobj K1 = { in krb5_kdf_hmac_sha2() local
527 K1.len = crypto_shash_digestsize(tfm); in krb5_kdf_hmac_sha2()
528 K1.data = kmalloc(K1.len, gfp_mask); in krb5_kdf_hmac_sha2()
529 if (!K1.data) { in krb5_kdf_hmac_sha2()
534 ret = krb5_hmac_K1(tfm, label, outkey->len, &K1); in krb5_kdf_hmac_sha2()
444 krb5_hmac_K1(struct crypto_shash *tfm, const struct xdr_netobj *label, u32 outlen, struct xdr_netobj *K1) krb5_hmac_K1() argument
[all...]
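The comment at line 433 spells out the construction: K1 = HMAC-SHA(key, 0x00000001 | label | 0x00 | k), where k is the desired output length in bits, and the caller then k-truncates K1 (the RFC 8009 style SHA-2 Kerberos KDF). A sketch of just the byte layout fed to the HMAC, with a hypothetical label; the HMAC call itself, done via crypto_shash in the kernel, is omitted:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Build 0x00000001 | label | 0x00 | k (big endian, k in bits). */
    static size_t build_kdf_input(uint8_t *buf, const uint8_t *label,
                                  size_t label_len, uint32_t k_bits)
    {
        size_t off = 0;

        buf[off++] = 0x00; buf[off++] = 0x00;
        buf[off++] = 0x00; buf[off++] = 0x01;
        memcpy(buf + off, label, label_len);
        off += label_len;
        buf[off++] = 0x00;
        buf[off++] = (k_bits >> 24) & 0xff;
        buf[off++] = (k_bits >> 16) & 0xff;
        buf[off++] = (k_bits >> 8) & 0xff;
        buf[off++] =  k_bits        & 0xff;
        return off;
    }

    int main(void)
    {
        uint8_t buf[64];
        size_t n = build_kdf_input(buf, (const uint8_t *)"prf", 3, 256);
        for (size_t i = 0; i < n; i++)
            printf("%02x", buf[i]);
        printf("\n");   /* 00000001 707266 00 00000100 */
        return 0;
    }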

Completed in 17 milliseconds
