/kernel/linux/linux-6.6/arch/powerpc/crypto/

  aesp10-ppc.pl
    121  my ($zero,$in0,$in1,$key,$rcon,$mask,$tmp)=map("v$_",(0..6));
    172  lvx $in0,0,$inp
    183  vperm $in0,$in0,$in1,$key # align [and byte swap in LE]
    201  vperm $key,$in0,$in0,$mask # rotate-n-splat
    202  vsldoi $tmp,$zero,$in0,12 # >>32
    203  vperm $outtail,$in0,$in0,$outperm # rotate
    210  vxor $in0,
    [all...]
/kernel/linux/linux-5.10/lib/

  test_memcat_p.c  (all hits are in test_memcat_p_init())
    24   struct test_struct **in0, **in1, **out, **p;
    27   in0 = kcalloc(INPUT_MAX, sizeof(*in0), GFP_KERNEL);
    28   if (!in0)
    36   in0[i] = kmalloc(sizeof(**in0), GFP_KERNEL);
    37   if (!in0[i])
    42   kfree(in0[i]);
    48   in0[i]->num = r;
    50   in0[
    [all...]
/kernel/linux/linux-6.6/lib/

  test_memcat_p.c  (all hits are in test_memcat_p_init())
    24   struct test_struct **in0, **in1, **out, **p;
    27   in0 = kcalloc(INPUT_MAX, sizeof(*in0), GFP_KERNEL);
    28   if (!in0)
    36   in0[i] = kmalloc(sizeof(**in0), GFP_KERNEL);
    37   if (!in0[i])
    42   kfree(in0[i]);
    48   in0[i]->num = r;
    50   in0[
    [all...]
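The test_memcat_p.c hits above show test_memcat_p_init() building two NULL-terminated arrays of test_struct pointers with kcalloc()/kmalloc() before concatenating them (the file exercises lib/memcat_p.c). Below is a minimal userspace sketch of the same pattern, assuming ordinary libc allocation; the helper name concat_ptr_arrays() is an illustrative stand-in, not the kernel's __memcat_p() API.

#include <stdio.h>
#include <stdlib.h>

struct test_struct { int num; };

/* Concatenate two NULL-terminated pointer arrays into a newly allocated one. */
void **concat_ptr_arrays(void **a, void **b)
{
	size_t na = 0, nb = 0, i;
	void **out;

	while (a[na])
		na++;
	while (b[nb])
		nb++;

	out = calloc(na + nb + 1, sizeof(*out));  /* +1 keeps the NULL terminator */
	if (!out)
		return NULL;
	for (i = 0; i < na; i++)
		out[i] = a[i];
	for (i = 0; i < nb; i++)
		out[na + i] = b[i];
	return out;                               /* out[na + nb] stays NULL */
}

int main(void)
{
	struct test_struct *in0[4] = { NULL }, *in1[4] = { NULL };
	void **out;
	int i;

	for (i = 0; i < 3; i++) {                 /* mirrors the kcalloc/kmalloc setup loop */
		in0[i] = malloc(sizeof(*in0[i]));
		in1[i] = malloc(sizeof(*in1[i]));
		if (!in0[i] || !in1[i])
			return 1;
		in0[i]->num = i;
		in1[i]->num = 100 + i;
	}

	out = concat_ptr_arrays((void **)in0, (void **)in1);
	for (i = 0; out && out[i]; i++)
		printf("%d\n", ((struct test_struct *)out[i])->num);
	return 0;
}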
/kernel/linux/linux-5.10/arch/ia64/kernel/

  relocate_kernel.S
    130  mov r30=in0 // in0 is page_list
    134  ld8 r30=[in0], 8;;
    141  (p6) and in0=r30, r16
    192  add loc1=4*8, in0 // save r4 and r5 first
    202  add loc1=32*8, in0
    205  st8 [in0]=r0, 8 // r0
    209  st8 [in0]=r1, 8 // r1
    213  st8 [in0]=r2, 8 // r2
    217  st8 [in0]
    [all...]

  pal.S
    30   * in0 Address of the PAL entry point (text address, NOT a function
    37   st8 [r2]=in0
    54   * in0 Index of PAL service
    62   mov r28 = in0
    98   * in0 Index of PAL service
    106  mov r28 = in0 // Index MUST be copied to r28
    107  mov out0 = in0 // AND in0 of PAL function
    134  * in0 Index of PAL service
    154  mov r28 = in0 // cop
    [all...]

  head.S
    432  add r19=IA64_NUM_DBG_REGS*8,in0
    442  st8.nta [in0]=r16,8
    452  lfetch.nta [in0]
    454  add r19=IA64_NUM_DBG_REGS*8,in0
    458  1: ld8.nta r16=[in0],8
    476  adds loc0=96*16-16,in0
    477  adds loc1=96*16-16-128,in0
    493  adds loc2=96*16-32,in0
    497  adds loc3=96*16-32-128,in0
    513  adds loc0=96*16-48,in0
    [all...]
/kernel/linux/linux-6.6/arch/ia64/kernel/

  relocate_kernel.S
    130  mov r30=in0 // in0 is page_list
    134  ld8 r30=[in0], 8;;
    141  (p6) and in0=r30, r16
    192  add loc1=4*8, in0 // save r4 and r5 first
    202  add loc1=32*8, in0
    205  st8 [in0]=r0, 8 // r0
    209  st8 [in0]=r1, 8 // r1
    213  st8 [in0]=r2, 8 // r2
    217  st8 [in0]
    [all...]

  pal.S
    30   * in0 Address of the PAL entry point (text address, NOT a function
    37   st8 [r2]=in0
    54   * in0 Index of PAL service
    62   mov r28 = in0
    98   * in0 Index of PAL service
    106  mov r28 = in0 // Index MUST be copied to r28
    107  mov out0 = in0 // AND in0 of PAL function
    134  * in0 Index of PAL service
    154  mov r28 = in0 // cop
    [all...]

  head.S
    425  add r19=IA64_NUM_DBG_REGS*8,in0
    435  st8.nta [in0]=r16,8
    445  lfetch.nta [in0]
    447  add r19=IA64_NUM_DBG_REGS*8,in0
    451  1: ld8.nta r16=[in0],8
    469  adds loc0=96*16-16,in0
    470  adds loc1=96*16-16-128,in0
    486  adds loc2=96*16-32,in0
    490  adds loc3=96*16-32-128,in0
    506  adds loc0=96*16-48,in0
    [all...]
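For context on the relocate_kernel.S hits: the loop around ld8 r30=[in0], 8 walks the kexec page_list, and the masked test (p6) and in0=r30, r16 picks the flag bits out of each 64-bit entry. The sketch below is a hedged, generic rendering of that indirection-list walk, using the IND_* flag values from include/linux/kexec.h; walk_page_list() and copy_one_page() are illustrative names, and the real ia64 code does considerably more (TLB purges, cache flushes) than this.

#include <stdint.h>
#include <string.h>

#define IND_DESTINATION 0x1   /* entry names the next destination page */
#define IND_INDIRECTION 0x2   /* entry points at another list page */
#define IND_DONE        0x4   /* end of the list */
#define IND_SOURCE      0x8   /* entry names a source page to copy */
#define KEXEC_PAGE_SIZE 4096UL

static void copy_one_page(void *dst, const void *src)
{
	memcpy(dst, src, KEXEC_PAGE_SIZE);
}

void walk_page_list(uint64_t *entry)
{
	void *dest = NULL;

	for (;;) {
		uint64_t e = *entry++;
		void *page = (void *)(uintptr_t)(e & ~(KEXEC_PAGE_SIZE - 1));

		if (e & IND_DONE) {
			break;                        /* list exhausted */
		} else if (e & IND_DESTINATION) {
			dest = page;                  /* subsequent copies land here */
		} else if (e & IND_INDIRECTION) {
			entry = page;                 /* continue in a fresh list page */
		} else if (e & IND_SOURCE) {
			copy_one_page(dest, page);    /* copy one page, then advance */
			dest = (char *)dest + KEXEC_PAGE_SIZE;
		}
	}
}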
/kernel/linux/linux-5.10/arch/mips/crypto/

  poly1305-mips.pl
    76   my ($in0,$in1,$tmp0,$tmp1,$tmp2,$tmp3,$tmp4) = ($a4,$a5,$a6,$a7,$at,$t0,$t1);
    134  ld $in0,0($inp)
    141  dsllv $in0,$in0,$tmp0
    146  dsrlv $in0,$in0,$tmp0
    151  or $in0,$in0,$tmp3
    155  ldl $in0,0+MSB($inp)
    157  ldr $in0,
    [all...]
/kernel/linux/linux-6.6/arch/mips/crypto/

  poly1305-mips.pl
    76   my ($in0,$in1,$tmp0,$tmp1,$tmp2,$tmp3,$tmp4) = ($a4,$a5,$a6,$a7,$at,$t0,$t1);
    134  ld $in0,0($inp)
    141  dsllv $in0,$in0,$tmp0
    146  dsrlv $in0,$in0,$tmp0
    151  or $in0,$in0,$tmp3
    155  ldl $in0,0+MSB($inp)
    157  ldr $in0,
    [all...]
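The poly1305-mips.pl hits above appear to be the unaligned-input path for fetching message words: one variant loads with ld and repairs the alignment with dsllv/dsrlv/or, while the older-ISA variant uses an ldl/ldr pair. In portable C the same result is simply a byte-wise little-endian load, as in the sketch below (load_le64 is an illustrative name, not something defined in the kernel file).

#include <stdint.h>
#include <stddef.h>

/* Fetch a 64-bit little-endian word from a possibly unaligned pointer. */
static inline uint64_t load_le64(const unsigned char *p)
{
	uint64_t v = 0;
	size_t i;

	for (i = 0; i < 8; i++)
		v |= (uint64_t)p[i] << (8 * i);   /* assemble bytes, lowest address first */
	return v;
}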
/kernel/linux/linux-5.10/arch/arm64/crypto/

  aes-neon.S
    127  .macro sub_bytes_4x, in0, in1, in2, in3
    128  sub v8.16b, \in0\().16b, v15.16b
    129  tbl \in0\().16b, {v16.16b-v19.16b}, \in0\().16b
    136  tbx \in0\().16b, {v20.16b-v23.16b}, v8.16b
    143  tbx \in0\().16b, {v24.16b-v27.16b}, v8.16b
    151  tbx \in0\().16b, {v28.16b-v31.16b}, v8.16b
    158  .macro mul_by_x_2x, out0, out1, in0, in1, tmp0, tmp1, const
    159  sshr \tmp0\().16b, \in0\().16b, #7
    160  shl \out0\().16b, \in0\()
    [all...]
/kernel/linux/linux-6.6/arch/arm64/crypto/

  aes-neon.S
    127  .macro sub_bytes_4x, in0, in1, in2, in3
    128  sub v8.16b, \in0\().16b, v15.16b
    129  tbl \in0\().16b, {v16.16b-v19.16b}, \in0\().16b
    136  tbx \in0\().16b, {v20.16b-v23.16b}, v8.16b
    143  tbx \in0\().16b, {v24.16b-v27.16b}, v8.16b
    151  tbx \in0\().16b, {v28.16b-v31.16b}, v8.16b
    158  .macro mul_by_x_2x, out0, out1, in0, in1, tmp0, tmp1, const
    159  sshr \tmp0\().16b, \in0\().16b, #7
    160  shl \out0\().16b, \in0\()
    [all...]
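In the aes-neon.S hits, sub_bytes_4x performs SubBytes as chained tbl/tbx lookups over the 256-byte S-box held in v16-v31, and mul_by_x_2x multiplies every byte by x in GF(2^8): sshr #7 turns the top bit of each byte into a mask, shl #1 doubles, and the remaining (truncated) instructions fold in the AES reduction constant 0x1b in the standard way. A scalar C rendering of that multiply-by-x step follows; xtime is the conventional name for it, not an identifier from this file.

#include <stdint.h>
#include <stddef.h>

/* Multiply one GF(2^8) element by x, reducing by the AES polynomial. */
static inline uint8_t xtime(uint8_t b)
{
	/* (b >> 7) is 1 exactly when the shift overflows GF(2^8); reduce with 0x1b. */
	return (uint8_t)((b << 1) ^ ((b >> 7) * 0x1b));
}

/* Same operation across a 16-byte vector, as one NEON mul_by_x performs. */
void xtime_block(uint8_t v[16])
{
	size_t i;

	for (i = 0; i < 16; i++)
		v[i] = xtime(v[i]);
}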
/kernel/linux/linux-5.10/arch/ia64/lib/

  ip_fast_csum.S  (a C sketch of the checksum follows this listing)
    9    * in0: address of buffer to checksum (char *)
    28   #define in0 r32
    39   and r14=3,in0 // is it aligned on 4-byte?
    40   add r15=4,in0 // second source pointer
    44   (p7) ld4 r20=[in0],8
    48   ld4 r22=[in0],8
    51   ld4 r24=[in0]
    88   mov out0=in0
    100  ld4 r20=[in0],4
    104  ld4 r22=[in0],
    [all...]

  clear_page.S
    43   mov dst_fetch = in0
    44   adds dst1 = 16, in0
    45   adds dst2 = 32, in0
    48   adds dst3 = 48, in0 // executing this multiple times is harmless
    53   adds dst4 = 64, in0

  copy_page.S
    7    * in0: address of target page
    63   adds tgt2=8,in0
    66   mov tgt1=in0
    67   add tgtf=512,in0
    68   add tgt_last = tgt_last, in0

  idiv32.S
    47   EXTEND in0 = in0 // in0 = a
    50   setf.sig f8 = in0
    73   setf.sig f7 = in0

  strlen.S
    8    * in0 address of string
    90   extr.u tmp=in0,0,3 // tmp=least significant 3 bits
    91   mov orig=in0 // keep trackof initial byte address
    92   dep src=0,in0,0,3 // src=8byte-aligned in0 address
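ip_fast_csum.S above computes the IPv4 header checksum; the hits show the alignment probe (and r14=3,in0) and the ld4 word loads. The sketch below gives the underlying algorithm: a one's-complement sum of 16-bit words over ihl 32-bit words, carries folded back in, result complemented. Unlike the assembly it assumes a 2-byte-aligned header, and ip_header_checksum is an illustrative name rather than the kernel's ip_fast_csum() interface.

#include <stdint.h>

uint16_t ip_header_checksum(const void *iph, unsigned int ihl)
{
	const uint16_t *p = iph;
	uint32_t sum = 0;
	unsigned int i;

	for (i = 0; i < ihl * 2; i++)            /* ihl counts 32-bit words */
		sum += p[i];

	sum = (sum & 0xffff) + (sum >> 16);      /* fold the carries back in */
	sum = (sum & 0xffff) + (sum >> 16);      /* a second fold covers the worst case */
	return (uint16_t)~sum;
}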
/kernel/linux/linux-6.6/arch/ia64/lib/

  ip_fast_csum.S
    9    * in0: address of buffer to checksum (char *)
    28   #define in0 r32
    39   and r14=3,in0 // is it aligned on 4-byte?
    40   add r15=4,in0 // second source pointer
    44   (p7) ld4 r20=[in0],8
    48   ld4 r22=[in0],8
    51   ld4 r24=[in0]
    88   mov out0=in0
    100  ld4 r20=[in0],4
    104  ld4 r22=[in0],
    [all...]

  clear_page.S
    43   mov dst_fetch = in0
    44   adds dst1 = 16, in0
    45   adds dst2 = 32, in0
    48   adds dst3 = 48, in0 // executing this multiple times is harmless
    53   adds dst4 = 64, in0

  copy_page.S
    7    * in0: address of target page
    63   adds tgt2=8,in0
    66   mov tgt1=in0
    67   add tgtf=512,in0
    68   add tgt_last = tgt_last, in0

  idiv32.S
    47   EXTEND in0 = in0 // in0 = a
    50   setf.sig f8 = in0
    73   setf.sig f7 = in0

  strlen.S
    8    * in0 address of string
    90   extr.u tmp=in0,0,3 // tmp=least significant 3 bits
    91   mov orig=in0 // keep trackof initial byte address
    92   dep src=0,in0,0,3 // src=8byte-aligned in0 address
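The strlen.S hits show the setup for a word-at-a-time scan: extr.u grabs the low three address bits and dep src=0,in0,0,3 rounds the pointer down to an 8-byte boundary. The ia64 code then locates the terminating byte with czx1; the sketch below is a hedged C equivalent for a little-endian machine, using the well-known zero-byte bit trick and GCC's __builtin_ctzll in place of czx1. strlen_words is an illustrative name.

#include <stdint.h>
#include <stddef.h>

size_t strlen_words(const char *s)
{
	const uint64_t ones  = 0x0101010101010101ULL;
	const uint64_t highs = 0x8080808080808080ULL;
	uintptr_t addr = (uintptr_t)s;
	const uint64_t *w = (const uint64_t *)(addr & ~(uintptr_t)7); /* align down */
	unsigned int skip = addr & 7;            /* bytes in the word before the string */
	uint64_t v = *w++;
	uint64_t zero;
	ptrdiff_t base;

	if (skip)
		v |= (1ULL << (8 * skip)) - 1;       /* force leading bytes non-zero (they sit in the low bits on little-endian) */

	for (;;) {
		zero = (v - ones) & ~v & highs;      /* high bit set in every zero byte */
		if (zero)
			break;
		v = *w++;
	}
	base = (const char *)(w - 1) - s;        /* may be negative for the first word */
	return (size_t)(base + (int)(__builtin_ctzll(zero) / 8));
}

Reading whole aligned words can touch up to seven bytes past the terminator; that is the same trade the assembly makes, and it is safe because an aligned 8-byte load never crosses a page boundary.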
/kernel/linux/linux-5.10/drivers/crypto/vmx/

  aesp8-ppc.pl
    121  my ($zero,$in0,$in1,$key,$rcon,$mask,$tmp)=map("v$_",(0..6));
    172  lvx $in0,0,$inp
    183  vperm $in0,$in0,$in1,$key # align [and byte swap in LE]
    201  vperm $key,$in0,$in0,$mask # rotate-n-splat
    202  vsldoi $tmp,$zero,$in0,12 # >>32
    203  vperm $outtail,$in0,$in0,$outperm # rotate
    210  vxor $in0,
    [all...]
/kernel/linux/linux-6.6/drivers/crypto/vmx/

  aesp8-ppc.pl
    121  my ($zero,$in0,$in1,$key,$rcon,$mask,$tmp)=map("v$_",(0..6));
    172  lvx $in0,0,$inp
    183  vperm $in0,$in0,$in1,$key # align [and byte swap in LE]
    201  vperm $key,$in0,$in0,$mask # rotate-n-splat
    202  vsldoi $tmp,$zero,$in0,12 # >>32
    203  vperm $outtail,$in0,$in0,$outperm # rotate
    210  vxor $in0,
    [all...]
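The aesp8-ppc.pl / aesp10-ppc.pl hits come from the key-expansion loop: the rotate-n-splat vperm broadcasts the last word of the previous round key, and each vsldoi $tmp,$zero,$in0,12 / vxor pair folds in a copy of the key shifted by one more 32-bit word. In scalar terms one AES-128 expansion round looks like the sketch below; the per-round word t (SubWord, RotWord, rcon) is taken as an input rather than reimplemented, since the vector code obtains it from the rotate-n-splat plus the AES hardware instructions. expand_round is an illustrative name.

#include <stdint.h>

/*
 * One AES-128 key-expansion round on four 32-bit words.
 * t must already be SubWord(RotWord(in[3])) ^ rcon; only the xor cascade
 * that the repeated vsldoi/vxor pairs implement is shown here.
 */
void expand_round(const uint32_t in[4], uint32_t out[4], uint32_t t)
{
	/*
	 * Each output word is the running xor of the input words plus t,
	 * i.e. key ^ (key >> 32) ^ (key >> 64) ^ (key >> 96) ^ splat(t)
	 * when the round key is viewed as a single 128-bit vector.
	 */
	out[0] = in[0] ^ t;
	out[1] = in[1] ^ out[0];
	out[2] = in[2] ^ out[1];
	out[3] = in[3] ^ out[2];
}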