/kernel/linux/linux-6.6/arch/riscv/lib/
  memmove.S
    37: * Reverse Copy: t4 - Index counter of dest
    54: add t4, a0, a2
    69: andi t6, t4, -SZREG
    163: sub a5, a4, t4 /* Find the difference between src and dest */
    198: addi t4, t4, (-2 * SZREG)
    202: REG_S t2, ( 1 * SZREG)(t4)
    204: beq t4, a2, 2f
    211: REG_S t2, ( 0 * SZREG)(t4)
    213: bne t4, t [all...]
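The memmove.S hits above are from the reverse-copy path, where t4 walks the destination downward so that overlapping buffers are copied back to front. A minimal C sketch of that idea, assuming a plain byte loop rather than the kernel's word-at-a-time code (memmove_sketch is a hypothetical name):

#include <stddef.h>

/* Hedged sketch: byte-wise memmove.  When dst overlaps the tail of src,
 * copy backwards from the end, mirroring the "index counter of dest"
 * that the RISC-V assembly keeps in t4.  Not the kernel routine. */
static void *memmove_sketch(void *dst, const void *src, size_t n)
{
        unsigned char *d = dst;
        const unsigned char *s = src;

        if (d == s || n == 0)
                return dst;

        if (d < s || d >= s + n) {
                while (n--)             /* no overlap hazard: copy forward */
                        *d++ = *s++;
        } else {
                d += n;                 /* reverse copy: start one past the end */
                s += n;
                while (n--)
                        *--d = *--s;
        }
        return dst;
}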
/kernel/linux/linux-5.10/arch/alpha/lib/
  ev67-strrchr.S
    36: insbl a1, 1, t4 # U : 000000000000ch00
    41: or t2, t4, a1 # E : 000000000000chch
    48: sll a1, 48, t4 # U : chch000000000000
    50: or t4, a1, a1 # E : chch00000000chch
    56: mskqh t5, a0, t4 # E : Complete garbage mask
    58: cmpbge zero, t4, t4 # E : bits set iff byte is garbage
    61: andnot t1, t4, t1 # E : clear garbage from null test
    62: andnot t3, t4, t3 # E : clear garbage from char test
    84: negq t1, t4 # [all...]
  strrchr.S
    31: lda t4, -1 # .. e1 : build garbage mask
    34: mskqh t4, a0, t4 # e0 :
    37: cmpbge zero, t4, t4 # .. e1 : bits set iff byte is garbage
    39: andnot t1, t4, t1 # .. e1 : clear garbage from null test
    40: andnot t3, t4, t3 # e0 : clear garbage from char test
    56: negq t1, t4 # e0 : isolate first null byte match
    57: and t1, t4, t4 # e [all...]
  strchr.S
    27: lda t4, -1 # .. e1 : build garbage mask
    30: mskqh t4, a0, t4 # e0 :
    33: cmpbge zero, t4, t4 # .. e1 : bits set iff byte is garbage
    38: andnot t0, t4, t0 # e0 : clear garbage bits
    58: and t0, 0xaa, t4 # e0 :
    61: cmovne t4, 1, t4 # .. e1 :
    63: addq v0, t4, v [all...]
  ev67-strchr.S
    40: lda t4, -1 # E : build garbage mask
    42: mskqh t4, a0, t4 # U : only want relevant part of first quad
    50: cmpbge zero, t4, t4 # E : bits set iff byte is garbage
    58: andnot t0, t4, t0 # E : clear garbage bits
/kernel/linux/linux-6.6/arch/alpha/lib/
  ev67-strrchr.S
    36: insbl a1, 1, t4 # U : 000000000000ch00
    41: or t2, t4, a1 # E : 000000000000chch
    48: sll a1, 48, t4 # U : chch000000000000
    50: or t4, a1, a1 # E : chch00000000chch
    56: mskqh t5, a0, t4 # E : Complete garbage mask
    58: cmpbge zero, t4, t4 # E : bits set iff byte is garbage
    61: andnot t1, t4, t1 # E : clear garbage from null test
    62: andnot t3, t4, t3 # E : clear garbage from char test
    84: negq t1, t4 # [all...]
  strrchr.S
    31: lda t4, -1 # .. e1 : build garbage mask
    34: mskqh t4, a0, t4 # e0 :
    37: cmpbge zero, t4, t4 # .. e1 : bits set iff byte is garbage
    39: andnot t1, t4, t1 # .. e1 : clear garbage from null test
    40: andnot t3, t4, t3 # e0 : clear garbage from char test
    56: negq t1, t4 # e0 : isolate first null byte match
    57: and t1, t4, t4 # e [all...]
  strchr.S
    27: lda t4, -1 # .. e1 : build garbage mask
    30: mskqh t4, a0, t4 # e0 :
    33: cmpbge zero, t4, t4 # .. e1 : bits set iff byte is garbage
    38: andnot t0, t4, t0 # e0 : clear garbage bits
    58: and t0, 0xaa, t4 # e0 :
    61: cmovne t4, 1, t4 # .. e1 :
    63: addq v0, t4, v [all...]
  ev67-strchr.S
    40: lda t4, -1 # E : build garbage mask
    42: mskqh t4, a0, t4 # U : only want relevant part of first quad
    50: cmpbge zero, t4, t4 # E : bits set iff byte is garbage
    58: andnot t0, t4, t0 # E : clear garbage bits
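All four Alpha string routines, in both kernel versions, rely on the trick their comments spell out: read the whole aligned quadword containing the start of the string, mark the bytes that precede the string as "garbage", and clear them out of the NUL and character-match tests. A hedged C rendering of that masking step, assuming 64-bit little-endian words (the real code uses mskqh/cmpbge/andnot; the helper names here are made up):

#include <stdint.h>

/* Bit i of the result is set iff byte i of w is zero; this plays the
 * role of "cmpbge zero, t1, t1" in the assembly. */
static unsigned int zero_byte_map(uint64_t w)
{
        unsigned int map = 0;

        for (int i = 0; i < 8; i++)
                if (((w >> (8 * i)) & 0xff) == 0)
                        map |= 1u << i;
        return map;
}

/* Hedged sketch of the "garbage mask" idea: load the aligned word that
 * contains s (the over-read stays inside that one word, as in the
 * assembly), then drop the bits belonging to bytes before s --
 * "bits set iff byte is garbage" ... "clear garbage from null test". */
static unsigned int first_word_null_map(const char *s)
{
        const uint64_t *p = (const uint64_t *)((uintptr_t)s & ~(uintptr_t)7);
        unsigned int garbage = (1u << ((uintptr_t)s & 7)) - 1;

        return zero_byte_map(*p) & ~garbage;
}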
/kernel/linux/linux-5.10/arch/ia64/lib/
  memcpy.S
    36: # define t4 r22
    190: sub t4=r0,dst // t4 = -dst
    194: shl t4=t4,3 // t4 = 8*(dst & 7)
    202: mov pr=t4,0x38 // (p5,p4,p3)=(dst & 7)
    225: mov t4=ip
    228: adds t4=.memcpy_loops-1b,t4 [all...]
/kernel/linux/linux-6.6/arch/ia64/lib/
  memcpy.S
    36: # define t4 r22
    190: sub t4=r0,dst // t4 = -dst
    194: shl t4=t4,3 // t4 = 8*(dst & 7)
    202: mov pr=t4,0x38 // (p5,p4,p3)=(dst & 7)
    225: mov t4=ip
    228: adds t4=.memcpy_loops-1b,t4 [all...]
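In memcpy.S, t4 first carries the destination misalignment turned into a bit count (8*(dst & 7)) and then predicate bits derived from dst & 7, which pick the copy strategy. A hedged sketch of the surrounding idea, aligning the destination first and then storing whole words; this is an illustration, not the ia64 routine, and memcpy_align_dst is a hypothetical name:

#include <stdint.h>
#include <stddef.h>

/* Hedged sketch: align the destination to 8 bytes byte-by-byte, then copy
 * whole 64-bit words.  The assembly derives the same "dst & 7" quantity
 * into t4, both as a byte count and (shifted left by 3) as a bit count
 * for merging unaligned source words. */
static void *memcpy_align_dst(void *dst, const void *src, size_t n)
{
        unsigned char *d = dst;
        const unsigned char *s = src;
        size_t head = (-(uintptr_t)d) & 7;      /* bytes until d is 8-byte aligned */

        if (head > n)
                head = n;
        n -= head;
        while (head--)
                *d++ = *s++;

        for (; n >= 8; n -= 8, d += 8, s += 8) {
                uint64_t w;

                __builtin_memcpy(&w, s, 8);     /* unaligned load from src (GCC/Clang builtin) */
                *(uint64_t *)d = w;             /* aligned store to dst */
        }

        while (n--)
                *d++ = *s++;
        return dst;
}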
/kernel/linux/linux-5.10/arch/x86/crypto/
  camellia-aesni-avx2-asm_64.S
    63: #define roundsm32(x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2, t3, t4, t5, t6, \
    68: vbroadcasti128 .Linv_shift_row, t4; \
    76: vpshufb t4, x0, x0; \
    77: vpshufb t4, x7, x7; \
    78: vpshufb t4, x3, x3; \
    79: vpshufb t4, x6, x6; \
    80: vpshufb t4, x2, x2; \
    81: vpshufb t4, x5, x5; \
    82: vpshufb t4, x1, x1; \
    83: vpshufb t4, x [all...]
  glue_helper-asm-avx2.S
    56: t1x, t2, t2x, t3, t3x, t4, t5) \
    59: vpaddq t0, t0, t4; /* ab: -2:0 ; cd: -2:0 */ \
    70: add2_le128(t2, t0, t4, t3, t5); /* ab: le2 ; cd: le3 */ \
    72: add2_le128(t2, t0, t4, t3, t5); \
    74: add2_le128(t2, t0, t4, t3, t5); \
    76: add2_le128(t2, t0, t4, t3, t5); \
    78: add2_le128(t2, t0, t4, t3, t5); \
    80: add2_le128(t2, t0, t4, t3, t5); \
    82: add2_le128(t2, t0, t4, t3, t5); \
  camellia-aesni-avx-asm_64.S
    51: #define roundsm16(x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2, t3, t4, t5, t6, \
    56: vmovdqa .Linv_shift_row, t4; \
    62: vpshufb t4, x0, x0; \
    63: vpshufb t4, x7, x7; \
    64: vpshufb t4, x1, x1; \
    65: vpshufb t4, x4, x4; \
    66: vpshufb t4, x2, x2; \
    67: vpshufb t4, x5, x5; \
    68: vpshufb t4, x3, x3; \
    69: vpshufb t4, x [all...]
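glue_helper-asm-avx2.S uses t4 while stepping 128-bit little-endian block counters for CTR mode, two counters per YMM register (add2_le128). A hedged scalar sketch of the underlying counter arithmetic (le128_add is a made-up name):

#include <stdint.h>

/* Hedged sketch: increment a 128-bit little-endian counter by `inc`,
 * propagating the carry from the low 64 bits into the high 64 bits.
 * The AVX2 macro does the equivalent for two counters at once. */
static void le128_add(uint64_t ctr[2], uint64_t inc)
{
        uint64_t lo = ctr[0] + inc;

        if (lo < ctr[0])        /* unsigned wrap-around => carry out */
                ctr[1]++;
        ctr[0] = lo;
}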
/kernel/linux/linux-6.6/arch/x86/crypto/
  camellia-aesni-avx2-asm_64.S
    62: #define roundsm32(x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2, t3, t4, t5, t6, \
    67: vbroadcasti128 .Linv_shift_row(%rip), t4; \
    75: vpshufb t4, x0, x0; \
    76: vpshufb t4, x7, x7; \
    77: vpshufb t4, x3, x3; \
    78: vpshufb t4, x6, x6; \
    79: vpshufb t4, x2, x2; \
    80: vpshufb t4, x5, x5; \
    81: vpshufb t4, x1, x1; \
    82: vpshufb t4, x [all...]
  camellia-aesni-avx-asm_64.S
    50: #define roundsm16(x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2, t3, t4, t5, t6, \
    55: vmovdqa .Linv_shift_row(%rip), t4; \
    61: vpshufb t4, x0, x0; \
    62: vpshufb t4, x7, x7; \
    63: vpshufb t4, x1, x1; \
    64: vpshufb t4, x4, x4; \
    65: vpshufb t4, x2, x2; \
    66: vpshufb t4, x5, x5; \
    67: vpshufb t4, x3, x3; \
    68: vpshufb t4, x [all...]
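In every Camellia variant listed, t4 holds the .Linv_shift_row constant and vpshufb applies it as a byte permutation to each state register x0..x7. A hedged C sketch of what one such 16-byte shuffle does (pshufb_16 is a hypothetical helper; the table contents are not reproduced here):

#include <stdint.h>

/* Hedged sketch of a vpshufb-style shuffle on one 16-byte lane:
 * out[i] = in[table[i] & 0x0f], or zero when the table byte has its top
 * bit set.  The Camellia code applies such a table, kept in t4, to each
 * of the x0..x7 state registers. */
static void pshufb_16(uint8_t out[16], const uint8_t in[16],
                      const uint8_t table[16])
{
        for (int i = 0; i < 16; i++)
                out[i] = (table[i] & 0x80) ? 0 : in[table[i] & 0x0f];
}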
/kernel/linux/linux-5.10/lib/zlib_dfltcc/
  dfltcc_util.h
    38: const Byte *t4 = op2 ? *op2 : NULL;  [local in dfltcc()]
    44: register const Byte *r4 __asm__("r4") = t4;  [in dfltcc()]
    60: t2 = r2; t3 = r3; t4 = r4; t5 = r5;  [in dfltcc()]
    67: *op2 = t4;  [in dfltcc()]
/kernel/linux/linux-6.6/lib/zlib_dfltcc/
  dfltcc_util.h
    38: const Byte *t4 = op2 ? *op2 : NULL;  [local in dfltcc()]
    44: register const Byte *r4 __asm__("r4") = t4;  [in dfltcc()]
    60: t2 = r2; t3 = r3; t4 = r4; t5 = r5;  [in dfltcc()]
    67: *op2 = t4;  [in dfltcc()]
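The dfltcc_util.h hits show the in/out handling around t4: the optional op2 pointer is read only when non-NULL, pinned to a specific machine register for the inline asm (register ... __asm__("r4")), and written back afterwards. A hedged sketch of just the optional-pointer part, with the hardware operation stubbed out (consume_sketch and its parameter handling are made up; Byte stands in for zlib's typedef):

#include <stddef.h>

typedef unsigned char Byte;

/* Hedged sketch: op2 may be NULL; when it is not, snapshot the caller's
 * pointer before the operation and publish the (possibly advanced)
 * pointer afterwards.  The real code additionally binds the snapshot to
 * register r4 around the s390 DFLTCC instruction, which is omitted here. */
static int consume_sketch(const Byte **op2, size_t *len2)
{
        const Byte *t4 = op2 ? *op2 : NULL;   /* snapshot the input pointer */

        if (t4 && len2 && *len2) {
                t4   += *len2;                /* pretend everything was consumed */
                *len2 = 0;
        }

        if (op2)
                *op2 = t4;                    /* write the advanced pointer back */
        return 0;
}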
/kernel/linux/linux-5.10/arch/mips/lib/
  csum_partial.S
    33: #define t4 $12  [define]
    182: CSUM_BIGCHUNK1(src, 0x00, sum, t0, t1, t3, t4)
    193: CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
    194: CSUM_BIGCHUNK(src, 0x20, sum, t0, t1, t3, t4)
    195: CSUM_BIGCHUNK(src, 0x40, sum, t0, t1, t3, t4)
    196: CSUM_BIGCHUNK(src, 0x60, sum, t0, t1, t3, t4)
    208: CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
    209: CSUM_BIGCHUNK(src, 0x20, sum, t0, t1, t3, t4)
    217: CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
    476: LOAD(t4, UNI [all...]
/kernel/linux/linux-6.6/arch/mips/lib/
  csum_partial.S
    33: #define t4 $12  [define]
    182: CSUM_BIGCHUNK1(src, 0x00, sum, t0, t1, t3, t4)
    193: CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
    194: CSUM_BIGCHUNK(src, 0x20, sum, t0, t1, t3, t4)
    195: CSUM_BIGCHUNK(src, 0x40, sum, t0, t1, t3, t4)
    196: CSUM_BIGCHUNK(src, 0x60, sum, t0, t1, t3, t4)
    208: CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
    209: CSUM_BIGCHUNK(src, 0x20, sum, t0, t1, t3, t4)
    217: CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
    476: LOAD(t4, UNI [all...]
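csum_partial.S unrolls the Internet-checksum inner loop over 32-byte blocks (CSUM_BIGCHUNK), with t4 as one of the scratch load/accumulate registers. A hedged C sketch of the underlying 16-bit one's-complement sum, without the unrolling or the add-with-carry tricks (csum_sketch is a made-up name):

#include <stdint.h>
#include <stddef.h>

/* Hedged sketch: straightforward one's-complement checksum over a byte
 * buffer.  The MIPS assembly reaches the same kind of result with word
 * loads, carry folding and 32-byte unrolled chunks. */
static uint16_t csum_sketch(const uint8_t *buf, size_t len)
{
        uint32_t sum = 0;

        while (len > 1) {               /* sum 16-bit words */
                sum += (uint32_t)buf[0] << 8 | buf[1];
                buf += 2;
                len -= 2;
        }
        if (len)                        /* trailing odd byte */
                sum += (uint32_t)buf[0] << 8;

        while (sum >> 16)               /* fold carries back into the low 16 bits */
                sum = (sum & 0xffff) + (sum >> 16);

        return (uint16_t)~sum;
}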
/kernel/linux/linux-6.6/arch/riscv/include/asm/
  compat.h
    50: compat_ulong_t t4;  [member]
    87: cregs->t4 = (compat_ulong_t) regs->t4;  [in regs_to_cregs()]
    124: regs->t4 = (unsigned long) cregs->t4;  [in cregs_to_regs()]
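compat.h declares a 32-bit t4 in the compat register frame and converts it in both directions: narrowing when a native pt_regs is copied into the compat layout, widening on the way back. A hedged sketch of that pattern with cut-down structs (only t4 is shown; the struct and function names here are hypothetical stand-ins, not the kernel definitions):

#include <stdint.h>

struct regs_sketch        { unsigned long t4; };  /* stands in for pt_regs */
struct compat_regs_sketch { uint32_t t4; };       /* stands in for the compat frame */

/* Narrow 64-bit register values into the 32-bit compat frame... */
static void regs_to_cregs_sketch(struct compat_regs_sketch *cregs,
                                 const struct regs_sketch *regs)
{
        cregs->t4 = (uint32_t)regs->t4;           /* keeps only the low 32 bits */
}

/* ...and zero-extend them back into the native layout. */
static void cregs_to_regs_sketch(struct regs_sketch *regs,
                                 const struct compat_regs_sketch *cregs)
{
        regs->t4 = (unsigned long)cregs->t4;
}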
/kernel/linux/linux-5.10/scripts/
  makelst
    27: t4=`field 1 $t3`
    29: t6=`printf "%lu" $((0x$t4 - 0x$t5))`
/kernel/linux/linux-6.6/scripts/
  makelst
    27: t4=`field 1 $t3`
    29: t6=`printf "%lu" $((0x$t4 - 0x$t5))`
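makelst extracts a hexadecimal address into t4 and subtracts another hex address from it in shell arithmetic ($((0x$t4 - 0x$t5))), printing the result in decimal. The same calculation in C, as a hedged illustration of what the script computes:

#include <stdio.h>
#include <stdlib.h>

/* Hedged sketch: parse two hex addresses and print their difference as an
 * unsigned decimal, mirroring the shell arithmetic in makelst. */
int main(int argc, char **argv)
{
        if (argc != 3)
                return 1;

        unsigned long t4 = strtoul(argv[1], NULL, 16);  /* e.g. a symbol address */
        unsigned long t5 = strtoul(argv[2], NULL, 16);  /* e.g. the section start */

        printf("%lu\n", t4 - t5);
        return 0;
}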
/kernel/linux/linux-5.10/arch/x86/include/asm/
  syscall_wrapper.h
    63: #define SYSCALL_PT_ARG6(m, t1, t2, t3, t4, t5, t6) \
    64: SYSCALL_PT_ARG5(m, t1, t2, t3, t4, t5), m(t6, (regs->bp))
    65: #define SYSCALL_PT_ARG5(m, t1, t2, t3, t4, t5) \
    66: SYSCALL_PT_ARG4(m, t1, t2, t3, t4), m(t5, (regs->di))
    67: #define SYSCALL_PT_ARG4(m, t1, t2, t3, t4) \
    68: SYSCALL_PT_ARG3(m, t1, t2, t3), m(t4, (regs->si))
/kernel/linux/linux-6.6/arch/x86/include/asm/
  syscall_wrapper.h
    63: #define SYSCALL_PT_ARG6(m, t1, t2, t3, t4, t5, t6) \
    64: SYSCALL_PT_ARG5(m, t1, t2, t3, t4, t5), m(t6, (regs->bp))
    65: #define SYSCALL_PT_ARG5(m, t1, t2, t3, t4, t5) \
    66: SYSCALL_PT_ARG4(m, t1, t2, t3, t4), m(t5, (regs->di))
    67: #define SYSCALL_PT_ARG4(m, t1, t2, t3, t4) \
    68: SYSCALL_PT_ARG3(m, t1, t2, t3), m(t4, (regs->si))
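syscall_wrapper.h builds the syscall argument list recursively: each SYSCALL_PT_ARGn level expands the previous one and appends m(type, pt_regs-field), so ARG6 ends up naming six register slots (si, di and bp carry arguments 4-6 in the lines shown; the first three fields below follow the ia32 convention, which the snippet does not show). A hedged, cut-down sketch of the same macro pattern (struct and macro names here are stand-ins, not the kernel's):

#include <stdio.h>

/* Hedged sketch: a reduced stand-in for pt_regs and the SYSCALL_PT_ARG*
 * chain.  Each level peels one argument off and names the field it comes
 * from, so PT_ARG6 expands to six m(type, field) pairs. */
struct regs_sketch { long bx, cx, dx, si, di, bp; };

#define PT_ARG3(m, t1, t2, t3) \
        m(t1, (regs->bx)), m(t2, (regs->cx)), m(t3, (regs->dx))
#define PT_ARG4(m, t1, t2, t3, t4) \
        PT_ARG3(m, t1, t2, t3), m(t4, (regs->si))
#define PT_ARG5(m, t1, t2, t3, t4, t5) \
        PT_ARG4(m, t1, t2, t3, t4), m(t5, (regs->di))
#define PT_ARG6(m, t1, t2, t3, t4, t5, t6) \
        PT_ARG5(m, t1, t2, t3, t4, t5), m(t6, (regs->bp))

#define CAST_ARG(t, v) (t)(v)          /* the "m" callback: cast a field to its type */

static long sum6(long a, long b, long c, long d, long e, long f)
{
        return a + b + c + d + e + f;
}

int main(void)
{
        struct regs_sketch r = { 1, 2, 3, 4, 5, 6 };
        struct regs_sketch *regs = &r;

        /* Expands to sum6((long)(regs->bx), ..., (long)(regs->bp)) */
        printf("%ld\n", sum6(PT_ARG6(CAST_ARG, long, long, long, long, long, long)));
        return 0;
}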