/kernel/linux/linux-6.6/arch/loongarch/net/
  bpf_jit.h
    204  enum loongarch_gpr rd, int jmp_offset)  in cond_jmp_offset()
    208  /* PC += jmp_offset if rj == rd */  in cond_jmp_offset()
    209  emit_insn(ctx, beq, rj, rd, jmp_offset);  in cond_jmp_offset()
    213  /* PC += jmp_offset if rj != rd */  in cond_jmp_offset()
    214  emit_insn(ctx, bne, rj, rd, jmp_offset);  in cond_jmp_offset()
    217  /* PC += jmp_offset if rj > rd (unsigned) */  in cond_jmp_offset()
    218  emit_insn(ctx, bltu, rd, rj, jmp_offset);  in cond_jmp_offset()
    221  /* PC += jmp_offset if rj < rd (unsigned) */  in cond_jmp_offset()
    222  emit_insn(ctx, bltu, rj, rd, jmp_offset);  in cond_jmp_offset()
    225  /* PC += jmp_offset i  in cond_jmp_offset()
    203  cond_jmp_offset(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj, enum loongarch_gpr rd, int jmp_offset)  cond_jmp_offset() argument
    251  cond_jmp_offs26(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj, enum loongarch_gpr rd, int jmp_offset)  cond_jmp_offs26() argument
    259  uncond_jmp_offs26(struct jit_ctx *ctx, int jmp_offset)  uncond_jmp_offs26() argument
    264  emit_cond_jmp(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj, enum loongarch_gpr rd, int jmp_offset)  emit_cond_jmp() argument
    286  emit_uncond_jmp(struct jit_ctx *ctx, int jmp_offset)  emit_uncond_jmp() argument
    296  emit_tailcall_jmp(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj, enum loongarch_gpr rd, int jmp_offset)  emit_tailcall_jmp() argument
    [all...]
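The hits above show cond_jmp_offset() mapping each BPF comparison onto a single native LoongArch branch, swapping operands when no direct instruction exists (an unsigned "rj > rd" is emitted as bltu rd, rj). A minimal standalone sketch of that dispatch follows; the enum, struct, and function names here are illustrative, not the kernel's.

```c
/*
 * Standalone sketch (not kernel code): models how a cond_jmp_offset()-style
 * helper can map a BPF condition onto LoongArch's beq/bne/bltu branches,
 * swapping rj/rd when the condition has no direct encoding.
 */
#include <stdbool.h>
#include <stdio.h>

enum bpf_cond  { COND_EQ, COND_NE, COND_GTU, COND_LTU };
enum la_branch { LA_BEQ, LA_BNE, LA_BLTU };

struct branch_pick {
	enum la_branch insn;
	bool swap_operands;	/* emit branch with (rd, rj) instead of (rj, rd) */
};

static struct branch_pick pick_branch(enum bpf_cond cond)
{
	switch (cond) {
	case COND_EQ:  return (struct branch_pick){ LA_BEQ,  false };	/* beq  rj, rd */
	case COND_NE:  return (struct branch_pick){ LA_BNE,  false };	/* bne  rj, rd */
	case COND_GTU: return (struct branch_pick){ LA_BLTU, true  };	/* bltu rd, rj */
	case COND_LTU: return (struct branch_pick){ LA_BLTU, false };	/* bltu rj, rd */
	}
	return (struct branch_pick){ LA_BEQ, false };
}

int main(void)
{
	struct branch_pick p = pick_branch(COND_GTU);

	printf("insn=%d swap=%d\n", p.insn, p.swap_operands);	/* insn=2 swap=1 */
	return 0;
}
```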
  bpf_jit.c
    218  #define jmp_offset (out_offset - (cur_offset))  in emit_bpf_tail_call() macro
    230  /* bgeu $a2, $t1, jmp_offset */  in emit_bpf_tail_call()
    231  if (emit_tailcall_jmp(ctx, BPF_JGE, a2, t1, jmp_offset) < 0)  in emit_bpf_tail_call()
    239  if (emit_tailcall_jmp(ctx, BPF_JSLT, REG_TCC, LOONGARCH_GPR_ZERO, jmp_offset) < 0)  in emit_bpf_tail_call()
    250  /* beq $t2, $zero, jmp_offset */  in emit_bpf_tail_call()
    251  if (emit_tailcall_jmp(ctx, BPF_JEQ, t2, LOONGARCH_GPR_ZERO, jmp_offset) < 0)  in emit_bpf_tail_call()
    274  #undef jmp_offset  in emit_bpf_tail_call() macro
    455  int ret, jmp_offset;  in build_insn() local
    752  jmp_offset = bpf2la_offset(i, off, ctx);  in build_insn()
    762  if (emit_cond_jmp(ctx, cond, t1, t2, jmp_offset) <  in build_insn()
    [all...]
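The emit_bpf_tail_call() hits show three guard branches that all reuse the same forward distance, the jmp_offset macro (out_offset minus cur_offset), to reach the common "out" label. The sketch below restates those checks as plain run-time C; the struct and field names are illustrative, and the exact placement of the tail-call-counter decrement is an assumption.

```c
/*
 * Standalone sketch (not kernel code) of the run-time checks that the
 * tail-call sequence above encodes.  Each early return corresponds to one
 * of the forward branches to the "out" label.
 */
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct toy_prog_array {
	uint32_t max_entries;
	void **progs;
};

static bool tail_call_taken(const struct toy_prog_array *arr, uint32_t index,
			    int32_t *tcc /* tail-call counter register */)
{
	/* bgeu $a2, $t1, out: index out of range */
	if (index >= arr->max_entries)
		return false;

	/* BPF_JSLT against $zero: too many chained tail calls (decrement assumed here) */
	if (--(*tcc) < 0)
		return false;

	/* beq $t2, $zero, out: no program installed at this slot */
	if (arr->progs[index] == NULL)
		return false;

	/* otherwise control transfers into the target program's body */
	return true;
}

int main(void)
{
	void *progs[4] = { (void *)1, NULL, (void *)1, NULL };
	struct toy_prog_array arr = { .max_entries = 4, .progs = progs };
	int32_t tcc = 33;

	printf("taken=%d\n", tail_call_taken(&arr, 0, &tcc));	/* taken=1 */
	printf("taken=%d\n", tail_call_taken(&arr, 9, &tcc));	/* taken=0: out of range */
	return 0;
}
```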
/kernel/linux/linux-5.10/arch/loongarch/net/
  ebpf_jit.h
    713  enum loongarch_gpr rd, int jmp_offset)  in cond_jump_offs16()
    717  /* PC += jmp_offset if rj == rd */  in cond_jump_offs16()
    718  emit_insn(ctx, beq, rj, rd, jmp_offset);  in cond_jump_offs16()
    722  /* PC += jmp_offset if rj != rd */  in cond_jump_offs16()
    723  emit_insn(ctx, bne, rj, rd, jmp_offset);  in cond_jump_offs16()
    726  /* PC += jmp_offset if rj > rd (unsigned) */  in cond_jump_offs16()
    727  emit_insn(ctx, bltu, rd, rj, jmp_offset);  in cond_jump_offs16()
    730  /* PC += jmp_offset if rj < rd (unsigned) */  in cond_jump_offs16()
    731  emit_insn(ctx, bltu, rj, rd, jmp_offset);  in cond_jump_offs16()
    734  /* PC += jmp_offset i  in cond_jump_offs16()
    712  cond_jump_offs16(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj, enum loongarch_gpr rd, int jmp_offset)  cond_jump_offs16() argument
    760  cond_jump_offs26(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj, enum loongarch_gpr rd, int jmp_offset)  cond_jump_offs26() argument
    768  cond_jump_offs32(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj, enum loongarch_gpr rd, int jmp_offset)  cond_jump_offs32() argument
    789  uncond_jump_offs26(struct jit_ctx *ctx, int jmp_offset)  uncond_jump_offs26() argument
    794  uncond_jump_offs32(struct jit_ctx *ctx, int jmp_offset, bool is_exit)  uncond_jump_offs32() argument
    814  emit_cond_jump(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj, enum loongarch_gpr rd, int jmp_offset)  emit_cond_jump() argument
    825  emit_uncond_jump(struct jit_ctx *ctx, int jmp_offset, bool is_exit)  emit_uncond_jump() argument
    833  emit_tailcall_jump(struct jit_ctx *ctx, u8 cond, enum loongarch_gpr rj, enum loongarch_gpr rd, int jmp_offset)  emit_tailcall_jump() argument
    [all...]
  ebpf_jit.c
    230  #define jmp_offset (out_offset - (cur_offset))  in emit_bpf_tail_call() macro
    242  /* bgeu $a2, $t1, jmp_offset */  in emit_bpf_tail_call()
    243  emit_tailcall_jump(ctx, BPF_JGE, a2, tmp1, jmp_offset);  in emit_bpf_tail_call()
    250  emit_tailcall_jump(ctx, BPF_JSLT, tcc, LOONGARCH_GPR_ZERO, jmp_offset);  in emit_bpf_tail_call()
    261  /* beq $t2, $zero, jmp_offset */  in emit_bpf_tail_call()
    262  emit_tailcall_jump(ctx, BPF_JEQ, tmp2, LOONGARCH_GPR_ZERO, jmp_offset);  in emit_bpf_tail_call()
    281  #undef jmp_offset  in emit_bpf_tail_call() macro
    296  int jmp_offset;  in build_insn() local
    553  jmp_offset = bpf2la_offset(i, off, ctx);  in build_insn()
    554  emit_cond_jump(ctx, cond, dst, src, jmp_offset);  in build_insn()
    [all...]
/kernel/linux/linux-6.6/arch/arm64/net/
  bpf_jit_comp.c
    390  #define jmp_offset (out_offset - (cur_offset))  in emit_bpf_tail_call() macro
    401  emit(A64_B_(A64_COND_CS, jmp_offset), ctx);  in emit_bpf_tail_call()
    410  emit(A64_B_(A64_COND_CS, jmp_offset), ctx);  in emit_bpf_tail_call()
    422  emit(A64_CBZ(1, prg, jmp_offset), ctx);  in emit_bpf_tail_call()
    442  #undef jmp_offset  in emit_bpf_tail_call() macro
    529  s32 jmp_offset;  in emit_ll_sc_atomic() local
    552  jmp_offset = -3;  in emit_ll_sc_atomic()
    553  check_imm19(jmp_offset);  in emit_ll_sc_atomic()
    554  emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);  in emit_ll_sc_atomic()
    573  jmp_offset  in emit_ll_sc_atomic()
    777  s32 jmp_offset;  build_insn() local
    [all...]
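In the emit_ll_sc_atomic() hits, jmp_offset = -3 is the backward distance, in instructions, of the CBNZ that retries the load-exclusive / operate / store-exclusive sequence, and check_imm19() confirms it fits the signed 19-bit CBNZ immediate. A minimal standalone sketch of that distance computation and range check, with illustrative helper names:

```c
/*
 * Standalone sketch (not kernel code): computing and validating the
 * backward branch distance used by an LL/SC retry loop.  AArch64 CBNZ/CBZ
 * encode a signed 19-bit offset counted in instructions relative to the
 * branch itself, so -3 hops back over LDXR / <op> / STXR.
 */
#include <stdbool.h>
#include <stdio.h>

/* True if v fits in a signed immediate of 'bits' bits (check_imm19-style). */
static bool fits_simm(long v, unsigned int bits)
{
	long max = (1L << (bits - 1)) - 1;
	long min = -(1L << (bits - 1));

	return v >= min && v <= max;
}

/* Branch distance in instructions from the branch slot to the target slot. */
static long branch_ninsns(unsigned int branch_idx, unsigned int target_idx)
{
	return (long)target_idx - (long)branch_idx;
}

int main(void)
{
	/* e.g. LDXR at slot 10, op at 11, STXR at 12, CBNZ at 13 -> back to 10 */
	long off = branch_ninsns(13, 10);

	printf("offset=%ld fits_imm19=%d\n", off, fits_simm(off, 19));
	return 0;
}
```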
/kernel/linux/linux-5.10/arch/arm64/net/
  bpf_jit_comp.c
    277  #define jmp_offset (out_offset - (cur_offset))  in emit_bpf_tail_call() macro
    288  emit(A64_B_(A64_COND_CS, jmp_offset), ctx);  in emit_bpf_tail_call()
    296  emit(A64_B_(A64_COND_HI, jmp_offset), ctx);  in emit_bpf_tail_call()
    308  emit(A64_CBZ(1, prg, jmp_offset), ctx);  in emit_bpf_tail_call()
    328  #undef jmp_offset  in emit_bpf_tail_call() macro
    442  s32 jmp_offset;  in build_insn() local
    648  jmp_offset = bpf2a64_offset(i, off, ctx);  in build_insn()
    649  check_imm26(jmp_offset);  in build_insn()
    650  emit(A64_B(jmp_offset), ctx);  in build_insn()
    675  jmp_offset  in build_insn()
    [all...]
/kernel/linux/linux-6.6/arch/x86/net/
  bpf_jit_comp32.c
    1558  * the jmp_offset relative to the jit-insn address immediately
    1579  s64 jmp_offset;  in emit_kfunc_call() local
    1631  jmp_offset = (u8 *)__bpf_call_base + insn->imm - end_addr;  in emit_kfunc_call()
    1632  if (!is_simm32(jmp_offset)) {  in emit_kfunc_call()
    1633  pr_err("unsupported BPF kernel function jmp_offset:%lld\n",  in emit_kfunc_call()
    1634  jmp_offset);  in emit_kfunc_call()
    1638  EMIT1_off32(0xE8, jmp_offset);  in emit_kfunc_call()
    1681  s64 jmp_offset;  in do_jit() local
    2107  jmp_offset = func - (image + addrs[i]);  in do_jit()
    2109  if (!imm32 || !is_simm32(jmp_offset)) {  in do_jit()
    [all...]
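The emit_kfunc_call() hits build a rel32 displacement for an 0xE8 call and reject it with is_simm32() when the target is out of reach. The sketch below shows that computation for the standard "call rel32" form, whose displacement is measured from the byte after the 5-byte instruction; the helper names and sample addresses are illustrative.

```c
/*
 * Standalone sketch (not kernel code): computing and range-checking a
 * rel32 CALL displacement the way the is_simm32(jmp_offset) check above
 * does before EMIT1_off32(0xE8, ...).
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool is_simm32_value(int64_t v)
{
	return v == (int64_t)(int32_t)v;
}

/*
 * Returns true and stores the displacement if target is reachable from a
 * rel32 call emitted at call_insn_addr (5 bytes: opcode + imm32).
 */
static bool rel32_call_disp(uint64_t call_insn_addr, uint64_t target,
			    int32_t *disp)
{
	int64_t off = (int64_t)(target - (call_insn_addr + 5));

	if (!is_simm32_value(off))
		return false;	/* mirrors the is_simm32(jmp_offset) rejection */
	*disp = (int32_t)off;
	return true;
}

int main(void)
{
	int32_t disp;

	if (rel32_call_disp(0xffffffff81000000ull, 0xffffffff81002000ull, &disp))
		printf("emit: E8 %08x\n", (uint32_t)disp);
	return 0;
}
```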
  bpf_jit_comp.c
    1064  s64 jmp_offset;  in do_jit() local
    1498  /* populate jmp_offset for JAE above to jump to start_of_ldx */  in do_jit()
    1513  /* populate jmp_offset for JMP above */  in do_jit()
    1783  jmp_offset = addrs[i + insn->off] - addrs[i];  in do_jit()
    1784  if (is_imm8(jmp_offset)) {  in do_jit()
    1786  /* To keep the jmp_offset valid, the extra bytes are  in do_jit()
    1809  EMIT2(jmp_cond, jmp_offset);  in do_jit()
    1810  } else if (is_simm32(jmp_offset)) {  in do_jit()
    1811  EMIT2_off32(0x0F, jmp_cond + 0x10, jmp_offset);  in do_jit()
    1813  pr_err("cond_jmp gen bug %llx\n", jmp_offset);  in do_jit()
    [all...]
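The do_jit() hits pick between the 2-byte short conditional jump (opcode 0x70+cc with an imm8) and the 6-byte near form (0x0F, 0x80+cc with an imm32), which is why the near opcode is jmp_cond + 0x10. A minimal standalone sketch of that encoding split, mirroring the is_imm8()/is_simm32() decision; the buffer-emitting helper here is illustrative.

```c
/*
 * Standalone sketch (not kernel code): choosing between the short (imm8)
 * and near (imm32) encodings of an x86 conditional jump.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static bool fits_imm8(int64_t v)   { return v == (int8_t)v; }
static bool fits_simm32(int64_t v) { return v == (int32_t)v; }

/* Append a Jcc to buf; jmp_cond is the short-form opcode (0x70 | cc). */
static int emit_jcc(uint8_t *buf, uint8_t jmp_cond, int64_t jmp_offset)
{
	if (fits_imm8(jmp_offset)) {
		buf[0] = jmp_cond;			/* e.g. 0x74 = JE rel8 */
		buf[1] = (uint8_t)(int8_t)jmp_offset;
		return 2;
	}
	if (fits_simm32(jmp_offset)) {
		int32_t rel32 = (int32_t)jmp_offset;

		buf[0] = 0x0f;				/* two-byte opcode escape */
		buf[1] = jmp_cond + 0x10;		/* e.g. 0x84 = JE rel32 */
		memcpy(&buf[2], &rel32, sizeof(rel32));
		return 6;
	}
	return -1;	/* "cond_jmp gen bug": offset cannot be encoded */
}

int main(void)
{
	uint8_t buf[6];
	int n = emit_jcc(buf, 0x74 /* JE */, 300);

	printf("emitted %d bytes, first=%02x\n", n, buf[0]);	/* 6 bytes, 0f */
	return 0;
}
```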
/kernel/linux/linux-5.10/arch/arm/net/
  bpf_jit_32.c
    1180  #define jmp_offset (out_offset - (cur_offset) - 2)  in emit_bpf_tail_call() macro
    1198  _emit(ARM_COND_CS, ARM_B(jmp_offset), ctx);  in emit_bpf_tail_call()
    1211  _emit(ARM_COND_HI, ARM_B(jmp_offset), ctx);  in emit_bpf_tail_call()
    1225  _emit(ARM_COND_EQ, ARM_B(jmp_offset), ctx);  in emit_bpf_tail_call()
    1245  #undef jmp_offset  in emit_bpf_tail_call() macro
    1376  s32 jmp_offset;  in build_insn() local
    1743  jmp_offset = bpf2a32_offset(i+off, i, ctx);  in build_insn()
    1747  _emit(ARM_COND_NE, ARM_B(jmp_offset), ctx);  in build_insn()
    1750  _emit(ARM_COND_EQ, ARM_B(jmp_offset), ctx);  in build_insn()
    1753  _emit(ARM_COND_HI, ARM_B(jmp_offset), ct  in build_insn()
    [all...]
/kernel/linux/linux-6.6/arch/arm/net/
  bpf_jit_32.c
    1164  #define jmp_offset (out_offset - (cur_offset) - 2)  in emit_bpf_tail_call() macro
    1182  _emit(ARM_COND_CS, ARM_B(jmp_offset), ctx);  in emit_bpf_tail_call()
    1196  _emit(ARM_COND_CS, ARM_B(jmp_offset), ctx);  in emit_bpf_tail_call()
    1210  _emit(ARM_COND_EQ, ARM_B(jmp_offset), ctx);  in emit_bpf_tail_call()
    1230  #undef jmp_offset  in emit_bpf_tail_call() macro
    1361  s32 jmp_offset;  in build_insn() local
    1727  jmp_offset = bpf2a32_offset(i+off, i, ctx);  in build_insn()
    1731  _emit(ARM_COND_NE, ARM_B(jmp_offset), ctx);  in build_insn()
    1734  _emit(ARM_COND_EQ, ARM_B(jmp_offset), ctx);  in build_insn()
    1737  _emit(ARM_COND_HI, ARM_B(jmp_offset), ct  in build_insn()
    [all...]
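Unlike the other JITs, the 32-bit ARM jmp_offset macro subtracts 2: the B instruction's signed 24-bit immediate counts words relative to PC, and at execution time PC reads as the branch address plus 8 bytes, two instructions ahead. A minimal standalone sketch of that arithmetic, with illustrative function names:

```c
/*
 * Standalone sketch (not kernel code): why an ARM branch offset is
 * "target - branch - 2" in instruction words, plus the imm24 range check.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Word offset to encode for a branch at slot branch_idx targeting target_idx. */
static int32_t arm_b_word_offset(int32_t branch_idx, int32_t target_idx)
{
	return target_idx - branch_idx - 2;	/* -2: PC reads 2 insns ahead */
}

static bool fits_imm24(int32_t v)
{
	return v >= -(1 << 23) && v < (1 << 23);
}

int main(void)
{
	/* branch at slot 10, target ("out" label) at slot 30 */
	int32_t off = arm_b_word_offset(10, 30);

	printf("imm24=%d ok=%d\n", off, fits_imm24(off));	/* imm24=18 ok=1 */
	return 0;
}
```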
/kernel/linux/linux-5.10/arch/x86/net/
  bpf_jit_comp32.c
    1500  s64 jmp_offset;  in do_jit() local
    1914  jmp_offset = func - (image + addrs[i]);  in do_jit()
    1916  if (!imm32 || !is_simm32(jmp_offset)) {  in do_jit()
    1934  EMIT1_off32(0xE8, jmp_offset + 9);  in do_jit()
    2172  jmp_offset = addrs[i + insn->off] - addrs[i];  in do_jit()
    2173  if (is_imm8(jmp_offset)) {  in do_jit()
    2174  EMIT2(jmp_cond, jmp_offset);  in do_jit()
    2175  } else if (is_simm32(jmp_offset)) {  in do_jit()
    2176  EMIT2_off32(0x0F, jmp_cond + 0x10, jmp_offset);  in do_jit()
    2178  pr_err("cond_jmp gen bug %llx\n", jmp_offset);  in do_jit()
    [all...]
  bpf_jit_comp.c
    817  s64 jmp_offset;  in do_jit() local
    1414  jmp_offset = addrs[i + insn->off] - addrs[i];  in do_jit()
    1415  if (is_imm8(jmp_offset)) {  in do_jit()
    1416  EMIT2(jmp_cond, jmp_offset);  in do_jit()
    1417  } else if (is_simm32(jmp_offset)) {  in do_jit()
    1418  EMIT2_off32(0x0F, jmp_cond + 0x10, jmp_offset);  in do_jit()
    1420  pr_err("cond_jmp gen bug %llx\n", jmp_offset);  in do_jit()
    1434  jmp_offset = -2;  in do_jit()
    1436  jmp_offset = addrs[i + insn->off] - addrs[i];  in do_jit()
    1438  if (!jmp_offset)  in do_jit()
    [all...]
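The `addrs[i + insn->off] - addrs[i]` hits are where a branch distance counted in BPF instructions becomes an x86 byte displacement, and the `if (!jmp_offset)` hit is the case where the jump would land on the very next byte and can be skipped. A minimal standalone sketch of that lookup, assuming addrs[i] records the byte offset just past the code emitted for BPF instruction i (which is also the point x86 relative jumps are measured from):

```c
/*
 * Standalone sketch (not kernel code): translating a BPF jump offset into
 * an x86 byte displacement via an addrs[]-style table.
 */
#include <stdio.h>

/*
 * Byte displacement for a jump emitted at the end of BPF insn i whose BPF
 * target is insn i + off + 1.  A result of 0 means the jump is a nop and
 * the emitter can simply not emit it.
 */
static int bpf_to_x86_disp(const int *addrs, int i, int off)
{
	return addrs[i + off] - addrs[i];
}

int main(void)
{
	/* toy layout: insn 0 ends at byte 4, insn 1 at 9, insn 2 at 9, insn 3 at 15 */
	int addrs[] = { 4, 9, 9, 15 };

	printf("disp=%d\n", bpf_to_x86_disp(addrs, 1, 2));	/* 15 - 9 = 6 */
	printf("disp=%d (nop jump, skip emit)\n", bpf_to_x86_disp(addrs, 1, 1));
	return 0;
}
```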
/kernel/linux/linux-5.10/arch/x86/kernel/
  ftrace.c
    323  unsigned long jmp_offset;  in create_trampoline() local
    341  jmp_offset = (unsigned long)ftrace_regs_caller_jmp;  in create_trampoline()
    347  jmp_offset = 0;  in create_trampoline()
    380  ip = trampoline + (jmp_offset - start_offset);  in create_trampoline()
/kernel/linux/linux-6.6/arch/x86/kernel/
  ftrace.c
    319  unsigned long jmp_offset;  in create_trampoline() local
    337  jmp_offset = (unsigned long)ftrace_regs_caller_jmp;  in create_trampoline()
    343  jmp_offset = 0;  in create_trampoline()
    374  ip = trampoline + (jmp_offset - start_offset);  in create_trampoline()
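Here jmp_offset is not a branch distance but the address of a label (ftrace_regs_caller_jmp) inside the routine being copied; `trampoline + (jmp_offset - start_offset)` rebases that label into the freshly allocated trampoline. A minimal standalone sketch of that rebasing, using an ordinary byte buffer in place of the copied code; the buffer and label names are illustrative.

```c
/*
 * Standalone sketch (not kernel code): translate an address inside an
 * original routine into the matching address inside a copy of it.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Rebase an address that lies inside [src, src+len) into the copy at dst. */
static void *rebase_into_copy(void *dst, const void *src, const void *label)
{
	uintptr_t delta = (uintptr_t)label - (uintptr_t)src;

	return (uint8_t *)dst + delta;	/* trampoline + (jmp_offset - start_offset) */
}

int main(void)
{
	uint8_t routine[64] = { 0 };		/* stand-in for the original code */
	uint8_t trampoline[64];
	const uint8_t *label = &routine[40];	/* stand-in for the jmp label */
	uint8_t *ip;

	memcpy(trampoline, routine, sizeof(routine));
	ip = rebase_into_copy(trampoline, routine, label);
	printf("label offset in copy: %td\n", ip - trampoline);	/* 40 */
	return 0;
}
```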
/kernel/linux/linux-6.6/arch/riscv/net/
  bpf_jit_comp64.c
    487  int jmp_offset;  in emit_atomic() local
    556  jmp_offset = ninsns_rvoff(8);  in emit_atomic()
    557  emit(rv_bne(RV_REG_T2, r0, jmp_offset >> 1), ctx);  in emit_atomic()
    560  jmp_offset = ninsns_rvoff(-6);  in emit_atomic()
    561  emit(rv_bne(RV_REG_T3, 0, jmp_offset >> 1), ctx);  in emit_atomic()
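The emit_atomic() hits show the unit conversions around a compare-and-swap style LR/SC loop: one bne skips forward, the other loops back on store-conditional failure. The sketch below restates that conversion under the assumption that the RISC-V JIT tracks code positions in 2-byte units (so compressed instructions count as one unit), that ninsns_rvoff() turns such a count into a byte offset, and that the branch-emitting helper wants its immediate back in 2-byte units, hence the `>> 1`. The helper names here mirror that assumption and are illustrative.

```c
/*
 * Standalone sketch (not kernel code) of the offset unit juggling in the
 * emit_atomic() hits above.
 */
#include <stdio.h>

/* Convert a position delta counted in 2-byte units into a byte offset. */
static int toy_ninsns_rvoff(int ninsns)
{
	return ninsns << 1;
}

/* What a branch-emitting helper would receive: the offset in 2-byte units. */
static int toy_branch_imm(int byte_offset)
{
	return byte_offset >> 1;	/* mirrors the jmp_offset >> 1 above */
}

int main(void)
{
	int fwd = toy_ninsns_rvoff(8);		/* e.g. skip forward when the compare fails */
	int back = toy_ninsns_rvoff(-6);	/* e.g. loop back to retry when SC fails */

	printf("forward: %d bytes, imm %d\n", fwd, toy_branch_imm(fwd));
	printf("backward: %d bytes, imm %d\n", back, toy_branch_imm(back));
	return 0;
}
```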