Lines matching references to "ctx" in the arm64 eBPF JIT (arch/arm64/net/bpf_jit_comp.c). Match groups are ordered by source line; unnumbered annotations and standalone C sketches are interleaved after the groups they describe.

92 static inline void emit(const u32 insn, struct jit_ctx *ctx)
94 if (ctx->image != NULL)
95 ctx->image[ctx->idx] = cpu_to_le32(insn);
97 ctx->idx++;
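
emit() is the collection point for every generated instruction, and its NULL-image check is what makes the JIT's two passes work: pass 1 runs the full generator with ctx->image == NULL purely to count instructions and fill the offset map, pass 2 re-runs it into the allocated buffer; cpu_to_le32() keeps the image little-endian even on a big-endian kernel. A minimal user-space model of the idiom (a standalone sketch, not the kernel code):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct jit_ctx {
    uint32_t *image;    /* NULL during the sizing pass */
    int idx;            /* next instruction slot */
};

/* Model of emit(): store on the real pass, count on both passes. */
static void emit(uint32_t insn, struct jit_ctx *ctx)
{
    if (ctx->image != NULL)
        ctx->image[ctx->idx] = insn;   /* kernel adds cpu_to_le32() */
    ctx->idx++;
}

int main(void)
{
    struct jit_ctx ctx = { .image = NULL, .idx = 0 };

    emit(0xd503201f, &ctx);            /* NOP */
    emit(0xd65f03c0, &ctx);            /* RET */

    /* Pass 1 complete: ctx.idx is the image size in instructions. */
    ctx.image = calloc(ctx.idx, sizeof(uint32_t));
    ctx.idx = 0;
    emit(0xd503201f, &ctx);
    emit(0xd65f03c0, &ctx);

    printf("emitted %d instructions\n", ctx.idx);
    free(ctx.image);
    return 0;
}
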
101 const s32 val, struct jit_ctx *ctx)
108 emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx);
110 emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx);
112 emit(A64_MOVK(is64, reg, lo, 0), ctx);
115 emit(A64_MOVZ(is64, reg, lo, 0), ctx);
117 emit(A64_MOVK(is64, reg, hi, 16), ctx);
130 struct jit_ctx *ctx)
137 return emit_a64_mov_i(0, reg, (u32)val, ctx);
143 emit(A64_MOVN(1, reg, (rev_tmp >> shift) & 0xffff, shift), ctx);
145 emit(A64_MOVZ(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx);
149 emit(A64_MOVK(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx);
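
emit_a64_mov_i() builds a 32-bit immediate in at most two instructions, switching to MOVN when the high half is all-ones; emit_a64_mov_i64() generalizes this to 64 bits by scanning 16-bit chunks, and (as the MOVN/MOVZ pair above shows) starts from the inverted value when that needs fewer instructions. A simplified, MOVZ-only plan of the decomposition (standalone sketch; the kernel also weighs the MOVN form):

#include <stdint.h>
#include <stdio.h>

/* Print a MOVZ/MOVK sequence that materializes a 64-bit immediate,
 * 16 bits at a time.  MOVZ zeroes the other 48 bits, so all-zero
 * chunks after the first instruction cost nothing. */
static void mov_i64_plan(uint64_t val)
{
    int emitted = 0;

    for (int shift = 0; shift < 64; shift += 16) {
        uint16_t chunk = (val >> shift) & 0xffff;

        if (chunk == 0)
            continue;
        printf("%s x0, #0x%04x, lsl #%d\n",
               emitted ? "movk" : "movz", (unsigned)chunk, shift);
        emitted = 1;
    }
    if (!emitted)
        printf("movz x0, #0x0, lsl #0\n"); /* val == 0 still takes one insn */
}

int main(void)
{
    mov_i64_plan(0x12345678deadbeefULL);   /* worst case: 4 instructions */
    mov_i64_plan(0x0000000000ff0000ULL);   /* 1 instruction */
    return 0;
}
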
154 static inline void emit_bti(u32 insn, struct jit_ctx *ctx)
157 emit(insn, ctx);
166 struct jit_ctx *ctx)
171 emit(A64_MOVN(1, reg, ~tmp & 0xffff, shift), ctx);
175 emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx);
179 static inline void emit_call(u64 target, struct jit_ctx *ctx)
183 emit_addr_mov_i64(tmp, target, ctx);
184 emit(A64_BLR(tmp), ctx);
188 const struct jit_ctx *ctx)
197 return ctx->offset[bpf_insn + off] - (ctx->offset[bpf_insn] - 1);
208 static inline int epilogue_offset(const struct jit_ctx *ctx)
210 int to = ctx->epilogue_offset;
211 int from = ctx->idx;
288 static int build_prologue(struct jit_ctx *ctx, bool ebpf_from_cbpf)
290 const struct bpf_prog *prog = ctx->prog;
299 const int idx0 = ctx->idx;
316 * current A64_SP => +-----+ <= (BPF_FP - ctx->stack_size)
331 emit_bti(A64_BTI_JC, ctx);
333 emit(A64_MOV(1, A64_R(9), A64_LR), ctx);
334 emit(A64_NOP, ctx);
338 emit(A64_PACIASP, ctx);
341 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx);
342 emit(A64_MOV(1, A64_FP, A64_SP), ctx);
345 emit(A64_PUSH(r6, r7, A64_SP), ctx);
346 emit(A64_PUSH(r8, r9, A64_SP), ctx);
347 emit(A64_PUSH(fp, tcc, A64_SP), ctx);
348 emit(A64_PUSH(fpb, A64_R(28), A64_SP), ctx);
351 emit(A64_MOV(1, fp, A64_SP), ctx);
355 emit(A64_MOVZ(1, tcc, 0, 0), ctx);
357 cur_offset = ctx->idx - idx0;
365 emit_bti(A64_BTI_J, ctx);
368 emit(A64_SUB_I(1, fpb, fp, ctx->fpb_offset), ctx);
371 ctx->stack_size = round_up(prog->aux->stack_depth, 16);
374 emit(A64_SUB_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
379 static int emit_bpf_tail_call(struct jit_ctx *ctx)
388 const int idx0 = ctx->idx;
389 #define cur_offset (ctx->idx - idx0)
397 emit_a64_mov_i64(tmp, off, ctx);
398 emit(A64_LDR32(tmp, r2, tmp), ctx);
399 emit(A64_MOV(0, r3, r3), ctx);
400 emit(A64_CMP(0, r3, tmp), ctx);
401 emit(A64_B_(A64_COND_CS, jmp_offset), ctx);
408 emit_a64_mov_i64(tmp, MAX_TAIL_CALL_CNT, ctx);
409 emit(A64_CMP(1, tcc, tmp), ctx);
410 emit(A64_B_(A64_COND_CS, jmp_offset), ctx);
411 emit(A64_ADD_I(1, tcc, tcc, 1), ctx);
418 emit_a64_mov_i64(tmp, off, ctx);
419 emit(A64_ADD(1, tmp, r2, tmp), ctx);
420 emit(A64_LSL(1, prg, r3, 3), ctx);
421 emit(A64_LDR64(prg, tmp, prg), ctx);
422 emit(A64_CBZ(1, prg, jmp_offset), ctx);
426 emit_a64_mov_i64(tmp, off, ctx);
427 emit(A64_LDR64(tmp, prg, tmp), ctx);
428 emit(A64_ADD_I(1, tmp, tmp, sizeof(u32) * PROLOGUE_OFFSET), ctx);
429 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
430 emit(A64_BR(tmp), ctx);
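
The sequence emitted by emit_bpf_tail_call() performs three guarded steps before branching: a 32-bit index bounds check against the map's max_entries, a depth check of the tail-call counter against MAX_TAIL_CALL_CNT, and a NULL check on the selected prog slot; the final BR lands past the callee's prologue (sizeof(u32) * PROLOGUE_OFFSET bytes in), so the callee reuses the caller's stack frame. What that emitted code does at runtime, modeled in C with simplified, assumed struct layouts:

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

#define MAX_TAIL_CALL_CNT 33   /* value in recent kernels */

/* Simplified stand-ins for the kernel structures (assumed layout). */
struct bpf_prog { void *bpf_func; };
struct bpf_array {
    uint32_t max_entries;
    struct bpf_prog *ptrs[];
};

/* Returns the address to branch to, or NULL to fall through. */
static void *tail_call_target(struct bpf_array *array, uint64_t index,
                              uint64_t *tcc, size_t prologue_bytes)
{
    if ((uint32_t)index >= array->max_entries)
        return NULL;                         /* bounds check (B.CS)  */
    if (*tcc >= MAX_TAIL_CALL_CNT)
        return NULL;                         /* depth check (B.CS)   */
    (*tcc)++;

    struct bpf_prog *prog = array->ptrs[(uint32_t)index];
    if (!prog)
        return NULL;                         /* empty slot (CBZ)     */
    return (char *)prog->bpf_func + prologue_bytes; /* skip prologue */
}

int main(void)
{
    struct bpf_array *a = malloc(sizeof(*a) + sizeof(struct bpf_prog *));
    static struct bpf_prog p = { .bpf_func = (void *)0x1000 };
    uint64_t tcc = 0;

    a->max_entries = 1;
    a->ptrs[0] = &p;
    printf("target=%p tcc=%llu\n",
           tail_call_target(a, 0, &tcc, 40), (unsigned long long)tcc);
    free(a);
    return 0;
}
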
446 static int emit_lse_atomic(const struct bpf_insn *insn, struct jit_ctx *ctx)
460 emit_a64_mov_i(1, tmp, off, ctx);
461 emit(A64_ADD(1, tmp, tmp, dst), ctx);
468 emit(A64_STADD(isdw, reg, src), ctx);
471 emit(A64_MVN(isdw, tmp2, src), ctx);
472 emit(A64_STCLR(isdw, reg, tmp2), ctx);
475 emit(A64_STSET(isdw, reg, src), ctx);
478 emit(A64_STEOR(isdw, reg, src), ctx);
482 emit(A64_LDADDAL(isdw, src, reg, src), ctx);
485 emit(A64_MVN(isdw, tmp2, src), ctx);
486 emit(A64_LDCLRAL(isdw, src, reg, tmp2), ctx);
489 emit(A64_LDSETAL(isdw, src, reg, src), ctx);
492 emit(A64_LDEORAL(isdw, src, reg, src), ctx);
496 emit(A64_SWPAL(isdw, src, reg, src), ctx);
500 emit(A64_CASAL(isdw, src, reg, bpf2a64[BPF_REG_0]), ctx);
510 static inline int emit_lse_atomic(const struct bpf_insn *insn, struct jit_ctx *ctx)
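
One non-obvious detail in emit_lse_atomic(): LSE has no atomic AND, only atomic bit-clear (STCLR/LDCLR compute *addr &= ~mask), so BPF_AND is lowered as MVN tmp2, src followed by STCLR, exploiting dst & src == dst & ~(~src); the ST* forms serve the no-result case and the LD*AL/SWPAL/CASAL forms the fetching variants. A portable model of the AND trick (sketch; GCC/Clang __atomic builtins stand in for the LSE instructions):

#include <stdint.h>
#include <stdio.h>

/* Model of the MVN + STCLR lowering of an atomic AND. */
static uint64_t atomic_and_via_clear(uint64_t *addr, uint64_t src)
{
    uint64_t mask = ~src;                        /* MVN tmp2, src */
    /* "bit clear": *addr &= ~mask, i.e. *addr &= src */
    return __atomic_fetch_and(addr, ~mask, __ATOMIC_ACQ_REL);
}

int main(void)
{
    uint64_t v = 0xff00ff00ff00ff00ULL;

    atomic_and_via_clear(&v, 0x0ff00ff00ff00ff0ULL);
    printf("0x%016llx\n", (unsigned long long)v); /* 0x0f000f000f000f00 */
    return 0;
}
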
516 static int emit_ll_sc_atomic(const struct bpf_insn *insn, struct jit_ctx *ctx)
524 const int i = insn - ctx->prog->insnsi;
534 emit_a64_mov_i(1, tmp, off, ctx);
535 emit(A64_ADD(1, tmp, tmp, dst), ctx);
542 emit(A64_LDXR(isdw, tmp2, reg), ctx);
544 emit(A64_ADD(isdw, tmp2, tmp2, src), ctx);
546 emit(A64_AND(isdw, tmp2, tmp2, src), ctx);
548 emit(A64_ORR(isdw, tmp2, tmp2, src), ctx);
550 emit(A64_EOR(isdw, tmp2, tmp2, src), ctx);
551 emit(A64_STXR(isdw, tmp2, reg, tmp3), ctx);
554 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);
562 emit(A64_MOV(isdw, ax, src), ctx);
563 emit(A64_LDXR(isdw, src, reg), ctx);
565 emit(A64_ADD(isdw, tmp2, src, ax), ctx);
567 emit(A64_AND(isdw, tmp2, src, ax), ctx);
569 emit(A64_ORR(isdw, tmp2, src, ax), ctx);
571 emit(A64_EOR(isdw, tmp2, src, ax), ctx);
572 emit(A64_STLXR(isdw, tmp2, reg, tmp3), ctx);
575 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);
576 emit(A64_DMB_ISH, ctx);
579 emit(A64_MOV(isdw, tmp2, src), ctx);
580 emit(A64_LDXR(isdw, src, reg), ctx);
581 emit(A64_STLXR(isdw, tmp2, reg, tmp3), ctx);
584 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);
585 emit(A64_DMB_ISH, ctx);
590 emit(A64_MOV(isdw, tmp2, r0), ctx);
591 emit(A64_LDXR(isdw, r0, reg), ctx);
592 emit(A64_EOR(isdw, tmp3, r0, tmp2), ctx);
595 emit(A64_CBNZ(isdw, tmp3, jmp_offset), ctx);
596 emit(A64_STLXR(isdw, src, reg, tmp3), ctx);
599 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);
600 emit(A64_DMB_ISH, ctx);
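
emit_ll_sc_atomic() is the fallback when FEAT_LSE is unavailable: a load-exclusive / modify / store-exclusive loop, with CBNZ on the store-exclusive status register to retry, and a trailing DMB ISH barrier for the value-returning forms. The retry structure, modeled with a weak compare-exchange standing in for the LDXR/STXR pair (portable sketch, relaxed ordering for brevity):

#include <stdint.h>
#include <stdio.h>

static uint64_t fetch_add_llsc_model(uint64_t *addr, uint64_t val)
{
    uint64_t old, desired;

    do {
        old = __atomic_load_n(addr, __ATOMIC_RELAXED);    /* LDXR */
        desired = old + val;                              /* ADD  */
    } while (!__atomic_compare_exchange_n(addr, &old, desired,
                                          1 /* weak: may spuriously fail */,
                                          __ATOMIC_RELAXED,
                                          __ATOMIC_RELAXED));
    /* The kernel emits DMB ISH here for the BPF_FETCH variants. */
    return old;
}

int main(void)
{
    uint64_t v = 40;

    printf("old=%llu new=%llu\n",
           (unsigned long long)fetch_add_llsc_model(&v, 2),
           (unsigned long long)v);
    return 0;
}
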
638 static void build_plt(struct jit_ctx *ctx)
644 if ((ctx->idx + PLT_TARGET_OFFSET / AARCH64_INSN_SIZE) % 2)
645 emit(A64_NOP, ctx);
647 plt = (struct bpf_plt *)(ctx->image + ctx->idx);
649 emit(A64_LDR64LIT(tmp, 2 * AARCH64_INSN_SIZE), ctx);
650 emit(A64_BR(tmp), ctx);
652 if (ctx->image)
656 static void build_epilogue(struct jit_ctx *ctx)
667 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
670 emit(A64_POP(fpb, A64_R(28), A64_SP), ctx);
672 emit(A64_POP(fp, A64_R(26), A64_SP), ctx);
675 emit(A64_POP(r8, r9, A64_SP), ctx);
676 emit(A64_POP(r6, r7, A64_SP), ctx);
679 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx);
682 emit(A64_MOV(1, A64_R(0), r0), ctx);
686 emit(A64_AUTIASP, ctx);
688 emit(A64_RET(A64_LR), ctx);
707 struct jit_ctx *ctx,
714 if (!ctx->image)
722 if (!ctx->prog->aux->extable ||
723 WARN_ON_ONCE(ctx->exentry_idx >= ctx->prog->aux->num_exentries))
726 ex = &ctx->prog->aux->extable[ctx->exentry_idx];
727 pc = (unsigned long)&ctx->image[ctx->idx - 1];
751 ctx->exentry_idx++;
761 static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx,
773 const int i = insn - ctx->prog->insnsi;
791 emit(A64_MOV(is64, dst, src), ctx);
794 emit(A64_SXTB(is64, dst, src), ctx);
797 emit(A64_SXTH(is64, dst, src), ctx);
800 emit(A64_SXTW(is64, dst, src), ctx);
807 emit(A64_ADD(is64, dst, dst, src), ctx);
811 emit(A64_SUB(is64, dst, dst, src), ctx);
815 emit(A64_AND(is64, dst, dst, src), ctx);
819 emit(A64_ORR(is64, dst, dst, src), ctx);
823 emit(A64_EOR(is64, dst, dst, src), ctx);
827 emit(A64_MUL(is64, dst, dst, src), ctx);
832 emit(A64_UDIV(is64, dst, dst, src), ctx);
834 emit(A64_SDIV(is64, dst, dst, src), ctx);
839 emit(A64_UDIV(is64, tmp, dst, src), ctx);
841 emit(A64_SDIV(is64, tmp, dst, src), ctx);
842 emit(A64_MSUB(is64, dst, dst, tmp, src), ctx);
846 emit(A64_LSLV(is64, dst, dst, src), ctx);
850 emit(A64_LSRV(is64, dst, dst, src), ctx);
854 emit(A64_ASRV(is64, dst, dst, src), ctx);
859 emit(A64_NEG(is64, dst, dst), ctx);
874 emit(A64_REV16(is64, dst, dst), ctx);
876 emit(A64_UXTH(is64, dst, dst), ctx);
879 emit(A64_REV32(is64, dst, dst), ctx);
883 emit(A64_REV64(dst, dst), ctx);
891 emit(A64_UXTH(is64, dst, dst), ctx);
895 emit(A64_UXTW(is64, dst, dst), ctx);
905 emit_a64_mov_i(is64, dst, imm, ctx);
911 emit(A64_ADD_I(is64, dst, dst, imm), ctx);
913 emit(A64_SUB_I(is64, dst, dst, -imm), ctx);
915 emit_a64_mov_i(is64, tmp, imm, ctx);
916 emit(A64_ADD(is64, dst, dst, tmp), ctx);
922 emit(A64_SUB_I(is64, dst, dst, imm), ctx);
924 emit(A64_ADD_I(is64, dst, dst, -imm), ctx);
926 emit_a64_mov_i(is64, tmp, imm, ctx);
927 emit(A64_SUB(is64, dst, dst, tmp), ctx);
934 emit(a64_insn, ctx);
936 emit_a64_mov_i(is64, tmp, imm, ctx);
937 emit(A64_AND(is64, dst, dst, tmp), ctx);
944 emit(a64_insn, ctx);
946 emit_a64_mov_i(is64, tmp, imm, ctx);
947 emit(A64_ORR(is64, dst, dst, tmp), ctx);
954 emit(a64_insn, ctx);
956 emit_a64_mov_i(is64, tmp, imm, ctx);
957 emit(A64_EOR(is64, dst, dst, tmp), ctx);
962 emit_a64_mov_i(is64, tmp, imm, ctx);
963 emit(A64_MUL(is64, dst, dst, tmp), ctx);
967 emit_a64_mov_i(is64, tmp, imm, ctx);
969 emit(A64_UDIV(is64, dst, dst, tmp), ctx);
971 emit(A64_SDIV(is64, dst, dst, tmp), ctx);
975 emit_a64_mov_i(is64, tmp2, imm, ctx);
977 emit(A64_UDIV(is64, tmp, dst, tmp2), ctx);
979 emit(A64_SDIV(is64, tmp, dst, tmp2), ctx);
980 emit(A64_MSUB(is64, dst, dst, tmp, tmp2), ctx);
984 emit(A64_LSL(is64, dst, dst, imm), ctx);
988 emit(A64_LSR(is64, dst, dst, imm), ctx);
992 emit(A64_ASR(is64, dst, dst, imm), ctx);
999 jmp_offset = bpf2a64_offset(i, off, ctx);
1001 jmp_offset = bpf2a64_offset(i, imm, ctx);
1003 emit(A64_B(jmp_offset), ctx);
1026 emit(A64_CMP(is64, dst, src), ctx);
1028 jmp_offset = bpf2a64_offset(i, off, ctx);
1065 emit(A64_B_(jmp_cond, jmp_offset), ctx);
1069 emit(A64_TST(is64, dst, src), ctx);
1093 emit(A64_CMP_I(is64, dst, imm), ctx);
1095 emit(A64_CMN_I(is64, dst, -imm), ctx);
1097 emit_a64_mov_i(is64, tmp, imm, ctx);
1098 emit(A64_CMP(is64, dst, tmp), ctx);
1105 emit(a64_insn, ctx);
1107 emit_a64_mov_i(is64, tmp, imm, ctx);
1108 emit(A64_TST(is64, dst, tmp), ctx);
1118 ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass,
1122 emit_call(func_addr, ctx);
1123 emit(A64_MOV(1, r0, A64_R(0)), ctx);
1128 if (emit_bpf_tail_call(ctx))
1135 if (i == ctx->prog->len - 1)
1137 jmp_offset = epilogue_offset(ctx);
1139 emit(A64_B(jmp_offset), ctx);
1150 emit_addr_mov_i64(dst, imm64, ctx);
1152 emit_a64_mov_i64(dst, imm64, ctx);
1173 if (ctx->fpb_offset > 0 && src == fp) {
1175 off_adj = off + ctx->fpb_offset;
1186 emit(A64_LDRSWI(dst, src_adj, off_adj), ctx);
1188 emit(A64_LDR32I(dst, src_adj, off_adj), ctx);
1190 emit_a64_mov_i(1, tmp, off, ctx);
1192 emit(A64_LDRSW(dst, src_adj, off_adj), ctx);
1194 emit(A64_LDR32(dst, src, tmp), ctx);
1200 emit(A64_LDRSHI(dst, src_adj, off_adj), ctx);
1202 emit(A64_LDRHI(dst, src_adj, off_adj), ctx);
1204 emit_a64_mov_i(1, tmp, off, ctx);
1206 emit(A64_LDRSH(dst, src, tmp), ctx);
1208 emit(A64_LDRH(dst, src, tmp), ctx);
1214 emit(A64_LDRSBI(dst, src_adj, off_adj), ctx);
1216 emit(A64_LDRBI(dst, src_adj, off_adj), ctx);
1218 emit_a64_mov_i(1, tmp, off, ctx);
1220 emit(A64_LDRSB(dst, src, tmp), ctx);
1222 emit(A64_LDRB(dst, src, tmp), ctx);
1227 emit(A64_LDR64I(dst, src_adj, off_adj), ctx);
1229 emit_a64_mov_i(1, tmp, off, ctx);
1230 emit(A64_LDR64(dst, src, tmp), ctx);
1235 ret = add_exception_handler(insn, ctx, dst);
1258 if (ctx->fpb_offset > 0 && dst == fp) {
1260 off_adj = off + ctx->fpb_offset;
1266 emit_a64_mov_i(1, tmp, imm, ctx);
1270 emit(A64_STR32I(tmp, dst_adj, off_adj), ctx);
1272 emit_a64_mov_i(1, tmp2, off, ctx);
1273 emit(A64_STR32(tmp, dst, tmp2), ctx);
1278 emit(A64_STRHI(tmp, dst_adj, off_adj), ctx);
1280 emit_a64_mov_i(1, tmp2, off, ctx);
1281 emit(A64_STRH(tmp, dst, tmp2), ctx);
1286 emit(A64_STRBI(tmp, dst_adj, off_adj), ctx);
1288 emit_a64_mov_i(1, tmp2, off, ctx);
1289 emit(A64_STRB(tmp, dst, tmp2), ctx);
1294 emit(A64_STR64I(tmp, dst_adj, off_adj), ctx);
1296 emit_a64_mov_i(1, tmp2, off, ctx);
1297 emit(A64_STR64(tmp, dst, tmp2), ctx);
1308 if (ctx->fpb_offset > 0 && dst == fp) {
1310 off_adj = off + ctx->fpb_offset;
1318 emit(A64_STR32I(src, dst_adj, off_adj), ctx);
1320 emit_a64_mov_i(1, tmp, off, ctx);
1321 emit(A64_STR32(src, dst, tmp), ctx);
1326 emit(A64_STRHI(src, dst_adj, off_adj), ctx);
1328 emit_a64_mov_i(1, tmp, off, ctx);
1329 emit(A64_STRH(src, dst, tmp), ctx);
1334 emit(A64_STRBI(src, dst_adj, off_adj), ctx);
1336 emit_a64_mov_i(1, tmp, off, ctx);
1337 emit(A64_STRB(src, dst, tmp), ctx);
1342 emit(A64_STR64I(src, dst_adj, off_adj), ctx);
1344 emit_a64_mov_i(1, tmp, off, ctx);
1345 emit(A64_STR64(src, dst, tmp), ctx);
1354 ret = emit_lse_atomic(insn, ctx);
1356 ret = emit_ll_sc_atomic(insn, ctx);
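
A pattern that repeats through the ALU and memory cases of build_insn(): try the A64 immediate form first, and fall back to materializing the constant in a temporary register when it cannot be encoded (the emit_a64_mov_i + ADD/SUB/AND/... pairs above). For ADD/SUB the encodable set is a 12-bit unsigned immediate, optionally shifted left by 12, and an unencodable ADD of a negative imm is flipped into SUB_I of -imm. The predicate, reproduced standalone (it matches the kernel's is_addsub_imm() helper):

#include <stdint.h>
#include <stdio.h>

/* ADD/SUB (immediate) encodes imm12, optionally shifted left by 12. */
static int is_addsub_imm(uint32_t imm)
{
    return !(imm & ~0xfffu) || !(imm & ~0xfff000u);
}

int main(void)
{
    printf("%d\n", is_addsub_imm(42));        /* 1: ADD_I directly       */
    printf("%d\n", is_addsub_imm(0x123000));  /* 1: shifted-by-12 form   */
    printf("%d\n", is_addsub_imm(0x12345));   /* 0: emit_a64_mov_i + ADD */
    return 0;
}
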
1442 static int build_body(struct jit_ctx *ctx, bool extra_pass)
1444 const struct bpf_prog *prog = ctx->prog;
1460 if (ctx->image == NULL)
1461 ctx->offset[i] = ctx->idx;
1462 ret = build_insn(insn, ctx, extra_pass);
1465 if (ctx->image == NULL)
1466 ctx->offset[i] = ctx->idx;
1477 if (ctx->image == NULL)
1478 ctx->offset[i] = ctx->idx;
1483 static int validate_code(struct jit_ctx *ctx)
1487 for (i = 0; i < ctx->idx; i++) {
1488 u32 a64_insn = le32_to_cpu(ctx->image[i]);
1496 static int validate_ctx(struct jit_ctx *ctx)
1498 if (validate_code(ctx))
1501 if (WARN_ON_ONCE(ctx->exentry_idx != ctx->prog->aux->num_exentries))
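
validate_code() relies on a property of the arm64 instruction encoders: on invalid operands they return AARCH64_BREAK_FAULT (a BRK encoding) instead of a real instruction, so one linear scan of the finished image catches any emission error; validate_ctx() additionally checks that the second pass produced exactly the expected number of extable entries. Sketch of the scan (the exact fault value is taken on trust here):

#include <stdint.h>
#include <stdio.h>

/* Assumed value of the BRK encoding the kernel's encoders return on
 * failure. */
#define AARCH64_BREAK_FAULT 0xd4202000u

static int validate_code(const uint32_t *image, int nr_insn)
{
    for (int i = 0; i < nr_insn; i++)
        if (image[i] == AARCH64_BREAK_FAULT)
            return -1;  /* some emitter rejected its operands */
    return 0;
}

int main(void)
{
    uint32_t good[] = { 0xd503201f, 0xd65f03c0 };         /* NOP, RET */
    uint32_t bad[]  = { 0xd503201f, AARCH64_BREAK_FAULT };

    printf("%d %d\n", validate_code(good, 2), validate_code(bad, 2));
    return 0;
}
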
1515 struct jit_ctx ctx;
1527 struct jit_ctx ctx;
1553 if (jit_data->ctx.offset) {
1554 ctx = jit_data->ctx;
1558 prog_size = sizeof(u32) * ctx.idx;
1561 memset(&ctx, 0, sizeof(ctx));
1562 ctx.prog = prog;
1564 ctx.offset = kvcalloc(prog->len + 1, sizeof(int), GFP_KERNEL);
1565 if (ctx.offset == NULL) {
1570 ctx.fpb_offset = find_fpb_offset(prog);
1573 * 1. Initial fake pass to compute ctx->idx and ctx->offset.
1575 * BPF line info needs ctx->offset[i] to be the offset of
1578 if (build_prologue(&ctx, was_classic)) {
1583 if (build_body(&ctx, extra_pass)) {
1588 ctx.epilogue_offset = ctx.idx;
1589 build_epilogue(&ctx);
1590 build_plt(&ctx);
1597 prog_size = sizeof(u32) * ctx.idx;
1610 ctx.image = (__le32 *)image_ptr;
1614 ctx.idx = 0;
1615 ctx.exentry_idx = 0;
1617 build_prologue(&ctx, was_classic);
1619 if (build_body(&ctx, extra_pass)) {
1625 build_epilogue(&ctx);
1626 build_plt(&ctx);
1629 if (validate_ctx(&ctx)) {
1637 bpf_jit_dump(prog->len, prog_size, 2, ctx.image);
1639 bpf_flush_icache(header, ctx.image + ctx.idx);
1642 if (extra_pass && ctx.idx != jit_data->ctx.idx) {
1644 ctx.idx, jit_data->ctx.idx);
1653 jit_data->ctx = ctx;
1657 prog->bpf_func = (void *)ctx.image;
1666 ctx.offset[i] *= AARCH64_INSN_SIZE;
1667 bpf_prog_fill_jited_linfo(prog, ctx.offset + 1);
1669 kvfree(ctx.offset);
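
During code generation ctx.offset[] is kept in A64 instruction indexes, which is why bpf_int_jit_compile() scales every entry by AARCH64_INSN_SIZE before handing it to the line-info code; the array is allocated with prog->len + 1 slots so consecutive entries bracket each instruction, and ctx.offset + 1 gives bpf_prog_fill_jited_linfo() the offset just past each instruction's code. A toy map for a three-instruction program:

#include <stdio.h>

#define AARCH64_INSN_SIZE 4

int main(void)
{
    /* Hypothetical pass-1 result: insn i's code starts at offset[i];
     * the prog->len-th slot marks the end of the body. */
    int offset[] = { 0, 1, 4, 6 };
    int len = 3;

    for (int i = 0; i <= len; i++)
        offset[i] *= AARCH64_INSN_SIZE;    /* insns -> bytes */
    for (int i = 0; i < len; i++)
        printf("bpf insn %d: %2d bytes of a64 code at byte %2d\n",
               i, offset[i + 1] - offset[i], offset[i]);
    return 0;
}
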
1707 static void invoke_bpf_prog(struct jit_ctx *ctx, struct bpf_tramp_link *l,
1722 emit(A64_STR64I(A64_ZR, A64_SP, run_ctx_off + cookie_off), ctx);
1724 emit_a64_mov_i64(A64_R(10), l->cookie, ctx);
1726 ctx);
1732 emit_addr_mov_i64(A64_R(19), (const u64)p, ctx);
1735 emit(A64_MOV(1, A64_R(0), A64_R(19)), ctx);
1737 emit(A64_ADD_I(1, A64_R(1), A64_SP, run_ctx_off), ctx);
1739 emit_call(enter_prog, ctx);
1744 branch = ctx->image + ctx->idx;
1745 emit(A64_NOP, ctx);
1748 emit(A64_MOV(1, A64_R(20), A64_R(0)), ctx);
1750 emit(A64_ADD_I(1, A64_R(0), A64_SP, args_off), ctx);
1752 emit_addr_mov_i64(A64_R(1), (const u64)p->insnsi, ctx);
1754 emit_call((const u64)p->bpf_func, ctx);
1757 emit(A64_STR64I(A64_R(0), A64_SP, retval_off), ctx);
1759 if (ctx->image) {
1760 int offset = &ctx->image[ctx->idx] - branch;
1765 emit(A64_MOV(1, A64_R(0), A64_R(19)), ctx);
1767 emit(A64_MOV(1, A64_R(1), A64_R(20)), ctx);
1769 emit(A64_ADD_I(1, A64_R(2), A64_SP, run_ctx_off), ctx);
1771 emit_call(exit_prog, ctx);
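
invoke_bpf_prog() needs a conditional branch over the program body before the branch distance is known, so it remembers the current slot, emits a NOP placeholder, and rewrites it once ctx->idx has reached the target; the fmod_ret branches patched later in prepare_trampoline() use the same trick. The mechanics, shown with a plain A64 B encoding (imm26 counts instructions) rather than the kernel's insn helpers:

#include <stdint.h>
#include <stdio.h>

/* A64 unconditional branch: 0b000101 | imm26 (units of 4 bytes). */
static uint32_t a64_b(int imm26)
{
    return 0x14000000u | ((uint32_t)imm26 & 0x03ffffffu);
}

int main(void)
{
    uint32_t image[8];
    int idx = 0;

    uint32_t *branch = &image[idx];     /* remember the placeholder */
    image[idx++] = 0xd503201f;          /* NOP, patched below */
    image[idx++] = 0xd503201f;          /* ...skipped code... */
    image[idx++] = 0xd503201f;

    int offset = &image[idx] - branch;  /* distance in instructions */
    *branch = a64_b(offset);
    printf("patched: B .+%d insns (0x%08x)\n", offset, (unsigned)*branch);
    return 0;
}
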
1774 static void invoke_bpf_mod_ret(struct jit_ctx *ctx, struct bpf_tramp_links *tl,
1783 emit(A64_STR64I(A64_ZR, A64_SP, retval_off), ctx);
1785 invoke_bpf_prog(ctx, tl->links[i], args_off, retval_off,
1790 emit(A64_LDR64I(A64_R(10), A64_SP, retval_off), ctx);
1794 branches[i] = ctx->image + ctx->idx;
1795 emit(A64_NOP, ctx);
1799 static void save_args(struct jit_ctx *ctx, int args_off, int nregs)
1804 emit(A64_STR64I(i, A64_SP, args_off), ctx);
1809 static void restore_args(struct jit_ctx *ctx, int args_off, int nregs)
1814 emit(A64_LDR64I(i, A64_SP, args_off), ctx);
1830 static int prepare_trampoline(struct jit_ctx *ctx, struct bpf_tramp_image *im,
1914 emit_bti(A64_BTI_JC, ctx);
1917 emit(A64_PUSH(A64_FP, A64_R(9), A64_SP), ctx);
1918 emit(A64_MOV(1, A64_FP, A64_SP), ctx);
1921 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx);
1922 emit(A64_MOV(1, A64_FP, A64_SP), ctx);
1925 emit(A64_SUB_I(1, A64_SP, A64_SP, stack_size), ctx);
1929 emit_addr_mov_i64(A64_R(10), (const u64)orig_call, ctx);
1930 emit(A64_STR64I(A64_R(10), A64_SP, ip_off), ctx);
1934 emit(A64_MOVZ(1, A64_R(10), nregs, 0), ctx);
1935 emit(A64_STR64I(A64_R(10), A64_SP, nregs_off), ctx);
1938 save_args(ctx, args_off, nregs);
1941 emit(A64_STR64I(A64_R(19), A64_SP, regs_off), ctx);
1942 emit(A64_STR64I(A64_R(20), A64_SP, regs_off + 8), ctx);
1945 emit_addr_mov_i64(A64_R(0), (const u64)im, ctx);
1946 emit_call((const u64)__bpf_tramp_enter, ctx);
1950 invoke_bpf_prog(ctx, fentry->links[i], args_off,
1960 invoke_bpf_mod_ret(ctx, fmod_ret, args_off, retval_off,
1965 restore_args(ctx, args_off, nregs);
1967 emit(A64_LDR64I(A64_R(10), A64_SP, retaddr_off), ctx);
1968 emit(A64_ADR(A64_LR, AARCH64_INSN_SIZE * 2), ctx);
1969 emit(A64_RET(A64_R(10)), ctx);
1971 emit(A64_STR64I(A64_R(0), A64_SP, retval_off), ctx);
1973 im->ip_after_call = ctx->image + ctx->idx;
1974 emit(A64_NOP, ctx);
1978 for (i = 0; i < fmod_ret->nr_links && ctx->image != NULL; i++) {
1979 int offset = &ctx->image[ctx->idx] - branches[i];
1984 invoke_bpf_prog(ctx, fexit->links[i], args_off, retval_off,
1988 im->ip_epilogue = ctx->image + ctx->idx;
1989 emit_addr_mov_i64(A64_R(0), (const u64)im, ctx);
1990 emit_call((const u64)__bpf_tramp_exit, ctx);
1994 restore_args(ctx, args_off, nregs);
1997 emit(A64_LDR64I(A64_R(19), A64_SP, regs_off), ctx);
1998 emit(A64_LDR64I(A64_R(20), A64_SP, regs_off + 8), ctx);
2001 emit(A64_LDR64I(A64_R(0), A64_SP, retval_off), ctx);
2004 emit(A64_MOV(1, A64_SP, A64_FP), ctx);
2007 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx);
2008 emit(A64_POP(A64_FP, A64_R(9), A64_SP), ctx);
2012 emit(A64_MOV(1, A64_LR, A64_R(9)), ctx);
2013 emit(A64_RET(A64_R(9)), ctx);
2016 emit(A64_MOV(1, A64_R(10), A64_LR), ctx);
2017 emit(A64_MOV(1, A64_LR, A64_R(9)), ctx);
2018 emit(A64_RET(A64_R(10)), ctx);
2021 if (ctx->image)
2022 bpf_flush_icache(ctx->image, ctx->image + ctx->idx);
2026 return ctx->idx;
2037 struct jit_ctx ctx = {
2053 ret = prepare_trampoline(&ctx, im, tlinks, orig_call, nregs, flags);
2060 ctx.image = image;
2061 ctx.idx = 0;
2064 ret = prepare_trampoline(&ctx, im, tlinks, orig_call, nregs, flags);
2066 if (ret > 0 && validate_code(&ctx) < 0)