Lines matching refs: imm

33 #define check_imm(bits, imm) do {				\
34 	if ((((imm) > 0) && ((imm) >> (bits))) ||	\
35 	    (((imm) < 0) && (~(imm) >> (bits)))) {	\
36 		pr_info("[%2d] imm=%d(0x%x) out of range\n",	\
37 			i, imm, imm);	\
41 #define check_imm19(imm) check_imm(19, imm)
42 #define check_imm26(imm) check_imm(26, imm)
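Read directly off the macro, check_imm(bits, imm) accepts the signed range [-2^bits, 2^bits - 1] and logs anything wider; check_imm19()/check_imm26() are the specializations used when range-checking the 19- and 26-bit branch-offset immediates. A standalone restatement of the predicate (function and variable names are mine, not the kernel's):

#include <stdbool.h>
#include <stdio.h>

/* Illustrative restatement of check_imm(): a positive value must vanish when
 * shifted right by 'bits'; a negative value must become all-ones, i.e. ~imm
 * must vanish.  Accepts exactly [-2^bits, 2^bits - 1]. */
static bool fits_signed_imm(int bits, int imm)
{
	if (imm > 0 && (imm >> bits))
		return false;
	if (imm < 0 && (~imm >> bits))
		return false;
	return true;
}

int main(void)
{
	printf("%d %d\n", fits_signed_imm(19, (1 << 19) - 1),	/* 1 */
			  fits_signed_imm(19, 1 << 19));	/* 0 */
	return 0;
}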
216 static bool is_addsub_imm(u32 imm)
219 return !(imm & ~0xfff) || !(imm & ~0xfff000);
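AArch64 ADD/SUB (immediate) encodes an unsigned 12-bit constant with an optional left shift of 12, so the test at line 219 accepts exactly the values that live entirely in bits [11:0] or entirely in bits [23:12]. A self-contained version with a few checks (illustrative only):

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

/* Same predicate as the JIT's is_addsub_imm(): true when imm can be fed
 * straight into an AArch64 ADD/SUB (immediate) instruction. */
static bool is_addsub_imm(uint32_t imm)
{
	return !(imm & ~0xfffu) || !(imm & ~0xfff000u);
}

int main(void)
{
	assert(is_addsub_imm(0xfff));		/* 12-bit value, no shift */
	assert(is_addsub_imm(0x123000));	/* 12-bit value, LSL #12  */
	assert(!is_addsub_imm(0x1001));		/* straddles both fields  */
	return 0;
}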
465 switch (insn->imm) {
503 pr_err_once("unknown atomic op code %02x\n", insn->imm);
525 const s32 imm = insn->imm;
539 if (imm == BPF_ADD || imm == BPF_AND ||
540 imm == BPF_OR || imm == BPF_XOR) {
543 if (imm == BPF_ADD)
545 else if (imm == BPF_AND)
547 else if (imm == BPF_OR)
555 } else if (imm == (BPF_ADD | BPF_FETCH) ||
556 imm == (BPF_AND | BPF_FETCH) ||
557 imm == (BPF_OR | BPF_FETCH) ||
558 imm == (BPF_XOR | BPF_FETCH)) {
564 if (imm == (BPF_ADD | BPF_FETCH))
566 else if (imm == (BPF_AND | BPF_FETCH))
568 else if (imm == (BPF_OR | BPF_FETCH))
577 } else if (imm == BPF_XCHG) {
586 } else if (imm == BPF_CMPXCHG) {
602 pr_err_once("unknown atomic op code %02x\n", imm);
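The two "unknown atomic op code" fall-throughs (lines 503 and 602) bracket a dispatch on insn->imm: the plain add/and/or/xor forms, their BPF_FETCH variants (which additionally return the old memory value), and the exchange/compare-exchange forms each take their own emit path. A compilable sketch of the same decode, built from the opcode macros in the UAPI bpf.h header (the helper name and table layout are mine):

#include <stdio.h>
#include <linux/bpf.h>	/* BPF_ADD, BPF_FETCH, BPF_XCHG, BPF_CMPXCHG, ... */

/* Illustrative decode of the atomic opcode carried in insn->imm. */
static const char *atomic_op_name(int imm)
{
	switch (imm) {
	case BPF_ADD:			return "add";
	case BPF_AND:			return "and";
	case BPF_OR:			return "or";
	case BPF_XOR:			return "xor";
	case BPF_ADD | BPF_FETCH:	return "fetch-add";
	case BPF_AND | BPF_FETCH:	return "fetch-and";
	case BPF_OR | BPF_FETCH:	return "fetch-or";
	case BPF_XOR | BPF_FETCH:	return "fetch-xor";
	case BPF_XCHG:			return "xchg";
	case BPF_CMPXCHG:		return "cmpxchg";
	default:			return "unknown";
	}
}

int main(void)
{
	printf("%s\n", atomic_op_name(BPF_ADD | BPF_FETCH));	/* fetch-add */
	return 0;
}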
772 const s32 imm = insn->imm;
861 /* dst = BSWAP##imm(dst) */
872 switch (imm) {
888 switch (imm) {
902 /* dst = imm */
905 emit_a64_mov_i(is64, dst, imm, ctx);
907 /* dst = dst OP imm */
910 if (is_addsub_imm(imm)) {
911 emit(A64_ADD_I(is64, dst, dst, imm), ctx);
912 } else if (is_addsub_imm(-imm)) {
913 emit(A64_SUB_I(is64, dst, dst, -imm), ctx);
915 emit_a64_mov_i(is64, tmp, imm, ctx);
921 if (is_addsub_imm(imm)) {
922 emit(A64_SUB_I(is64, dst, dst, imm), ctx);
923 } else if (is_addsub_imm(-imm)) {
924 emit(A64_ADD_I(is64, dst, dst, -imm), ctx);
926 emit_a64_mov_i(is64, tmp, imm, ctx);
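Lines 910-913 and 921-924 are two halves of the same encodability trick: dst + imm can be emitted as either ADD dst, dst, #imm or SUB dst, dst, #-imm, whichever constant the ADD/SUB-immediate form accepts, and only when neither fits does the JIT spill imm into a temp register (lines 915/926) and use the register form. The identity being relied on, stated as a check (illustrative):

#include <assert.h>
#include <stdint.h>

int main(void)
{
	int64_t dst = 100, imm = -5;

	/* -5 is not an ADD/SUB immediate, but 5 is: add-by-(-5) can be
	 * emitted as subtract-by-5, and vice versa for BPF_SUB. */
	assert(dst + imm == dst - (-imm));
	return 0;
}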
932 a64_insn = A64_AND_I(is64, dst, dst, imm);
936 emit_a64_mov_i(is64, tmp, imm, ctx);
942 a64_insn = A64_ORR_I(is64, dst, dst, imm);
946 emit_a64_mov_i(is64, tmp, imm, ctx);
952 a64_insn = A64_EOR_I(is64, dst, dst, imm);
956 emit_a64_mov_i(is64, tmp, imm, ctx);
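The AND/OR/XOR immediate forms built at lines 932/942/952 use the AArch64 "bitmask immediate" encoding, which can only represent a rotated run of ones replicated in 2/4/8/16/32/64-bit elements; constants that do not fit take the adjacent fallback (lines 936/946/956) through a temp register. A standalone encodability test for the 64-bit case (my own implementation, not the kernel's encoder):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Rotate the low 'e' bits of x right by r, within the e-bit field. */
static uint64_t rotr_field(uint64_t x, unsigned int r, unsigned int e)
{
	uint64_t mask = (e == 64) ? ~0ULL : ((1ULL << e) - 1);

	x &= mask;
	if (r == 0)
		return x;
	return ((x >> r) | (x << (e - r))) & mask;
}

/* True if v is an AArch64 bitmask immediate: for some element size e in
 * {2,4,...,64}, v is that element replicated across 64 bits and the element
 * is a rotated run of ones (all-zeros and all-ones are not encodable). */
static bool is_bitmask_imm64(uint64_t v)
{
	for (unsigned int e = 2; e <= 64; e *= 2) {
		uint64_t mask = (e == 64) ? ~0ULL : ((1ULL << e) - 1);
		uint64_t elem = v & mask;
		uint64_t rep = 0;

		for (unsigned int i = 0; i < 64; i += e)
			rep |= elem << i;
		if (rep != v)
			continue;
		for (unsigned int r = 0; r < e; r++) {
			uint64_t rot = rotr_field(elem, r, e);

			/* some rotation must be a run of ones from bit 0 */
			if (rot && rot != mask && !(rot & (rot + 1)))
				return true;
		}
	}
	return false;
}

int main(void)
{
	printf("%d %d\n", is_bitmask_imm64(0x00ff00ff00ff00ffULL),	/* 1 */
			  is_bitmask_imm64(0x0000000012345678ULL));	/* 0 */
	return 0;
}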
962 emit_a64_mov_i(is64, tmp, imm, ctx);
967 emit_a64_mov_i(is64, tmp, imm, ctx);
975 emit_a64_mov_i(is64, tmp2, imm, ctx);
984 emit(A64_LSL(is64, dst, dst, imm), ctx);
988 emit(A64_LSR(is64, dst, dst, imm), ctx);
992 emit(A64_ASR(is64, dst, dst, imm), ctx);
1001 jmp_offset = bpf2a64_offset(i, imm, ctx);
1071 /* IF (dst COND imm) JUMP off */
1092 if (is_addsub_imm(imm)) {
1093 emit(A64_CMP_I(is64, dst, imm), ctx);
1094 } else if (is_addsub_imm(-imm)) {
1095 emit(A64_CMN_I(is64, dst, -imm), ctx);
1097 emit_a64_mov_i(is64, tmp, imm, ctx);
1103 a64_insn = A64_TST_I(is64, dst, imm);
1107 emit_a64_mov_i(is64, tmp, imm, ctx);
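The conditional-jump path reuses both tricks from the ALU section: lines 1092-1095 pick CMP dst, #imm when the constant fits the ADD/SUB-immediate form and CMN dst, #-imm when only its negation does (adding -imm sets the same flags as subtracting imm), with line 1097 as the mov-to-temp fallback; line 1103 likewise leans on the logical-immediate encoding for the TST form, with line 1107 as its fallback.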
1148 imm64 = (u64)insn1.imm << 32 | (u32)imm;
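BPF_LD | BPF_IMM | BPF_DW occupies two instruction slots: the first slot's imm carries the low 32 bits and the next slot's imm the high 32 bits, which line 1148 stitches back together; the (u32) cast keeps a negative low half from sign-extending into the upper word. The same combination, stand-alone (values are made up for illustration):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	int32_t imm    = (int32_t)0x89abcdef;	/* first slot: low half   */
	int32_t imm_hi = 0x01234567;		/* second slot: high half */
	uint64_t imm64 = (uint64_t)imm_hi << 32 | (uint32_t)imm;

	/* prints 0x0123456789abcdef; without the (uint32_t) cast the negative
	 * low half would sign-extend 0xffffffff over the top word */
	printf("0x%016llx\n", (unsigned long long)imm64);
	return 0;
}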
1253 /* ST: *(size *)(dst + off) = imm */
1265 /* Load imm to a register then store it */
1266 emit_a64_mov_i(1, tmp, imm, ctx);
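AArch64 has no store-with-immediate-data instruction, so the comment at line 1265 describes the only available lowering: emit_a64_mov_i() materializes the constant into a scratch register (for wide constants that becomes a movz/movk sequence) and an ordinary register store of the right size writes it to dst + off, roughly:

	mov	w10, #imm
	str	w10, [dst, #off]

(the scratch register number and the single-instruction mov are illustrative, not the JIT's exact output).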
1384 const s32 imm = insn->imm;
1392 ((imm == BPF_XCHG ||
1393 imm == (BPF_FETCH | BPF_ADD) ||
1394 imm == (BPF_FETCH | BPF_AND) ||
1395 imm == (BPF_FETCH | BPF_XOR) ||
1396 imm == (BPF_FETCH | BPF_OR)) &&
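The condition at lines 1392-1396 singles out exactly the atomics that hand a result back to the program: the BPF_FETCH variants and BPF_XCHG return the old memory value in the source register, and BPF_CMPXCHG returns it in R0, whereas the plain add/and/or/xor forms only update memory. The C11-builtin analogue of the fetch forms (illustrative only, unrelated to the JIT's code generation):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t mem = 40, src = 2;
	/* fetch-add: returns the value 'mem' held before the addition */
	uint64_t old = __atomic_fetch_add(&mem, src, __ATOMIC_SEQ_CST);

	printf("old=%llu new=%llu\n",
	       (unsigned long long)old, (unsigned long long)mem);	/* old=40 new=42 */
	return 0;
}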