Lines Matching refs:imm32

87 		return 4; /* imm32 */
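
This hit appears to be the BPF_DW case of the JIT's size-mapping helper, bpf_size_to_x86_bytes(), which the store-immediate hit at 1429 below consumes: even a 64-bit store on x86-64 only carries a sign-extended 32-bit immediate. A minimal standalone sketch of such a mapping; the function name is mine, and the BPF_* size values mirror the UAPI headers (bpf_common.h, bpf.h):

	/* BPF size field values, as in the UAPI headers */
	#define BPF_W  0x00
	#define BPF_H  0x08
	#define BPF_B  0x10
	#define BPF_DW 0x18

	/* Immediate bytes the x86 store-immediate form carries for a
	 * given BPF access size. BPF_DW still returns 4: x86-64 has no
	 * 'mov m64, imm64', only a sign-extended imm32. */
	static int size_to_imm_bytes(int bpf_size)
	{
		switch (bpf_size) {
		case BPF_B:  return 1;
		case BPF_H:  return 2;
		case BPF_W:  return 4;
		case BPF_DW: return 4; /* imm32 */
		default:     return 0;
		}
	}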
625 u32 dst_reg, const u32 imm32)
631 * Optimization: if imm32 is positive, use 'mov %eax, imm32'
632 * (which zero-extends imm32) to save 2 bytes.
634 if (sign_propagate && (s32)imm32 < 0) {
635 /* 'mov %rax, imm32' sign extends imm32 */
639 EMIT3_off32(b1, b2, add_1reg(b3, dst_reg), imm32);
644 * Optimization: if imm32 is zero, use 'xor %eax, %eax'
647 if (imm32 == 0) {
656 /* mov %eax, imm32 */
659 EMIT1_off32(add_1reg(0xB8, dst_reg), imm32);
674 * 'mov %eax, imm32' instead.
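
The hits at 625-674 come from the JIT's 32-bit move-immediate emitter, emit_mov_imm32(), which picks the cheapest of three encodings. A simplified standalone sketch of that selection; the emit8()/emit32() helpers are mine, and it handles only the legacy registers (the kernel's EMIT*() macros and add_1reg()/add_2mod() also deal with REX prefixes for r8-r15):

	#include <stdint.h>
	#include <string.h>

	static uint8_t *emit8(uint8_t *p, uint8_t b) { *p++ = b; return p; }

	static uint8_t *emit32(uint8_t *p, uint32_t v)
	{
		memcpy(p, &v, 4);	/* x86 immediates are little-endian */
		return p + 4;
	}

	/* reg: 0 = rax/eax .. 7 = rdi/edi */
	static uint8_t *mov_imm32(uint8_t *p, int sign_propagate,
				  unsigned int reg, uint32_t imm32)
	{
		if (sign_propagate && (int32_t)imm32 < 0) {
			/* mov %r64, imm32 (REX.W + C7 /0): sign-extends */
			p = emit8(p, 0x48);
			p = emit8(p, 0xC7);
			p = emit8(p, 0xC0 + reg);
			return emit32(p, imm32);
		}
		if (imm32 == 0) {
			/* xor %e32, %e32: a write to a 32-bit register
			 * zero-extends, so 2 bytes clear all 64 bits */
			p = emit8(p, 0x31);
			return emit8(p, 0xC0 + (reg << 3) + reg);
		}
		/* mov %e32, imm32 (B8+rd): zero-extends and is 2 bytes
		 * shorter than the REX.W C7 form */
		p = emit8(p, 0xB8 + reg);
		return emit32(p, imm32);
	}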
1059 const s32 imm32 = insn->imm;
1148 if (is_imm8(imm32))
1149 EMIT3(0x83, add_1reg(b3, dst_reg), imm32);
1151 EMIT1_off32(b2, imm32);
1153 EMIT2_off32(0x81, add_1reg(b3, dst_reg), imm32);
1159 dst_reg, imm32);
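
At 1148-1153 the ALU-immediate cases pick among three encodings: the sign-extended imm8 form (opcode 0x83), an accumulator short form used when dst is rax (the EMIT1_off32(b2, imm32) branch, where b2 is e.g. 0x05 for add), and the generic imm32 form (0x81). A sketch of the same choice for add; the helper name and raw buffer are mine:

	#include <stdint.h>
	#include <string.h>

	/* same range test as the JIT's is_imm8() */
	static int is_imm8(int32_t v) { return v >= -128 && v <= 127; }

	/* reg: 0 = eax .. 7 = edi */
	static uint8_t *emit_add_imm(uint8_t *p, unsigned int reg, int32_t imm)
	{
		if (is_imm8(imm)) {
			/* 83 /0 ib: 3 bytes, imm8 sign-extended */
			*p++ = 0x83; *p++ = 0xC0 + reg; *p++ = (uint8_t)imm;
		} else if (reg == 0) {
			/* 05 id: 'add eax, imm32' drops the ModRM byte */
			*p++ = 0x05;
			memcpy(p, &imm, 4); p += 4;
		} else {
			/* 81 /0 id: generic imm32 form */
			*p++ = 0x81; *p++ = 0xC0 + reg;
			memcpy(p, &imm, 4); p += 4;
		}
		return p;
	}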
1168 /* dst %= src, dst /= src, dst %= imm32, dst /= imm32 */
1192 /* mov r11, imm32 */
1193 EMIT3_off32(0x49, 0xC7, 0xC3, imm32);
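
The 'mov r11, imm32' at 1192-1193 exists because x86 div/idiv take no immediate operand: a BPF divide or modulo by a constant first stages the divisor in r11, the scratch register the JIT reserves as AUX_REG. What the three fixed bytes in that EMIT3_off32() encode, as an illustrative emitter:

	#include <stdint.h>
	#include <string.h>

	static uint8_t *emit_mov_r11_imm32(uint8_t *p, int32_t imm32)
	{
		*p++ = 0x49;	/* REX.W|REX.B: 64-bit op, rm extended to r11 */
		*p++ = 0xC7;	/* mov r/m64, imm32 (sign-extended) */
		*p++ = 0xC3;	/* ModRM: mod=11, reg=/0, rm=011 -> r11 */
		memcpy(p, &imm32, 4);
		return p + 4;
	}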
1243 if (is_imm8(imm32))
1246 imm32);
1248 /* imul dst_reg, dst_reg, imm32 */
1251 imm32);
1274 if (imm32 == 1)
1277 EMIT3(0xC1, add_1reg(b3, dst_reg), imm32);
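
The imul and shift hits (1243-1277) are the same byte trimming again: imul has a three-operand imm8 form (6B /r ib) beside its imm32 form (69 /r id), and a shift by exactly 1 has a dedicated opcode (D1) that drops the immediate byte of the generic C1 ib form. A sketch of both selections (helpers mine, legacy registers only):

	#include <stdint.h>
	#include <string.h>

	static int is_imm8(int32_t v) { return v >= -128 && v <= 127; }

	/* dst = dst * imm (imul r32, r/m32, imm) */
	static uint8_t *emit_imul_imm(uint8_t *p, unsigned int reg, int32_t imm)
	{
		uint8_t modrm = 0xC0 + (reg << 3) + reg;

		if (is_imm8(imm)) {
			*p++ = 0x6B; *p++ = modrm; *p++ = (uint8_t)imm;
		} else {
			*p++ = 0x69; *p++ = modrm;
			memcpy(p, &imm, 4); p += 4;
		}
		return p;
	}

	/* dst <<= imm (shl r/m32, 1 vs shl r/m32, imm8) */
	static uint8_t *emit_shl_imm(uint8_t *p, unsigned int reg, uint8_t imm)
	{
		if (imm == 1) {
			*p++ = 0xD1; *p++ = 0xE0 + reg;
		} else {
			*p++ = 0xC1; *p++ = 0xE0 + reg; *p++ = imm;
		}
		return p;
	}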
1341 switch (imm32) {
1373 switch (imm32) {
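
The two switch (imm32) hits at 1341 and 1373 are the BPF_END (endianness) cases: here imm32 is not an embedded constant but the operand width, 16, 32 or 64. For the byte-swapping direction the JIT emits ror/bswap instructions; the other direction only truncates and zero-extends. A portable sketch of the semantics being implemented (not of the emitted machine code):

	#include <stdint.h>

	static uint64_t bpf_end_bswap(uint64_t dst, int32_t imm32)
	{
		switch (imm32) {
		case 16: return __builtin_bswap16((uint16_t)dst);
		case 32: return __builtin_bswap32((uint32_t)dst);
		case 64: return __builtin_bswap64(dst);
		}
		return dst;
	}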
1429 EMIT(imm32, bpf_size_to_x86_bytes(BPF_SIZE(insn->code)));
1628 func = (u8 *) __bpf_call_base + imm32;
1631 if (!imm32)
1635 if (!imm32)
1645 if (imm32)
1646 emit_bpf_tail_call_direct(&bpf_prog->aux->poke_tab[imm32 - 1],
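
From 1628 on, imm32 is no longer an x86 immediate at all: for BPF_CALL it is the verifier-written offset of the helper from __bpf_call_base (a zero offset is rejected), and for tail calls it selects direct vs. indirect dispatch, doubling as a 1-based poke-table index (hence poke_tab[imm32 - 1]). The absolute helper address still has to become a rel32 displacement for a 5-byte call; a sketch of that conversion, with names mine (the kernel's equivalent lives in emit_call()/emit_patch()):

	#include <stdint.h>
	#include <stddef.h>
	#include <string.h>

	/* ip: address the call instruction will occupy in the image */
	static int emit_call_rel32(uint8_t **pprog, const void *target,
				   const uint8_t *ip)
	{
		ptrdiff_t off = (const uint8_t *)target - (ip + 5);
		int32_t rel32 = (int32_t)off;
		uint8_t *p = *pprog;

		if (off != (ptrdiff_t)rel32)
			return -1;	/* target out of rel32 range */
		*p++ = 0xE8;		/* call rel32 */
		memcpy(p, &rel32, 4);	/* relative to end of the call */
		*pprog = p + 4;
		return 0;
	}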
1696 /* test dst_reg, imm32 */
1699 EMIT2_off32(0xF7, add_1reg(0xC0, dst_reg), imm32);
1723 if (imm32 == 0) {
1734 if (is_imm8(imm32))
1735 EMIT3(0x83, add_1reg(0xF8, dst_reg), imm32);
1737 EMIT2_off32(0x81, add_1reg(0xF8, dst_reg), imm32);
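
The comparison hits (1696-1737) apply the same immediate-size games to conditional jumps: BPF_JSET maps to test, which has no imm8 form, so F7 /0 id is always emitted; a compare against 0 is replaced by the 2-byte 'test reg, reg', which sets the flags identically; otherwise cmp prefers the imm8 form 83 /7 when the value fits. A sketch (helper and buffer mine, legacy registers only):

	#include <stdint.h>
	#include <string.h>

	static int is_imm8(int32_t v) { return v >= -128 && v <= 127; }

	static uint8_t *emit_cmp_imm(uint8_t *p, unsigned int reg, int32_t imm)
	{
		if (imm == 0) {
			/* test reg, reg: same ZF/SF/CF/OF as cmp reg, 0 */
			*p++ = 0x85; *p++ = 0xC0 + (reg << 3) + reg;
		} else if (is_imm8(imm)) {
			/* 83 /7 ib */
			*p++ = 0x83; *p++ = 0xF8 + reg; *p++ = (uint8_t)imm;
		} else {
			/* 81 /7 id */
			*p++ = 0x81; *p++ = 0xF8 + reg;
			memcpy(p, &imm, 4); p += 4;
		}
		return p;
	}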
1795 * imm32 jmp_cond, the extra 4 bytes(*) are padded to
1798 * (*) imm32 jmp_cond is 6 bytes, and imm8 jmp_cond
1853 * imm32 jmp, and 5 bytes are padded.
1875 * If it emitted an imm32 jmp (5 bytes) previously
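
The last four hits (1795-1875) are about pass-to-pass stability rather than encoding. The JIT converges over several passes; once addresses are fixed, a later pass that finds a shorter jump encoding (an imm8 jmp_cond is 2 bytes against 6, an imm8 jmp is 2 against 5) must not shrink the image, so the difference is filled with NOPs in front of the jump. A sketch of the invariant; the kernel's emit_nops() uses optimal multi-byte NOPs rather than this 1-byte loop:

	#include <stdint.h>

	/* prev_len >= cur_len by construction: later passes only shrink */
	static uint8_t *pad_to_prev_len(uint8_t *p, int prev_len, int cur_len)
	{
		int i;

		for (i = 0; i < prev_len - cur_len; i++)
			*p++ = 0x90;	/* nop */
		return p;
	}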