Lines matching refs: ctxt — KVM x86 instruction emulator (arch/x86/kvm/emulate.c)

198 		int (*execute)(struct x86_emulate_ctxt *ctxt);
207 int (*check_perm)(struct x86_emulate_ctxt *ctxt);
246 static void writeback_registers(struct x86_emulate_ctxt *ctxt)
248 unsigned long dirty = ctxt->regs_dirty;
252 ctxt->ops->write_gpr(ctxt, reg, ctxt->_regs[reg]);
255 static void invalidate_registers(struct x86_emulate_ctxt *ctxt)
257 ctxt->regs_dirty = 0;
258 ctxt->regs_valid = 0;
291 static int fastop(struct x86_emulate_ctxt *ctxt, fastop_t fop);
466 static int emulator_check_intercept(struct x86_emulate_ctxt *ctxt,
472 .rep_prefix = ctxt->rep_prefix,
473 .modrm_mod = ctxt->modrm_mod,
474 .modrm_reg = ctxt->modrm_reg,
475 .modrm_rm = ctxt->modrm_rm,
476 .src_val = ctxt->src.val64,
477 .dst_val = ctxt->dst.val64,
478 .src_bytes = ctxt->src.bytes,
479 .dst_bytes = ctxt->dst.bytes,
480 .ad_bytes = ctxt->ad_bytes,
481 .next_rip = ctxt->eip,
484 return ctxt->ops->intercept(ctxt, &info, stage);
511 static inline unsigned long ad_mask(struct x86_emulate_ctxt *ctxt)
513 return (1UL << (ctxt->ad_bytes << 3)) - 1;
516 static ulong stack_mask(struct x86_emulate_ctxt *ctxt)
521 if (ctxt->mode == X86EMUL_MODE_PROT64)
523 ctxt->ops->get_segment(ctxt, &sel, &ss, NULL, VCPU_SREG_SS);
527 static int stack_size(struct x86_emulate_ctxt *ctxt)
529 return (__fls(stack_mask(ctxt)) + 1) >> 3;
534 address_mask(struct x86_emulate_ctxt *ctxt, unsigned long reg)
536 if (ctxt->ad_bytes == sizeof(unsigned long))
539 return reg & ad_mask(ctxt);
543 register_address(struct x86_emulate_ctxt *ctxt, int reg)
545 return address_mask(ctxt, reg_read(ctxt, reg));
554 register_address_increment(struct x86_emulate_ctxt *ctxt, int reg, int inc)
556 ulong *preg = reg_rmw(ctxt, reg);
558 assign_register(preg, *preg + inc, ctxt->ad_bytes);
561 static void rsp_increment(struct x86_emulate_ctxt *ctxt, int inc)
563 masked_increment(reg_rmw(ctxt, VCPU_REGS_RSP), stack_mask(ctxt), inc);
573 static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
575 if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
578 return ctxt->ops->get_cached_segment_base(ctxt, seg);
581 static int emulate_exception(struct x86_emulate_ctxt *ctxt, int vec,
584 if (KVM_EMULATOR_BUG_ON(vec > 0x1f, ctxt))
587 ctxt->exception.vector = vec;
588 ctxt->exception.error_code = error;
589 ctxt->exception.error_code_valid = valid;
593 static int emulate_db(struct x86_emulate_ctxt *ctxt)
595 return emulate_exception(ctxt, DB_VECTOR, 0, false);
598 static int emulate_gp(struct x86_emulate_ctxt *ctxt, int err)
600 return emulate_exception(ctxt, GP_VECTOR, err, true);
603 static int emulate_ss(struct x86_emulate_ctxt *ctxt, int err)
605 return emulate_exception(ctxt, SS_VECTOR, err, true);
608 static int emulate_ud(struct x86_emulate_ctxt *ctxt)
610 return emulate_exception(ctxt, UD_VECTOR, 0, false);
613 static int emulate_ts(struct x86_emulate_ctxt *ctxt, int err)
615 return emulate_exception(ctxt, TS_VECTOR, err, true);
618 static int emulate_de(struct x86_emulate_ctxt *ctxt)
620 return emulate_exception(ctxt, DE_VECTOR, 0, false);
623 static int emulate_nm(struct x86_emulate_ctxt *ctxt)
625 return emulate_exception(ctxt, NM_VECTOR, 0, false);
628 static u16 get_segment_selector(struct x86_emulate_ctxt *ctxt, unsigned seg)
633 ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg);
637 static void set_segment_selector(struct x86_emulate_ctxt *ctxt, u16 selector,
644 ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg);
645 ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg);
648 static inline u8 ctxt_virt_addr_bits(struct x86_emulate_ctxt *ctxt)
650 return (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_LA57) ? 57 : 48;
654 struct x86_emulate_ctxt *ctxt)
656 return !__is_canonical_address(la, ctxt_virt_addr_bits(ctxt));
668 static unsigned insn_alignment(struct x86_emulate_ctxt *ctxt, unsigned size)
670 u64 alignment = ctxt->d & AlignMask;
687 static __always_inline int __linearize(struct x86_emulate_ctxt *ctxt,
700 la = seg_base(ctxt, addr.seg) + addr.ea;
705 va_bits = ctxt_virt_addr_bits(ctxt);
715 usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL,
720 if ((((ctxt->mode != X86EMUL_MODE_REAL) && (desc.type & 8))
744 if (la & (insn_alignment(ctxt, size) - 1))
745 return emulate_gp(ctxt, 0);
749 return emulate_ss(ctxt, 0);
751 return emulate_gp(ctxt, 0);
754 static int linearize(struct x86_emulate_ctxt *ctxt,
760 return __linearize(ctxt, addr, &max_size, size, write, false,
761 ctxt->mode, linear);
764 static inline int assign_eip(struct x86_emulate_ctxt *ctxt, ulong dst)
772 if (ctxt->op_bytes != sizeof(unsigned long))
773 addr.ea = dst & ((1UL << (ctxt->op_bytes << 3)) - 1);
774 rc = __linearize(ctxt, addr, &max_size, 1, false, true, ctxt->mode, &linear);
776 ctxt->_eip = addr.ea;
780 static inline int emulator_recalc_and_set_mode(struct x86_emulate_ctxt *ctxt)
787 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
789 if (!(ctxt->ops->get_cr(ctxt, 0) & X86_CR0_PE)) {
793 ctxt->mode = X86EMUL_MODE_REAL;
797 if (ctxt->eflags & X86_EFLAGS_VM) {
801 ctxt->mode = X86EMUL_MODE_VM86;
805 if (!ctxt->ops->get_segment(ctxt, &selector, &cs, &base3, VCPU_SREG_CS))
811 ctxt->mode = X86EMUL_MODE_PROT64;
814 ctxt->mode = X86EMUL_MODE_PROT32;
816 ctxt->mode = X86EMUL_MODE_PROT16;
820 ctxt->mode = cs.d ? X86EMUL_MODE_PROT32 : X86EMUL_MODE_PROT16;
826 static inline int assign_eip_near(struct x86_emulate_ctxt *ctxt, ulong dst)
828 return assign_eip(ctxt, dst);
831 static int assign_eip_far(struct x86_emulate_ctxt *ctxt, ulong dst)
833 int rc = emulator_recalc_and_set_mode(ctxt);
838 return assign_eip(ctxt, dst);
841 static inline int jmp_rel(struct x86_emulate_ctxt *ctxt, int rel)
843 return assign_eip_near(ctxt, ctxt->_eip + rel);
846 static int linear_read_system(struct x86_emulate_ctxt *ctxt, ulong linear,
849 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, true);
852 static int linear_write_system(struct x86_emulate_ctxt *ctxt,
856 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, true);
859 static int segmented_read_std(struct x86_emulate_ctxt *ctxt,
867 rc = linearize(ctxt, addr, size, false, &linear);
870 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, false);
873 static int segmented_write_std(struct x86_emulate_ctxt *ctxt,
881 rc = linearize(ctxt, addr, size, true, &linear);
884 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, false);
891 static int __do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt, int op_size)
896 int cur_size = ctxt->fetch.end - ctxt->fetch.data;
898 .ea = ctxt->eip + cur_size };
910 rc = __linearize(ctxt, addr, &max_size, 0, false, true, ctxt->mode,
925 return emulate_gp(ctxt, 0);
927 rc = ctxt->ops->fetch(ctxt, linear, ctxt->fetch.end,
928 size, &ctxt->exception);
931 ctxt->fetch.end += size;
935 static __always_inline int do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt,
938 unsigned done_size = ctxt->fetch.end - ctxt->fetch.ptr;
941 return __do_insn_fetch_bytes(ctxt, size - done_size);
953 ctxt->_eip += sizeof(_type); \
954 memcpy(&_x, ctxt->fetch.ptr, sizeof(_type)); \
955 ctxt->fetch.ptr += sizeof(_type); \
964 ctxt->_eip += (_size); \
965 memcpy(_arr, ctxt->fetch.ptr, _size); \
966 ctxt->fetch.ptr += (_size); \
974 static void *decode_register(struct x86_emulate_ctxt *ctxt, u8 modrm_reg,
978 int highbyte_regs = (ctxt->rex_prefix == 0) && byteop;
981 p = (unsigned char *)reg_rmw(ctxt, modrm_reg & 3) + 1;
983 p = reg_rmw(ctxt, modrm_reg);
987 static int read_descriptor(struct x86_emulate_ctxt *ctxt,
996 rc = segmented_read_std(ctxt, addr, size, 2);
1000 rc = segmented_read_std(ctxt, addr, address, op_bytes);
1048 static int em_bsf_c(struct x86_emulate_ctxt *ctxt)
1051 if (ctxt->src.val == 0)
1052 ctxt->dst.type = OP_NONE;
1053 return fastop(ctxt, em_bsf);
1056 static int em_bsr_c(struct x86_emulate_ctxt *ctxt)
1059 if (ctxt->src.val == 0)
1060 ctxt->dst.type = OP_NONE;
1061 return fastop(ctxt, em_bsr);
1093 static int em_fninit(struct x86_emulate_ctxt *ctxt)
1095 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1096 return emulate_nm(ctxt);
1104 static int em_fnstcw(struct x86_emulate_ctxt *ctxt)
1108 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1109 return emulate_nm(ctxt);
1115 ctxt->dst.val = fcw;
1120 static int em_fnstsw(struct x86_emulate_ctxt *ctxt)
1124 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1125 return emulate_nm(ctxt);
1131 ctxt->dst.val = fsw;
1136 static void decode_register_operand(struct x86_emulate_ctxt *ctxt,
1141 if (ctxt->d & ModRM)
1142 reg = ctxt->modrm_reg;
1144 reg = (ctxt->b & 7) | ((ctxt->rex_prefix & 1) << 3);
1146 if (ctxt->d & Sse) {
1153 if (ctxt->d & Mmx) {
1162 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
1163 op->addr.reg = decode_register(ctxt, reg, ctxt->d & ByteOp);
1169 static void adjust_modrm_seg(struct x86_emulate_ctxt *ctxt, int base_reg)
1172 ctxt->modrm_seg = VCPU_SREG_SS;
1175 static int decode_modrm(struct x86_emulate_ctxt *ctxt,
1183 ctxt->modrm_reg = ((ctxt->rex_prefix << 1) & 8); /* REX.R */
1184 index_reg = (ctxt->rex_prefix << 2) & 8; /* REX.X */
1185 base_reg = (ctxt->rex_prefix << 3) & 8; /* REX.B */
1187 ctxt->modrm_mod = (ctxt->modrm & 0xc0) >> 6;
1188 ctxt->modrm_reg |= (ctxt->modrm & 0x38) >> 3;
1189 ctxt->modrm_rm = base_reg | (ctxt->modrm & 0x07);
1190 ctxt->modrm_seg = VCPU_SREG_DS;
1192 if (ctxt->modrm_mod == 3 || (ctxt->d & NoMod)) {
1194 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
1195 op->addr.reg = decode_register(ctxt, ctxt->modrm_rm,
1196 ctxt->d & ByteOp);
1197 if (ctxt->d & Sse) {
1200 op->addr.xmm = ctxt->modrm_rm;
1201 kvm_read_sse_reg(ctxt->modrm_rm, &op->vec_val);
1204 if (ctxt->d & Mmx) {
1207 op->addr.mm = ctxt->modrm_rm & 7;
1216 if (ctxt->ad_bytes == 2) {
1217 unsigned bx = reg_read(ctxt, VCPU_REGS_RBX);
1218 unsigned bp = reg_read(ctxt, VCPU_REGS_RBP);
1219 unsigned si = reg_read(ctxt, VCPU_REGS_RSI);
1220 unsigned di = reg_read(ctxt, VCPU_REGS_RDI);
1223 switch (ctxt->modrm_mod) {
1225 if (ctxt->modrm_rm == 6)
1226 modrm_ea += insn_fetch(u16, ctxt);
1229 modrm_ea += insn_fetch(s8, ctxt);
1232 modrm_ea += insn_fetch(u16, ctxt);
1235 switch (ctxt->modrm_rm) {
1255 if (ctxt->modrm_mod != 0)
1262 if (ctxt->modrm_rm == 2 || ctxt->modrm_rm == 3 ||
1263 (ctxt->modrm_rm == 6 && ctxt->modrm_mod != 0))
1264 ctxt->modrm_seg = VCPU_SREG_SS;
1268 if ((ctxt->modrm_rm & 7) == 4) {
1269 sib = insn_fetch(u8, ctxt);
1274 if ((base_reg & 7) == 5 && ctxt->modrm_mod == 0)
1275 modrm_ea += insn_fetch(s32, ctxt);
1277 modrm_ea += reg_read(ctxt, base_reg);
1278 adjust_modrm_seg(ctxt, base_reg);
1280 if ((ctxt->d & IncSP) &&
1282 modrm_ea += ctxt->op_bytes;
1285 modrm_ea += reg_read(ctxt, index_reg) << scale;
1286 } else if ((ctxt->modrm_rm & 7) == 5 && ctxt->modrm_mod == 0) {
1287 modrm_ea += insn_fetch(s32, ctxt);
1288 if (ctxt->mode == X86EMUL_MODE_PROT64)
1289 ctxt->rip_relative = 1;
1291 base_reg = ctxt->modrm_rm;
1292 modrm_ea += reg_read(ctxt, base_reg);
1293 adjust_modrm_seg(ctxt, base_reg);
1295 switch (ctxt->modrm_mod) {
1297 modrm_ea += insn_fetch(s8, ctxt);
1300 modrm_ea += insn_fetch(s32, ctxt);
1305 if (ctxt->ad_bytes != 8)
1306 ctxt->memop.addr.mem.ea = (u32)ctxt->memop.addr.mem.ea;
1312 static int decode_abs(struct x86_emulate_ctxt *ctxt,
1318 switch (ctxt->ad_bytes) {
1320 op->addr.mem.ea = insn_fetch(u16, ctxt);
1323 op->addr.mem.ea = insn_fetch(u32, ctxt);
1326 op->addr.mem.ea = insn_fetch(u64, ctxt);
1333 static void fetch_bit_operand(struct x86_emulate_ctxt *ctxt)
1337 if (ctxt->dst.type == OP_MEM && ctxt->src.type == OP_REG) {
1338 mask = ~((long)ctxt->dst.bytes * 8 - 1);
1340 if (ctxt->src.bytes == 2)
1341 sv = (s16)ctxt->src.val & (s16)mask;
1342 else if (ctxt->src.bytes == 4)
1343 sv = (s32)ctxt->src.val & (s32)mask;
1345 sv = (s64)ctxt->src.val & (s64)mask;
1347 ctxt->dst.addr.mem.ea = address_mask(ctxt,
1348 ctxt->dst.addr.mem.ea + (sv >> 3));
1352 ctxt->src.val &= (ctxt->dst.bytes << 3) - 1;
1355 static int read_emulated(struct x86_emulate_ctxt *ctxt,
1359 struct read_cache *mc = &ctxt->mem_read;
1364 if (KVM_EMULATOR_BUG_ON((mc->end + size) >= sizeof(mc->data), ctxt))
1367 rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, size,
1368 &ctxt->exception);
1380 static int segmented_read(struct x86_emulate_ctxt *ctxt,
1388 rc = linearize(ctxt, addr, size, false, &linear);
1391 return read_emulated(ctxt, linear, data, size);
1394 static int segmented_write(struct x86_emulate_ctxt *ctxt,
1402 rc = linearize(ctxt, addr, size, true, &linear);
1405 return ctxt->ops->write_emulated(ctxt, linear, data, size,
1406 &ctxt->exception);
1409 static int segmented_cmpxchg(struct x86_emulate_ctxt *ctxt,
1417 rc = linearize(ctxt, addr, size, true, &linear);
1420 return ctxt->ops->cmpxchg_emulated(ctxt, linear, orig_data, data,
1421 size, &ctxt->exception);
1424 static int pio_in_emulated(struct x86_emulate_ctxt *ctxt,
1428 struct read_cache *rc = &ctxt->io_read;
1432 unsigned int count = ctxt->rep_prefix ?
1433 address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) : 1;
1434 in_page = (ctxt->eflags & X86_EFLAGS_DF) ?
1435 offset_in_page(reg_read(ctxt, VCPU_REGS_RDI)) :
1436 PAGE_SIZE - offset_in_page(reg_read(ctxt, VCPU_REGS_RDI));
1441 if (!ctxt->ops->pio_in_emulated(ctxt, size, port, rc->data, n))
1446 if (ctxt->rep_prefix && (ctxt->d & String) &&
1447 !(ctxt->eflags & X86_EFLAGS_DF)) {
1448 ctxt->dst.data = rc->data + rc->pos;
1449 ctxt->dst.type = OP_MEM_STR;
1450 ctxt->dst.count = (rc->end - rc->pos) / size;
1459 static int read_interrupt_descriptor(struct x86_emulate_ctxt *ctxt,
1465 ctxt->ops->get_idt(ctxt, &dt);
1468 return emulate_gp(ctxt, index << 3 | 0x2);
1471 return linear_read_system(ctxt, addr, desc, sizeof(*desc));
1474 static void get_descriptor_table_ptr(struct x86_emulate_ctxt *ctxt,
1477 const struct x86_emulate_ops *ops = ctxt->ops;
1485 if (!ops->get_segment(ctxt, &sel, &desc, &base3,
1492 ops->get_gdt(ctxt, dt);
1495 static int get_descriptor_ptr(struct x86_emulate_ctxt *ctxt,
1502 get_descriptor_table_ptr(ctxt, selector, &dt);
1505 return emulate_gp(ctxt, selector & 0xfffc);
1513 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1524 static int read_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1530 rc = get_descriptor_ptr(ctxt, selector, desc_addr_p);
1534 return linear_read_system(ctxt, *desc_addr_p, desc, sizeof(*desc));
1538 static int write_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1544 rc = get_descriptor_ptr(ctxt, selector, &addr);
1548 return linear_write_system(ctxt, addr, desc, sizeof(*desc));
1551 static int __load_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1568 if (ctxt->mode == X86EMUL_MODE_REAL) {
1571 ctxt->ops->get_segment(ctxt, &dummy, &seg_desc, NULL, seg);
1574 } else if (seg <= VCPU_SREG_GS && ctxt->mode == X86EMUL_MODE_VM86) {
1597 if (ctxt->mode != X86EMUL_MODE_PROT64 || rpl != cpl)
1601 * ctxt->ops->set_segment expects the CPL to be in
1616 ret = read_segment_descriptor(ctxt, selector, &seg_desc, &desc_addr);
1687 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1725 ret = write_segment_descriptor(ctxt, selector,
1730 } else if (ctxt->mode == X86EMUL_MODE_PROT64) {
1731 ret = linear_read_system(ctxt, desc_addr+8, &base3, sizeof(base3));
1735 ((u64)base3 << 32), ctxt))
1736 return emulate_gp(ctxt, err_code);
1742 ret = ctxt->ops->cmpxchg_emulated(ctxt, desc_addr, &old_desc, &seg_desc,
1743 sizeof(seg_desc), &ctxt->exception);
1748 ctxt->ops->set_segment(ctxt, selector, &seg_desc, base3, seg);
1753 return emulate_exception(ctxt, err_vec, err_code, true);
1756 static int load_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1759 u8 cpl = ctxt->ops->cpl(ctxt);
1772 ctxt->mode == X86EMUL_MODE_PROT64)
1773 return emulate_exception(ctxt, GP_VECTOR, 0, true);
1775 return __load_segment_descriptor(ctxt, selector, seg, cpl,
1784 static int writeback(struct x86_emulate_ctxt *ctxt, struct operand *op)
1791 if (ctxt->lock_prefix)
1792 return segmented_cmpxchg(ctxt,
1798 return segmented_write(ctxt,
1803 return segmented_write(ctxt,
1822 static int push(struct x86_emulate_ctxt *ctxt, void *data, int bytes)
1826 rsp_increment(ctxt, -bytes);
1827 addr.ea = reg_read(ctxt, VCPU_REGS_RSP) & stack_mask(ctxt);
1830 return segmented_write(ctxt, addr, data, bytes);
1833 static int em_push(struct x86_emulate_ctxt *ctxt)
1836 ctxt->dst.type = OP_NONE;
1837 return push(ctxt, &ctxt->src.val, ctxt->op_bytes);
1840 static int emulate_pop(struct x86_emulate_ctxt *ctxt,
1846 addr.ea = reg_read(ctxt, VCPU_REGS_RSP) & stack_mask(ctxt);
1848 rc = segmented_read(ctxt, addr, dest, len);
1852 rsp_increment(ctxt, len);
1856 static int em_pop(struct x86_emulate_ctxt *ctxt)
1858 return emulate_pop(ctxt, &ctxt->dst.val, ctxt->op_bytes);
1861 static int emulate_popf(struct x86_emulate_ctxt *ctxt,
1866 int iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT;
1867 int cpl = ctxt->ops->cpl(ctxt);
1869 rc = emulate_pop(ctxt, &val, len);
1878 switch(ctxt->mode) {
1889 return emulate_gp(ctxt, 0);
1898 (ctxt->eflags & ~change_mask) | (val & change_mask);
1903 static int em_popf(struct x86_emulate_ctxt *ctxt)
1905 ctxt->dst.type = OP_REG;
1906 ctxt->dst.addr.reg = &ctxt->eflags;
1907 ctxt->dst.bytes = ctxt->op_bytes;
1908 return emulate_popf(ctxt, &ctxt->dst.val, ctxt->op_bytes);
1911 static int em_enter(struct x86_emulate_ctxt *ctxt)
1914 unsigned frame_size = ctxt->src.val;
1915 unsigned nesting_level = ctxt->src2.val & 31;
1921 rbp = reg_read(ctxt, VCPU_REGS_RBP);
1922 rc = push(ctxt, &rbp, stack_size(ctxt));
1925 assign_masked(reg_rmw(ctxt, VCPU_REGS_RBP), reg_read(ctxt, VCPU_REGS_RSP),
1926 stack_mask(ctxt));
1927 assign_masked(reg_rmw(ctxt, VCPU_REGS_RSP),
1928 reg_read(ctxt, VCPU_REGS_RSP) - frame_size,
1929 stack_mask(ctxt));
1933 static int em_leave(struct x86_emulate_ctxt *ctxt)
1935 assign_masked(reg_rmw(ctxt, VCPU_REGS_RSP), reg_read(ctxt, VCPU_REGS_RBP),
1936 stack_mask(ctxt));
1937 return emulate_pop(ctxt, reg_rmw(ctxt, VCPU_REGS_RBP), ctxt->op_bytes);
1940 static int em_push_sreg(struct x86_emulate_ctxt *ctxt)
1942 int seg = ctxt->src2.val;
1944 ctxt->src.val = get_segment_selector(ctxt, seg);
1945 if (ctxt->op_bytes == 4) {
1946 rsp_increment(ctxt, -2);
1947 ctxt->op_bytes = 2;
1950 return em_push(ctxt);
1953 static int em_pop_sreg(struct x86_emulate_ctxt *ctxt)
1955 int seg = ctxt->src2.val;
1959 rc = emulate_pop(ctxt, &selector, 2);
1964 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS;
1965 if (ctxt->op_bytes > 2)
1966 rsp_increment(ctxt, ctxt->op_bytes - 2);
1968 rc = load_segment_descriptor(ctxt, (u16)selector, seg);
1972 static int em_pusha(struct x86_emulate_ctxt *ctxt)
1974 unsigned long old_esp = reg_read(ctxt, VCPU_REGS_RSP);
1980 (ctxt->src.val = old_esp) : (ctxt->src.val = reg_read(ctxt, reg));
1982 rc = em_push(ctxt);
1992 static int em_pushf(struct x86_emulate_ctxt *ctxt)
1994 ctxt->src.val = (unsigned long)ctxt->eflags & ~X86_EFLAGS_VM;
1995 return em_push(ctxt);
1998 static int em_popa(struct x86_emulate_ctxt *ctxt)
2006 rsp_increment(ctxt, ctxt->op_bytes);
2010 rc = emulate_pop(ctxt, &val, ctxt->op_bytes);
2013 assign_register(reg_rmw(ctxt, reg), val, ctxt->op_bytes);
2019 static int __emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq)
2021 const struct x86_emulate_ops *ops = ctxt->ops;
2029 ctxt->src.val = ctxt->eflags;
2030 rc = em_push(ctxt);
2034 ctxt->eflags &= ~(X86_EFLAGS_IF | X86_EFLAGS_TF | X86_EFLAGS_AC);
2036 ctxt->src.val = get_segment_selector(ctxt, VCPU_SREG_CS);
2037 rc = em_push(ctxt);
2041 ctxt->src.val = ctxt->_eip;
2042 rc = em_push(ctxt);
2046 ops->get_idt(ctxt, &dt);
2051 rc = linear_read_system(ctxt, cs_addr, &cs, 2);
2055 rc = linear_read_system(ctxt, eip_addr, &eip, 2);
2059 rc = load_segment_descriptor(ctxt, cs, VCPU_SREG_CS);
2063 ctxt->_eip = eip;
2068 int emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq)
2072 invalidate_registers(ctxt);
2073 rc = __emulate_int_real(ctxt, irq);
2075 writeback_registers(ctxt);
2079 static int emulate_int(struct x86_emulate_ctxt *ctxt, int irq)
2081 switch(ctxt->mode) {
2083 return __emulate_int_real(ctxt, irq);
2094 static int emulate_iret_real(struct x86_emulate_ctxt *ctxt)
2111 rc = emulate_pop(ctxt, &temp_eip, ctxt->op_bytes);
2117 return emulate_gp(ctxt, 0);
2119 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes);
2124 rc = emulate_pop(ctxt, &temp_eflags, ctxt->op_bytes);
2129 rc = load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS);
2134 ctxt->_eip = temp_eip;
2136 if (ctxt->op_bytes == 4)
2137 ctxt->eflags = ((temp_eflags & mask) | (ctxt->eflags & vm86_mask));
2138 else if (ctxt->op_bytes == 2) {
2139 ctxt->eflags &= ~0xffff;
2140 ctxt->eflags |= temp_eflags;
2143 ctxt->eflags &= ~EFLG_RESERVED_ZEROS_MASK; /* Clear reserved zeros */
2144 ctxt->eflags |= X86_EFLAGS_FIXED;
2145 ctxt->ops->set_nmi_mask(ctxt, false);
2150 static int em_iret(struct x86_emulate_ctxt *ctxt)
2152 switch(ctxt->mode) {
2154 return emulate_iret_real(ctxt);
2165 static int em_jmp_far(struct x86_emulate_ctxt *ctxt)
2170 u8 cpl = ctxt->ops->cpl(ctxt);
2172 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
2174 rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl,
2180 rc = assign_eip_far(ctxt, ctxt->src.val);
2188 static int em_jmp_abs(struct x86_emulate_ctxt *ctxt)
2190 return assign_eip_near(ctxt, ctxt->src.val);
2193 static int em_call_near_abs(struct x86_emulate_ctxt *ctxt)
2198 old_eip = ctxt->_eip;
2199 rc = assign_eip_near(ctxt, ctxt->src.val);
2202 ctxt->src.val = old_eip;
2203 rc = em_push(ctxt);
2207 static int em_cmpxchg8b(struct x86_emulate_ctxt *ctxt)
2209 u64 old = ctxt->dst.orig_val64;
2211 if (ctxt->dst.bytes == 16)
2214 if (((u32) (old >> 0) != (u32) reg_read(ctxt, VCPU_REGS_RAX)) ||
2215 ((u32) (old >> 32) != (u32) reg_read(ctxt, VCPU_REGS_RDX))) {
2216 *reg_write(ctxt, VCPU_REGS_RAX) = (u32) (old >> 0);
2217 *reg_write(ctxt, VCPU_REGS_RDX) = (u32) (old >> 32);
2218 ctxt->eflags &= ~X86_EFLAGS_ZF;
2220 ctxt->dst.val64 = ((u64)reg_read(ctxt, VCPU_REGS_RCX) << 32) |
2221 (u32) reg_read(ctxt, VCPU_REGS_RBX);
2223 ctxt->eflags |= X86_EFLAGS_ZF;
2228 static int em_ret(struct x86_emulate_ctxt *ctxt)
2233 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes);
2237 return assign_eip_near(ctxt, eip);
2240 static int em_ret_far(struct x86_emulate_ctxt *ctxt)
2244 int cpl = ctxt->ops->cpl(ctxt);
2247 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes);
2250 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes);
2253 rc = __load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS, cpl,
2258 rc = assign_eip_far(ctxt, eip);
2266 static int em_ret_far_imm(struct x86_emulate_ctxt *ctxt)
2270 rc = em_ret_far(ctxt);
2273 rsp_increment(ctxt, ctxt->src.val);
2277 static int em_cmpxchg(struct x86_emulate_ctxt *ctxt)
2280 ctxt->dst.orig_val = ctxt->dst.val;
2281 ctxt->dst.val = reg_read(ctxt, VCPU_REGS_RAX);
2282 ctxt->src.orig_val = ctxt->src.val;
2283 ctxt->src.val = ctxt->dst.orig_val;
2284 fastop(ctxt, em_cmp);
2286 if (ctxt->eflags & X86_EFLAGS_ZF) {
2288 ctxt->src.type = OP_NONE;
2289 ctxt->dst.val = ctxt->src.orig_val;
2292 ctxt->src.type = OP_REG;
2293 ctxt->src.addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX);
2294 ctxt->src.val = ctxt->dst.orig_val;
2296 ctxt->dst.val = ctxt->dst.orig_val;
2301 static int em_lseg(struct x86_emulate_ctxt *ctxt)
2303 int seg = ctxt->src2.val;
2307 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
2309 rc = load_segment_descriptor(ctxt, sel, seg);
2313 ctxt->dst.val = ctxt->src.val;
2317 static int em_rsm(struct x86_emulate_ctxt *ctxt)
2319 if (!ctxt->ops->is_smm(ctxt))
2320 return emulate_ud(ctxt);
2322 if (ctxt->ops->leave_smm(ctxt))
2323 ctxt->ops->triple_fault(ctxt);
2325 return emulator_recalc_and_set_mode(ctxt);
2354 static bool vendor_intel(struct x86_emulate_ctxt *ctxt)
2359 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
2363 static bool em_syscall_is_enabled(struct x86_emulate_ctxt *ctxt)
2365 const struct x86_emulate_ops *ops = ctxt->ops;
2372 if (ctxt->mode == X86EMUL_MODE_PROT64)
2377 ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
2398 static int em_syscall(struct x86_emulate_ctxt *ctxt)
2400 const struct x86_emulate_ops *ops = ctxt->ops;
2407 if (ctxt->mode == X86EMUL_MODE_REAL ||
2408 ctxt->mode == X86EMUL_MODE_VM86)
2409 return emulate_ud(ctxt);
2411 if (!(em_syscall_is_enabled(ctxt)))
2412 return emulate_ud(ctxt);
2414 ops->get_msr(ctxt, MSR_EFER, &efer);
2416 return emulate_ud(ctxt);
2419 ops->get_msr(ctxt, MSR_STAR, &msr_data);
2428 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2429 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2431 *reg_write(ctxt, VCPU_REGS_RCX) = ctxt->_eip;
2434 *reg_write(ctxt, VCPU_REGS_R11) = ctxt->eflags;
2436 ops->get_msr(ctxt,
2437 ctxt->mode == X86EMUL_MODE_PROT64 ?
2439 ctxt->_eip = msr_data;
2441 ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data);
2442 ctxt->eflags &= ~msr_data;
2443 ctxt->eflags |= X86_EFLAGS_FIXED;
2447 ops->get_msr(ctxt, MSR_STAR, &msr_data);
2448 ctxt->_eip = (u32)msr_data;
2450 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF);
2453 ctxt->tf = (ctxt->eflags & X86_EFLAGS_TF) != 0;
2457 static int em_sysenter(struct x86_emulate_ctxt *ctxt)
2459 const struct x86_emulate_ops *ops = ctxt->ops;
2465 ops->get_msr(ctxt, MSR_EFER, &efer);
2467 if (ctxt->mode == X86EMUL_MODE_REAL)
2468 return emulate_gp(ctxt, 0);
2474 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA)
2475 && !vendor_intel(ctxt))
2476 return emulate_ud(ctxt);
2479 if (ctxt->mode == X86EMUL_MODE_PROT64)
2482 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
2484 return emulate_gp(ctxt, 0);
2487 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF);
2495 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2496 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2498 ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data);
2499 ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data;
2501 ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data);
2502 *reg_write(ctxt, VCPU_REGS_RSP) = (efer & EFER_LMA) ? msr_data :
2505 ctxt->mode = X86EMUL_MODE_PROT64;
2510 static int em_sysexit(struct x86_emulate_ctxt *ctxt)
2512 const struct x86_emulate_ops *ops = ctxt->ops;
2519 if (ctxt->mode == X86EMUL_MODE_REAL ||
2520 ctxt->mode == X86EMUL_MODE_VM86)
2521 return emulate_gp(ctxt, 0);
2525 if ((ctxt->rex_prefix & 0x8) != 0x0)
2530 rcx = reg_read(ctxt, VCPU_REGS_RCX);
2531 rdx = reg_read(ctxt, VCPU_REGS_RDX);
2535 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
2540 return emulate_gp(ctxt, 0);
2548 return emulate_gp(ctxt, 0);
2552 if (emul_is_noncanonical_address(rcx, ctxt) ||
2553 emul_is_noncanonical_address(rdx, ctxt))
2554 return emulate_gp(ctxt, 0);
2560 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2561 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2563 ctxt->_eip = rdx;
2564 ctxt->mode = usermode;
2565 *reg_write(ctxt, VCPU_REGS_RSP) = rcx;
2570 static bool emulator_bad_iopl(struct x86_emulate_ctxt *ctxt)
2573 if (ctxt->mode == X86EMUL_MODE_REAL)
2575 if (ctxt->mode == X86EMUL_MODE_VM86)
2577 iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT;
2578 return ctxt->ops->cpl(ctxt) > iopl;
2584 static bool emulator_io_port_access_allowed(struct x86_emulate_ctxt *ctxt,
2587 const struct x86_emulate_ops *ops = ctxt->ops;
2603 ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR);
2612 r = ops->read_std(ctxt, base + 102, &io_bitmap_ptr, 2, NULL, true);
2617 r = ops->read_std(ctxt, base + io_bitmap_ptr + port/8, &perm, 2, NULL, true);
2625 static bool emulator_io_permitted(struct x86_emulate_ctxt *ctxt,
2628 if (ctxt->perm_ok)
2631 if (emulator_bad_iopl(ctxt))
2632 if (!emulator_io_port_access_allowed(ctxt, port, len))
2635 ctxt->perm_ok = true;
2640 static void string_registers_quirk(struct x86_emulate_ctxt *ctxt)
2647 if (ctxt->ad_bytes != 4 || !vendor_intel(ctxt))
2650 *reg_write(ctxt, VCPU_REGS_RCX) = 0;
2652 switch (ctxt->b) {
2655 *reg_rmw(ctxt, VCPU_REGS_RSI) &= (u32)-1;
2659 *reg_rmw(ctxt, VCPU_REGS_RDI) &= (u32)-1;
2664 static void save_state_to_tss16(struct x86_emulate_ctxt *ctxt,
2667 tss->ip = ctxt->_eip;
2668 tss->flag = ctxt->eflags;
2669 tss->ax = reg_read(ctxt, VCPU_REGS_RAX);
2670 tss->cx = reg_read(ctxt, VCPU_REGS_RCX);
2671 tss->dx = reg_read(ctxt, VCPU_REGS_RDX);
2672 tss->bx = reg_read(ctxt, VCPU_REGS_RBX);
2673 tss->sp = reg_read(ctxt, VCPU_REGS_RSP);
2674 tss->bp = reg_read(ctxt, VCPU_REGS_RBP);
2675 tss->si = reg_read(ctxt, VCPU_REGS_RSI);
2676 tss->di = reg_read(ctxt, VCPU_REGS_RDI);
2678 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES);
2679 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS);
2680 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS);
2681 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS);
2682 tss->ldt = get_segment_selector(ctxt, VCPU_SREG_LDTR);
2685 static int load_state_from_tss16(struct x86_emulate_ctxt *ctxt,
2691 ctxt->_eip = tss->ip;
2692 ctxt->eflags = tss->flag | 2;
2693 *reg_write(ctxt, VCPU_REGS_RAX) = tss->ax;
2694 *reg_write(ctxt, VCPU_REGS_RCX) = tss->cx;
2695 *reg_write(ctxt, VCPU_REGS_RDX) = tss->dx;
2696 *reg_write(ctxt, VCPU_REGS_RBX) = tss->bx;
2697 *reg_write(ctxt, VCPU_REGS_RSP) = tss->sp;
2698 *reg_write(ctxt, VCPU_REGS_RBP) = tss->bp;
2699 *reg_write(ctxt, VCPU_REGS_RSI) = tss->si;
2700 *reg_write(ctxt, VCPU_REGS_RDI) = tss->di;
2706 set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR);
2707 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
2708 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
2709 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
2710 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
2718 ret = __load_segment_descriptor(ctxt, tss->ldt, VCPU_SREG_LDTR, cpl,
2722 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl,
2726 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl,
2730 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl,
2734 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl,
2742 static int task_switch_16(struct x86_emulate_ctxt *ctxt, u16 old_tss_sel,
2749 ret = linear_read_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg));
2753 save_state_to_tss16(ctxt, &tss_seg);
2755 ret = linear_write_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg));
2759 ret = linear_read_system(ctxt, new_tss_base, &tss_seg, sizeof(tss_seg));
2766 ret = linear_write_system(ctxt, new_tss_base,
2773 return load_state_from_tss16(ctxt, &tss_seg);
2776 static void save_state_to_tss32(struct x86_emulate_ctxt *ctxt,
2780 tss->eip = ctxt->_eip;
2781 tss->eflags = ctxt->eflags;
2782 tss->eax = reg_read(ctxt, VCPU_REGS_RAX);
2783 tss->ecx = reg_read(ctxt, VCPU_REGS_RCX);
2784 tss->edx = reg_read(ctxt, VCPU_REGS_RDX);
2785 tss->ebx = reg_read(ctxt, VCPU_REGS_RBX);
2786 tss->esp = reg_read(ctxt, VCPU_REGS_RSP);
2787 tss->ebp = reg_read(ctxt, VCPU_REGS_RBP);
2788 tss->esi = reg_read(ctxt, VCPU_REGS_RSI);
2789 tss->edi = reg_read(ctxt, VCPU_REGS_RDI);
2791 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES);
2792 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS);
2793 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS);
2794 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS);
2795 tss->fs = get_segment_selector(ctxt, VCPU_SREG_FS);
2796 tss->gs = get_segment_selector(ctxt, VCPU_SREG_GS);
2799 static int load_state_from_tss32(struct x86_emulate_ctxt *ctxt,
2805 if (ctxt->ops->set_cr(ctxt, 3, tss->cr3))
2806 return emulate_gp(ctxt, 0);
2807 ctxt->_eip = tss->eip;
2808 ctxt->eflags = tss->eflags | 2;
2811 *reg_write(ctxt, VCPU_REGS_RAX) = tss->eax;
2812 *reg_write(ctxt, VCPU_REGS_RCX) = tss->ecx;
2813 *reg_write(ctxt, VCPU_REGS_RDX) = tss->edx;
2814 *reg_write(ctxt, VCPU_REGS_RBX) = tss->ebx;
2815 *reg_write(ctxt, VCPU_REGS_RSP) = tss->esp;
2816 *reg_write(ctxt, VCPU_REGS_RBP) = tss->ebp;
2817 *reg_write(ctxt, VCPU_REGS_RSI) = tss->esi;
2818 *reg_write(ctxt, VCPU_REGS_RDI) = tss->edi;
2825 set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR);
2826 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
2827 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
2828 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
2829 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
2830 set_segment_selector(ctxt, tss->fs, VCPU_SREG_FS);
2831 set_segment_selector(ctxt, tss->gs, VCPU_SREG_GS);
2838 if (ctxt->eflags & X86_EFLAGS_VM) {
2839 ctxt->mode = X86EMUL_MODE_VM86;
2842 ctxt->mode = X86EMUL_MODE_PROT32;
2850 ret = __load_segment_descriptor(ctxt, tss->ldt_selector, VCPU_SREG_LDTR,
2854 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl,
2858 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl,
2862 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl,
2866 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl,
2870 ret = __load_segment_descriptor(ctxt, tss->fs, VCPU_SREG_FS, cpl,
2874 ret = __load_segment_descriptor(ctxt, tss->gs, VCPU_SREG_GS, cpl,
2880 static int task_switch_32(struct x86_emulate_ctxt *ctxt, u16 old_tss_sel,
2889 ret = linear_read_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg));
2893 save_state_to_tss32(ctxt, &tss_seg);
2896 ret = linear_write_system(ctxt, old_tss_base + eip_offset, &tss_seg.eip,
2901 ret = linear_read_system(ctxt, new_tss_base, &tss_seg, sizeof(tss_seg));
2908 ret = linear_write_system(ctxt, new_tss_base,
2915 return load_state_from_tss32(ctxt, &tss_seg);
2918 static int emulator_do_task_switch(struct x86_emulate_ctxt *ctxt,
2922 const struct x86_emulate_ops *ops = ctxt->ops;
2925 u16 old_tss_sel = get_segment_selector(ctxt, VCPU_SREG_TR);
2927 ops->get_cached_segment_base(ctxt, VCPU_SREG_TR);
2933 ret = read_segment_descriptor(ctxt, tss_selector, &next_tss_desc, &desc_addr);
2936 ret = read_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc, &desc_addr);
2956 ret = read_interrupt_descriptor(ctxt, idt_index,
2962 if ((tss_selector & 3) > dpl || ops->cpl(ctxt) > dpl)
2963 return emulate_gp(ctxt, (idt_index << 3) | 0x2);
2971 return emulate_ts(ctxt, tss_selector & 0xfffc);
2976 write_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc);
2980 ctxt->eflags = ctxt->eflags & ~X86_EFLAGS_NT;
2988 ret = task_switch_32(ctxt, old_tss_sel, old_tss_base, &next_tss_desc);
2990 ret = task_switch_16(ctxt, old_tss_sel,
2996 ctxt->eflags = ctxt->eflags | X86_EFLAGS_NT;
3000 write_segment_descriptor(ctxt, tss_selector, &next_tss_desc);
3003 ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS);
3004 ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR);
3007 ctxt->op_bytes = ctxt->ad_bytes = (next_tss_desc.type & 8) ? 4 : 2;
3008 ctxt->lock_prefix = 0;
3009 ctxt->src.val = (unsigned long) error_code;
3010 ret = em_push(ctxt);
3013 ops->get_dr(ctxt, 7, &dr7);
3014 ops->set_dr(ctxt, 7, dr7 & ~(DR_LOCAL_ENABLE_MASK | DR_LOCAL_SLOWDOWN));
3019 int emulator_task_switch(struct x86_emulate_ctxt *ctxt,
3025 invalidate_registers(ctxt);
3026 ctxt->_eip = ctxt->eip;
3027 ctxt->dst.type = OP_NONE;
3029 rc = emulator_do_task_switch(ctxt, tss_selector, idt_index, reason,
3033 ctxt->eip = ctxt->_eip;
3034 writeback_registers(ctxt);
3040 static void string_addr_inc(struct x86_emulate_ctxt *ctxt, int reg,
3043 int df = (ctxt->eflags & X86_EFLAGS_DF) ? -op->count : op->count;
3045 register_address_increment(ctxt, reg, df * op->bytes);
3046 op->addr.mem.ea = register_address(ctxt, reg);
3049 static int em_das(struct x86_emulate_ctxt *ctxt)
3054 cf = ctxt->eflags & X86_EFLAGS_CF;
3055 al = ctxt->dst.val;
3060 af = ctxt->eflags & X86_EFLAGS_AF;
3073 ctxt->dst.val = al;
3075 ctxt->src.type = OP_IMM;
3076 ctxt->src.val = 0;
3077 ctxt->src.bytes = 1;
3078 fastop(ctxt, em_or);
3079 ctxt->eflags &= ~(X86_EFLAGS_AF | X86_EFLAGS_CF);
3081 ctxt->eflags |= X86_EFLAGS_CF;
3083 ctxt->eflags |= X86_EFLAGS_AF;
3087 static int em_aam(struct x86_emulate_ctxt *ctxt)
3091 if (ctxt->src.val == 0)
3092 return emulate_de(ctxt);
3094 al = ctxt->dst.val & 0xff;
3095 ah = al / ctxt->src.val;
3096 al %= ctxt->src.val;
3098 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al | (ah << 8);
3101 ctxt->src.type = OP_IMM;
3102 ctxt->src.val = 0;
3103 ctxt->src.bytes = 1;
3104 fastop(ctxt, em_or);
3109 static int em_aad(struct x86_emulate_ctxt *ctxt)
3111 u8 al = ctxt->dst.val & 0xff;
3112 u8 ah = (ctxt->dst.val >> 8) & 0xff;
3114 al = (al + (ah * ctxt->src.val)) & 0xff;
3116 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al;
3119 ctxt->src.type = OP_IMM;
3120 ctxt->src.val = 0;
3121 ctxt->src.bytes = 1;
3122 fastop(ctxt, em_or);
3127 static int em_call(struct x86_emulate_ctxt *ctxt)
3130 long rel = ctxt->src.val;
3132 ctxt->src.val = (unsigned long)ctxt->_eip;
3133 rc = jmp_rel(ctxt, rel);
3136 return em_push(ctxt);
3139 static int em_call_far(struct x86_emulate_ctxt *ctxt)
3145 const struct x86_emulate_ops *ops = ctxt->ops;
3146 int cpl = ctxt->ops->cpl(ctxt);
3147 enum x86emul_mode prev_mode = ctxt->mode;
3149 old_eip = ctxt->_eip;
3150 ops->get_segment(ctxt, &old_cs, &old_desc, NULL, VCPU_SREG_CS);
3152 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
3153 rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl,
3158 rc = assign_eip_far(ctxt, ctxt->src.val);
3162 ctxt->src.val = old_cs;
3163 rc = em_push(ctxt);
3167 ctxt->src.val = old_eip;
3168 rc = em_push(ctxt);
3177 ops->set_segment(ctxt, old_cs, &old_desc, 0, VCPU_SREG_CS);
3178 ctxt->mode = prev_mode;
3183 static int em_ret_near_imm(struct x86_emulate_ctxt *ctxt)
3188 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes);
3191 rc = assign_eip_near(ctxt, eip);
3194 rsp_increment(ctxt, ctxt->src.val);
3198 static int em_xchg(struct x86_emulate_ctxt *ctxt)
3201 ctxt->src.val = ctxt->dst.val;
3202 write_register_operand(&ctxt->src);
3205 ctxt->dst.val = ctxt->src.orig_val;
3206 ctxt->lock_prefix = 1;
3210 static int em_imul_3op(struct x86_emulate_ctxt *ctxt)
3212 ctxt->dst.val = ctxt->src2.val;
3213 return fastop(ctxt, em_imul);
3216 static int em_cwd(struct x86_emulate_ctxt *ctxt)
3218 ctxt->dst.type = OP_REG;
3219 ctxt->dst.bytes = ctxt->src.bytes;
3220 ctxt->dst.addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX);
3221 ctxt->dst.val = ~((ctxt->src.val >> (ctxt->src.bytes * 8 - 1)) - 1);
3226 static int em_rdpid(struct x86_emulate_ctxt *ctxt)
3230 if (!ctxt->ops->guest_has_rdpid(ctxt))
3231 return emulate_ud(ctxt);
3233 ctxt->ops->get_msr(ctxt, MSR_TSC_AUX, &tsc_aux);
3234 ctxt->dst.val = tsc_aux;
3238 static int em_rdtsc(struct x86_emulate_ctxt *ctxt)
3242 ctxt->ops->get_msr(ctxt, MSR_IA32_TSC, &tsc);
3243 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)tsc;
3244 *reg_write(ctxt, VCPU_REGS_RDX) = tsc >> 32;
3248 static int em_rdpmc(struct x86_emulate_ctxt *ctxt)
3252 if (ctxt->ops->read_pmc(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &pmc))
3253 return emulate_gp(ctxt, 0);
3254 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)pmc;
3255 *reg_write(ctxt, VCPU_REGS_RDX) = pmc >> 32;
3259 static int em_mov(struct x86_emulate_ctxt *ctxt)
3261 memcpy(ctxt->dst.valptr, ctxt->src.valptr, sizeof(ctxt->src.valptr));
3265 static int em_movbe(struct x86_emulate_ctxt *ctxt)
3269 if (!ctxt->ops->guest_has_movbe(ctxt))
3270 return emulate_ud(ctxt);
3272 switch (ctxt->op_bytes) {
3282 tmp = (u16)ctxt->src.val;
3283 ctxt->dst.val &= ~0xffffUL;
3284 ctxt->dst.val |= (unsigned long)swab16(tmp);
3287 ctxt->dst.val = swab32((u32)ctxt->src.val);
3290 ctxt->dst.val = swab64(ctxt->src.val);
3298 static int em_cr_write(struct x86_emulate_ctxt *ctxt)
3300 int cr_num = ctxt->modrm_reg;
3303 if (ctxt->ops->set_cr(ctxt, cr_num, ctxt->src.val))
3304 return emulate_gp(ctxt, 0);
3307 ctxt->dst.type = OP_NONE;
3314 r = emulator_recalc_and_set_mode(ctxt);
3322 static int em_dr_write(struct x86_emulate_ctxt *ctxt)
3326 if (ctxt->mode == X86EMUL_MODE_PROT64)
3327 val = ctxt->src.val & ~0ULL;
3329 val = ctxt->src.val & ~0U;
3332 if (ctxt->ops->set_dr(ctxt, ctxt->modrm_reg, val) < 0)
3333 return emulate_gp(ctxt, 0);
3336 ctxt->dst.type = OP_NONE;
3340 static int em_wrmsr(struct x86_emulate_ctxt *ctxt)
3342 u64 msr_index = reg_read(ctxt, VCPU_REGS_RCX);
3346 msr_data = (u32)reg_read(ctxt, VCPU_REGS_RAX)
3347 | ((u64)reg_read(ctxt, VCPU_REGS_RDX) << 32);
3348 r = ctxt->ops->set_msr_with_filter(ctxt, msr_index, msr_data);
3351 return emulate_gp(ctxt, 0);
3356 static int em_rdmsr(struct x86_emulate_ctxt *ctxt)
3358 u64 msr_index = reg_read(ctxt, VCPU_REGS_RCX);
3362 r = ctxt->ops->get_msr_with_filter(ctxt, msr_index, &msr_data);
3365 return emulate_gp(ctxt, 0);
3368 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)msr_data;
3369 *reg_write(ctxt, VCPU_REGS_RDX) = msr_data >> 32;
3374 static int em_store_sreg(struct x86_emulate_ctxt *ctxt, int segment)
3377 (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3378 ctxt->ops->cpl(ctxt) > 0)
3379 return emulate_gp(ctxt, 0);
3381 ctxt->dst.val = get_segment_selector(ctxt, segment);
3382 if (ctxt->dst.bytes == 4 && ctxt->dst.type == OP_MEM)
3383 ctxt->dst.bytes = 2;
3387 static int em_mov_rm_sreg(struct x86_emulate_ctxt *ctxt)
3389 if (ctxt->modrm_reg > VCPU_SREG_GS)
3390 return emulate_ud(ctxt);
3392 return em_store_sreg(ctxt, ctxt->modrm_reg);
3395 static int em_mov_sreg_rm(struct x86_emulate_ctxt *ctxt)
3397 u16 sel = ctxt->src.val;
3399 if (ctxt->modrm_reg == VCPU_SREG_CS || ctxt->modrm_reg > VCPU_SREG_GS)
3400 return emulate_ud(ctxt);
3402 if (ctxt->modrm_reg == VCPU_SREG_SS)
3403 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS;
3406 ctxt->dst.type = OP_NONE;
3407 return load_segment_descriptor(ctxt, sel, ctxt->modrm_reg);
3410 static int em_sldt(struct x86_emulate_ctxt *ctxt)
3412 return em_store_sreg(ctxt, VCPU_SREG_LDTR);
3415 static int em_lldt(struct x86_emulate_ctxt *ctxt)
3417 u16 sel = ctxt->src.val;
3420 ctxt->dst.type = OP_NONE;
3421 return load_segment_descriptor(ctxt, sel, VCPU_SREG_LDTR);
3424 static int em_str(struct x86_emulate_ctxt *ctxt)
3426 return em_store_sreg(ctxt, VCPU_SREG_TR);
3429 static int em_ltr(struct x86_emulate_ctxt *ctxt)
3431 u16 sel = ctxt->src.val;
3434 ctxt->dst.type = OP_NONE;
3435 return load_segment_descriptor(ctxt, sel, VCPU_SREG_TR);
3438 static int em_invlpg(struct x86_emulate_ctxt *ctxt)
3443 rc = linearize(ctxt, ctxt->src.addr.mem, 1, false, &linear);
3445 ctxt->ops->invlpg(ctxt, linear);
3447 ctxt->dst.type = OP_NONE;
3451 static int em_clts(struct x86_emulate_ctxt *ctxt)
3455 cr0 = ctxt->ops->get_cr(ctxt, 0);
3457 ctxt->ops->set_cr(ctxt, 0, cr0);
3461 static int em_hypercall(struct x86_emulate_ctxt *ctxt)
3463 int rc = ctxt->ops->fix_hypercall(ctxt);
3469 ctxt->_eip = ctxt->eip;
3471 ctxt->dst.type = OP_NONE;
3475 static int emulate_store_desc_ptr(struct x86_emulate_ctxt *ctxt,
3476 void (*get)(struct x86_emulate_ctxt *ctxt,
3481 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3482 ctxt->ops->cpl(ctxt) > 0)
3483 return emulate_gp(ctxt, 0);
3485 if (ctxt->mode == X86EMUL_MODE_PROT64)
3486 ctxt->op_bytes = 8;
3487 get(ctxt, &desc_ptr);
3488 if (ctxt->op_bytes == 2) {
3489 ctxt->op_bytes = 4;
3493 ctxt->dst.type = OP_NONE;
3494 return segmented_write_std(ctxt, ctxt->dst.addr.mem,
3495 &desc_ptr, 2 + ctxt->op_bytes);
3498 static int em_sgdt(struct x86_emulate_ctxt *ctxt)
3500 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_gdt);
3503 static int em_sidt(struct x86_emulate_ctxt *ctxt)
3505 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_idt);
3508 static int em_lgdt_lidt(struct x86_emulate_ctxt *ctxt, bool lgdt)
3513 if (ctxt->mode == X86EMUL_MODE_PROT64)
3514 ctxt->op_bytes = 8;
3515 rc = read_descriptor(ctxt, ctxt->src.addr.mem,
3517 ctxt->op_bytes);
3520 if (ctxt->mode == X86EMUL_MODE_PROT64 &&
3521 emul_is_noncanonical_address(desc_ptr.address, ctxt))
3522 return emulate_gp(ctxt, 0);
3524 ctxt->ops->set_gdt(ctxt, &desc_ptr);
3526 ctxt->ops->set_idt(ctxt, &desc_ptr);
3528 ctxt->dst.type = OP_NONE;
3532 static int em_lgdt(struct x86_emulate_ctxt *ctxt)
3534 return em_lgdt_lidt(ctxt, true);
3537 static int em_lidt(struct x86_emulate_ctxt *ctxt)
3539 return em_lgdt_lidt(ctxt, false);
3542 static int em_smsw(struct x86_emulate_ctxt *ctxt)
3544 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3545 ctxt->ops->cpl(ctxt) > 0)
3546 return emulate_gp(ctxt, 0);
3548 if (ctxt->dst.type == OP_MEM)
3549 ctxt->dst.bytes = 2;
3550 ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0);
3554 static int em_lmsw(struct x86_emulate_ctxt *ctxt)
3556 ctxt->ops->set_cr(ctxt, 0, (ctxt->ops->get_cr(ctxt, 0) & ~0x0eul)
3557 | (ctxt->src.val & 0x0f));
3558 ctxt->dst.type = OP_NONE;
3562 static int em_loop(struct x86_emulate_ctxt *ctxt)
3566 register_address_increment(ctxt, VCPU_REGS_RCX, -1);
3567 if ((address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) != 0) &&
3568 (ctxt->b == 0xe2 || test_cc(ctxt->b ^ 0x5, ctxt->eflags)))
3569 rc = jmp_rel(ctxt, ctxt->src.val);
3574 static int em_jcxz(struct x86_emulate_ctxt *ctxt)
3578 if (address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) == 0)
3579 rc = jmp_rel(ctxt, ctxt->src.val);
3584 static int em_in(struct x86_emulate_ctxt *ctxt)
3586 if (!pio_in_emulated(ctxt, ctxt->dst.bytes, ctxt->src.val,
3587 &ctxt->dst.val))
3593 static int em_out(struct x86_emulate_ctxt *ctxt)
3595 ctxt->ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val,
3596 &ctxt->src.val, 1);
3598 ctxt->dst.type = OP_NONE;
3602 static int em_cli(struct x86_emulate_ctxt *ctxt)
3604 if (emulator_bad_iopl(ctxt))
3605 return emulate_gp(ctxt, 0);
3607 ctxt->eflags &= ~X86_EFLAGS_IF;
3611 static int em_sti(struct x86_emulate_ctxt *ctxt)
3613 if (emulator_bad_iopl(ctxt))
3614 return emulate_gp(ctxt, 0);
3616 ctxt->interruptibility = KVM_X86_SHADOW_INT_STI;
3617 ctxt->eflags |= X86_EFLAGS_IF;
3621 static int em_cpuid(struct x86_emulate_ctxt *ctxt)
3626 ctxt->ops->get_msr(ctxt, MSR_MISC_FEATURES_ENABLES, &msr);
3628 ctxt->ops->cpl(ctxt)) {
3629 return emulate_gp(ctxt, 0);
3632 eax = reg_read(ctxt, VCPU_REGS_RAX);
3633 ecx = reg_read(ctxt, VCPU_REGS_RCX);
3634 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false);
3635 *reg_write(ctxt, VCPU_REGS_RAX) = eax;
3636 *reg_write(ctxt, VCPU_REGS_RBX) = ebx;
3637 *reg_write(ctxt, VCPU_REGS_RCX) = ecx;
3638 *reg_write(ctxt, VCPU_REGS_RDX) = edx;
3642 static int em_sahf(struct x86_emulate_ctxt *ctxt)
3648 flags &= *reg_rmw(ctxt, VCPU_REGS_RAX) >> 8;
3650 ctxt->eflags &= ~0xffUL;
3651 ctxt->eflags |= flags | X86_EFLAGS_FIXED;
3655 static int em_lahf(struct x86_emulate_ctxt *ctxt)
3657 *reg_rmw(ctxt, VCPU_REGS_RAX) &= ~0xff00UL;
3658 *reg_rmw(ctxt, VCPU_REGS_RAX) |= (ctxt->eflags & 0xff) << 8;
3662 static int em_bswap(struct x86_emulate_ctxt *ctxt)
3664 switch (ctxt->op_bytes) {
3667 asm("bswap %0" : "+r"(ctxt->dst.val));
3671 asm("bswap %0" : "+r"(*(u32 *)&ctxt->dst.val));
3677 static int em_clflush(struct x86_emulate_ctxt *ctxt)
3683 static int em_clflushopt(struct x86_emulate_ctxt *ctxt)
3689 static int em_movsxd(struct x86_emulate_ctxt *ctxt)
3691 ctxt->dst.val = (s32) ctxt->src.val;
3695 static int check_fxsr(struct x86_emulate_ctxt *ctxt)
3697 if (!ctxt->ops->guest_has_fxsr(ctxt))
3698 return emulate_ud(ctxt);
3700 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
3701 return emulate_nm(ctxt);
3707 if (ctxt->mode >= X86EMUL_MODE_PROT64)
3722 static inline size_t fxstate_size(struct x86_emulate_ctxt *ctxt)
3725 if (ctxt->mode == X86EMUL_MODE_PROT64)
3728 cr4_osfxsr = ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR;
3750 static int em_fxsave(struct x86_emulate_ctxt *ctxt)
3755 rc = check_fxsr(ctxt);
3768 return segmented_write_std(ctxt, ctxt->memop.addr.mem, &fx_state,
3769 fxstate_size(ctxt));
3792 static int em_fxrstor(struct x86_emulate_ctxt *ctxt)
3798 rc = check_fxsr(ctxt);
3802 size = fxstate_size(ctxt);
3803 rc = segmented_read_std(ctxt, ctxt->memop.addr.mem, &fx_state, size);
3816 rc = emulate_gp(ctxt, 0);
3829 static int em_xsetbv(struct x86_emulate_ctxt *ctxt)
3833 if (!(ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSXSAVE))
3834 return emulate_ud(ctxt);
3836 eax = reg_read(ctxt, VCPU_REGS_RAX);
3837 edx = reg_read(ctxt, VCPU_REGS_RDX);
3838 ecx = reg_read(ctxt, VCPU_REGS_RCX);
3840 if (ctxt->ops->set_xcr(ctxt, ecx, ((u64)edx << 32) | eax))
3841 return emulate_gp(ctxt, 0);
3858 static int check_cr_access(struct x86_emulate_ctxt *ctxt)
3860 if (!valid_cr(ctxt->modrm_reg))
3861 return emulate_ud(ctxt);
3866 static int check_dr7_gd(struct x86_emulate_ctxt *ctxt)
3870 ctxt->ops->get_dr(ctxt, 7, &dr7);
3875 static int check_dr_read(struct x86_emulate_ctxt *ctxt)
3877 int dr = ctxt->modrm_reg;
3881 return emulate_ud(ctxt);
3883 cr4 = ctxt->ops->get_cr(ctxt, 4);
3885 return emulate_ud(ctxt);
3887 if (check_dr7_gd(ctxt)) {
3890 ctxt->ops->get_dr(ctxt, 6, &dr6);
3893 ctxt->ops->set_dr(ctxt, 6, dr6);
3894 return emulate_db(ctxt);
3900 static int check_dr_write(struct x86_emulate_ctxt *ctxt)
3902 u64 new_val = ctxt->src.val64;
3903 int dr = ctxt->modrm_reg;
3906 return emulate_gp(ctxt, 0);
3908 return check_dr_read(ctxt);
3911 static int check_svme(struct x86_emulate_ctxt *ctxt)
3915 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
3918 return emulate_ud(ctxt);
3923 static int check_svme_pa(struct x86_emulate_ctxt *ctxt)
3925 u64 rax = reg_read(ctxt, VCPU_REGS_RAX);
3929 return emulate_gp(ctxt, 0);
3931 return check_svme(ctxt);
3934 static int check_rdtsc(struct x86_emulate_ctxt *ctxt)
3936 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
3938 if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt))
3939 return emulate_gp(ctxt, 0);
3944 static int check_rdpmc(struct x86_emulate_ctxt *ctxt)
3946 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
3947 u64 rcx = reg_read(ctxt, VCPU_REGS_RCX);
3961 if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) ||
3962 ctxt->ops->check_pmc(ctxt, rcx))
3963 return emulate_gp(ctxt, 0);
3968 static int check_perm_in(struct x86_emulate_ctxt *ctxt)
3970 ctxt->dst.bytes = min(ctxt->dst.bytes, 4u);
3971 if (!emulator_io_permitted(ctxt, ctxt->src.val, ctxt->dst.bytes))
3972 return emulate_gp(ctxt, 0);
3977 static int check_perm_out(struct x86_emulate_ctxt *ctxt)
3979 ctxt->src.bytes = min(ctxt->src.bytes, 4u);
3980 if (!emulator_io_permitted(ctxt, ctxt->dst.val, ctxt->src.bytes))
3981 return emulate_gp(ctxt, 0);
4544 static unsigned imm_size(struct x86_emulate_ctxt *ctxt)
4548 size = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4554 static int decode_imm(struct x86_emulate_ctxt *ctxt, struct operand *op,
4561 op->addr.mem.ea = ctxt->_eip;
4565 op->val = insn_fetch(s8, ctxt);
4568 op->val = insn_fetch(s16, ctxt);
4571 op->val = insn_fetch(s32, ctxt);
4574 op->val = insn_fetch(s64, ctxt);
4594 static int decode_operand(struct x86_emulate_ctxt *ctxt, struct operand *op,
4601 decode_register_operand(ctxt, op);
4604 rc = decode_imm(ctxt, op, 1, false);
4607 ctxt->memop.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4609 *op = ctxt->memop;
4610 ctxt->memopp = op;
4611 if (ctxt->d & BitOp)
4612 fetch_bit_operand(ctxt);
4616 ctxt->memop.bytes = (ctxt->op_bytes == 8) ? 16 : 8;
4620 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4621 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX);
4627 op->bytes = (ctxt->d & ByteOp) ? 2 : ctxt->op_bytes;
4628 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX);
4633 if (ctxt->d & ByteOp) {
4638 op->bytes = ctxt->op_bytes;
4639 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX);
4645 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4647 register_address(ctxt, VCPU_REGS_RDI);
4655 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX);
4661 op->val = reg_read(ctxt, VCPU_REGS_RCX) & 0xff;
4664 rc = decode_imm(ctxt, op, 1, true);
4672 rc = decode_imm(ctxt, op, imm_size(ctxt), true);
4675 rc = decode_imm(ctxt, op, ctxt->op_bytes, true);
4678 ctxt->memop.bytes = 1;
4679 if (ctxt->memop.type == OP_REG) {
4680 ctxt->memop.addr.reg = decode_register(ctxt,
4681 ctxt->modrm_rm, true);
4682 fetch_register_operand(&ctxt->memop);
4686 ctxt->memop.bytes = 2;
4689 ctxt->memop.bytes = 4;
4692 rc = decode_imm(ctxt, op, 2, false);
4695 rc = decode_imm(ctxt, op, imm_size(ctxt), false);
4699 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4701 register_address(ctxt, VCPU_REGS_RSI);
4702 op->addr.mem.seg = ctxt->seg_override;
4708 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4710 address_mask(ctxt,
4711 reg_read(ctxt, VCPU_REGS_RBX) +
4712 (reg_read(ctxt, VCPU_REGS_RAX) & 0xff));
4713 op->addr.mem.seg = ctxt->seg_override;
4718 op->addr.mem.ea = ctxt->_eip;
4719 op->bytes = ctxt->op_bytes + 2;
4720 insn_fetch_arr(op->valptr, op->bytes, ctxt);
4723 ctxt->memop.bytes = ctxt->op_bytes + 2;
4760 int x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len, int emulation_type)
4763 int mode = ctxt->mode;
4771 ctxt->memop.type = OP_NONE;
4772 ctxt->memopp = NULL;
4773 ctxt->_eip = ctxt->eip;
4774 ctxt->fetch.ptr = ctxt->fetch.data;
4775 ctxt->fetch.end = ctxt->fetch.data + insn_len;
4776 ctxt->opcode_len = 1;
4777 ctxt->intercept = x86_intercept_none;
4779 memcpy(ctxt->fetch.data, insn, insn_len);
4781 rc = __do_insn_fetch_bytes(ctxt, 1);
4790 ctxt->ops->get_segment(ctxt, &dummy, &desc, NULL, VCPU_SREG_CS);
4810 ctxt->op_bytes = def_op_bytes;
4811 ctxt->ad_bytes = def_ad_bytes;
4815 switch (ctxt->b = insn_fetch(u8, ctxt)) {
4819 ctxt->op_bytes = def_op_bytes ^ 6;
4824 ctxt->ad_bytes = def_ad_bytes ^ 12;
4827 ctxt->ad_bytes = def_ad_bytes ^ 6;
4831 ctxt->seg_override = VCPU_SREG_ES;
4835 ctxt->seg_override = VCPU_SREG_CS;
4839 ctxt->seg_override = VCPU_SREG_SS;
4843 ctxt->seg_override = VCPU_SREG_DS;
4847 ctxt->seg_override = VCPU_SREG_FS;
4851 ctxt->seg_override = VCPU_SREG_GS;
4856 ctxt->rex_prefix = ctxt->b;
4859 ctxt->lock_prefix = 1;
4863 ctxt->rep_prefix = ctxt->b;
4871 ctxt->rex_prefix = 0;
4877 if (ctxt->rex_prefix & 8)
4878 ctxt->op_bytes = 8; /* REX.W */
4881 opcode = opcode_table[ctxt->b];
4883 if (ctxt->b == 0x0f) {
4884 ctxt->opcode_len = 2;
4885 ctxt->b = insn_fetch(u8, ctxt);
4886 opcode = twobyte_table[ctxt->b];
4889 if (ctxt->b == 0x38) {
4890 ctxt->opcode_len = 3;
4891 ctxt->b = insn_fetch(u8, ctxt);
4892 opcode = opcode_map_0f_38[ctxt->b];
4895 ctxt->d = opcode.flags;
4897 if (ctxt->d & ModRM)
4898 ctxt->modrm = insn_fetch(u8, ctxt);
4901 if (ctxt->opcode_len == 1 && (ctxt->b == 0xc5 || ctxt->b == 0xc4) &&
4902 (mode == X86EMUL_MODE_PROT64 || (ctxt->modrm & 0xc0) == 0xc0)) {
4903 ctxt->d = NotImpl;
4906 while (ctxt->d & GroupMask) {
4907 switch (ctxt->d & GroupMask) {
4909 goffset = (ctxt->modrm >> 3) & 7;
4913 goffset = (ctxt->modrm >> 3) & 7;
4914 if ((ctxt->modrm >> 6) == 3)
4920 goffset = ctxt->modrm & 7;
4924 if (ctxt->rep_prefix && op_prefix)
4926 simd_prefix = op_prefix ? 0x66 : ctxt->rep_prefix;
4935 if (ctxt->modrm > 0xbf) {
4938 ctxt->modrm - 0xc0, size);
4942 opcode = opcode.u.esc->op[(ctxt->modrm >> 3) & 7];
4946 if ((ctxt->modrm >> 6) == 3)
4952 if (ctxt->mode == X86EMUL_MODE_PROT64)
4961 ctxt->d &= ~(u64)GroupMask;
4962 ctxt->d |= opcode.flags;
4965 ctxt->is_branch = opcode.flags & IsBranch;
4968 if (ctxt->d == 0)
4971 ctxt->execute = opcode.u.execute;
4974 likely(!(ctxt->d & EmulateOnUD)))
4977 if (unlikely(ctxt->d &
4984 ctxt->check_perm = opcode.check_perm;
4985 ctxt->intercept = opcode.intercept;
4987 if (ctxt->d & NotImpl)
4991 if (ctxt->op_bytes == 4 && (ctxt->d & Stack))
4992 ctxt->op_bytes = 8;
4993 else if (ctxt->d & NearBranch)
4994 ctxt->op_bytes = 8;
4997 if (ctxt->d & Op3264) {
4999 ctxt->op_bytes = 8;
5001 ctxt->op_bytes = 4;
5004 if ((ctxt->d & No16) && ctxt->op_bytes == 2)
5005 ctxt->op_bytes = 4;
5007 if (ctxt->d & Sse)
5008 ctxt->op_bytes = 16;
5009 else if (ctxt->d & Mmx)
5010 ctxt->op_bytes = 8;
5014 if (ctxt->d & ModRM) {
5015 rc = decode_modrm(ctxt, &ctxt->memop);
5018 ctxt->seg_override = ctxt->modrm_seg;
5020 } else if (ctxt->d & MemAbs)
5021 rc = decode_abs(ctxt, &ctxt->memop);
5026 ctxt->seg_override = VCPU_SREG_DS;
5028 ctxt->memop.addr.mem.seg = ctxt->seg_override;
5034 rc = decode_operand(ctxt, &ctxt->src, (ctxt->d >> SrcShift) & OpMask);
5042 rc = decode_operand(ctxt, &ctxt->src2, (ctxt->d >> Src2Shift) & OpMask);
5047 rc = decode_operand(ctxt, &ctxt->dst, (ctxt->d >> DstShift) & OpMask);
5049 if (ctxt->rip_relative && likely(ctxt->memopp))
5050 ctxt->memopp->addr.mem.ea = address_mask(ctxt,
5051 ctxt->memopp->addr.mem.ea + ctxt->_eip);
5055 ctxt->have_exception = true;
5059 bool x86_page_table_writing_insn(struct x86_emulate_ctxt *ctxt)
5061 return ctxt->d & PageTable;
5064 static bool string_insn_completed(struct x86_emulate_ctxt *ctxt)
5073 if (((ctxt->b == 0xa6) || (ctxt->b == 0xa7) ||
5074 (ctxt->b == 0xae) || (ctxt->b == 0xaf))
5075 && (((ctxt->rep_prefix == REPE_PREFIX) &&
5076 ((ctxt->eflags & X86_EFLAGS_ZF) == 0))
5077 || ((ctxt->rep_prefix == REPNE_PREFIX) &&
5078 ((ctxt->eflags & X86_EFLAGS_ZF) == X86_EFLAGS_ZF))))
5084 static int flush_pending_x87_faults(struct x86_emulate_ctxt *ctxt)
5093 return emulate_exception(ctxt, MF_VECTOR, 0, false);
5104 static int fastop(struct x86_emulate_ctxt *ctxt, fastop_t fop)
5106 ulong flags = (ctxt->eflags & EFLAGS_MASK) | X86_EFLAGS_IF;
5108 if (!(ctxt->d & ByteOp))
5109 fop += __ffs(ctxt->dst.bytes) * FASTOP_SIZE;
5112 : "+a"(ctxt->dst.val), "+d"(ctxt->src.val), [flags]"+D"(flags),
5114 : "c"(ctxt->src2.val));
5116 ctxt->eflags = (ctxt->eflags & ~EFLAGS_MASK) | (flags & EFLAGS_MASK);
5118 return emulate_de(ctxt);
5122 void init_decode_cache(struct x86_emulate_ctxt *ctxt)
5125 ctxt->rip_relative = false;
5126 ctxt->rex_prefix = 0;
5127 ctxt->lock_prefix = 0;
5128 ctxt->rep_prefix = 0;
5129 ctxt->regs_valid = 0;
5130 ctxt->regs_dirty = 0;
5132 ctxt->io_read.pos = 0;
5133 ctxt->io_read.end = 0;
5134 ctxt->mem_read.end = 0;
5137 int x86_emulate_insn(struct x86_emulate_ctxt *ctxt)
5139 const struct x86_emulate_ops *ops = ctxt->ops;
5141 int saved_dst_type = ctxt->dst.type;
5142 bool is_guest_mode = ctxt->ops->is_guest_mode(ctxt);
5144 ctxt->mem_read.pos = 0;
5147 if (ctxt->lock_prefix && (!(ctxt->d & Lock) || ctxt->dst.type != OP_MEM)) {
5148 rc = emulate_ud(ctxt);
5152 if ((ctxt->d & SrcMask) == SrcMemFAddr && ctxt->src.type != OP_MEM) {
5153 rc = emulate_ud(ctxt);
5157 if (unlikely(ctxt->d &
5159 if ((ctxt->mode == X86EMUL_MODE_PROT64 && (ctxt->d & No64)) ||
5160 (ctxt->d & Undefined)) {
5161 rc = emulate_ud(ctxt);
5165 if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM)))
5166 || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) {
5167 rc = emulate_ud(ctxt);
5171 if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) {
5172 rc = emulate_nm(ctxt);
5176 if (ctxt->d & Mmx) {
5177 rc = flush_pending_x87_faults(ctxt);
5184 fetch_possible_mmx_operand(&ctxt->src);
5185 fetch_possible_mmx_operand(&ctxt->src2);
5186 if (!(ctxt->d & Mov))
5187 fetch_possible_mmx_operand(&ctxt->dst);
5190 if (unlikely(is_guest_mode) && ctxt->intercept) {
5191 rc = emulator_check_intercept(ctxt, ctxt->intercept,
5198 if ((ctxt->d & Prot) && ctxt->mode < X86EMUL_MODE_PROT16) {
5199 rc = emulate_ud(ctxt);
5204 if ((ctxt->d & Priv) && ops->cpl(ctxt)) {
5205 if (ctxt->d & PrivUD)
5206 rc = emulate_ud(ctxt);
5208 rc = emulate_gp(ctxt, 0);
5213 if (ctxt->d & CheckPerm) {
5214 rc = ctxt->check_perm(ctxt);
5219 if (unlikely(is_guest_mode) && (ctxt->d & Intercept)) {
5220 rc = emulator_check_intercept(ctxt, ctxt->intercept,
5226 if (ctxt->rep_prefix && (ctxt->d & String)) {
5228 if (address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) == 0) {
5229 string_registers_quirk(ctxt);
5230 ctxt->eip = ctxt->_eip;
5231 ctxt->eflags &= ~X86_EFLAGS_RF;
5237 if ((ctxt->src.type == OP_MEM) && !(ctxt->d & NoAccess)) {
5238 rc = segmented_read(ctxt, ctxt->src.addr.mem,
5239 ctxt->src.valptr, ctxt->src.bytes);
5242 ctxt->src.orig_val64 = ctxt->src.val64;
5245 if (ctxt->src2.type == OP_MEM) {
5246 rc = segmented_read(ctxt, ctxt->src2.addr.mem,
5247 &ctxt->src2.val, ctxt->src2.bytes);
5252 if ((ctxt->d & DstMask) == ImplicitOps)
5256 if ((ctxt->dst.type == OP_MEM) && !(ctxt->d & Mov)) {
5258 rc = segmented_read(ctxt, ctxt->dst.addr.mem,
5259 &ctxt->dst.val, ctxt->dst.bytes);
5261 if (!(ctxt->d & NoWrite) &&
5263 ctxt->exception.vector == PF_VECTOR)
5264 ctxt->exception.error_code |= PFERR_WRITE_MASK;
5269 ctxt->dst.orig_val64 = ctxt->dst.val64;
5273 if (unlikely(is_guest_mode) && (ctxt->d & Intercept)) {
5274 rc = emulator_check_intercept(ctxt, ctxt->intercept,
5280 if (ctxt->rep_prefix && (ctxt->d & String))
5281 ctxt->eflags |= X86_EFLAGS_RF;
5283 ctxt->eflags &= ~X86_EFLAGS_RF;
5285 if (ctxt->execute) {
5286 if (ctxt->d & Fastop)
5287 rc = fastop(ctxt, ctxt->fop);
5289 rc = ctxt->execute(ctxt);
5295 if (ctxt->opcode_len == 2)
5297 else if (ctxt->opcode_len == 3)
5300 switch (ctxt->b) {
5302 if (test_cc(ctxt->b, ctxt->eflags))
5303 rc = jmp_rel(ctxt, ctxt->src.val);
5306 ctxt->dst.val = ctxt->src.addr.mem.ea;
5309 if (ctxt->dst.addr.reg == reg_rmw(ctxt, VCPU_REGS_RAX))
5310 ctxt->dst.type = OP_NONE;
5312 rc = em_xchg(ctxt);
5315 switch (ctxt->op_bytes) {
5316 case 2: ctxt->dst.val = (s8)ctxt->dst.val; break;
5317 case 4: ctxt->dst.val = (s16)ctxt->dst.val; break;
5318 case 8: ctxt->dst.val = (s32)ctxt->dst.val; break;
5322 rc = emulate_int(ctxt, 3);
5325 rc = emulate_int(ctxt, ctxt->src.val);
5328 if (ctxt->eflags & X86_EFLAGS_OF)
5329 rc = emulate_int(ctxt, 4);
5333 rc = jmp_rel(ctxt, ctxt->src.val);
5334 ctxt->dst.type = OP_NONE; /* Disable writeback. */
5337 ctxt->ops->halt(ctxt);
5341 ctxt->eflags ^= X86_EFLAGS_CF;
5344 ctxt->eflags &= ~X86_EFLAGS_CF;
5347 ctxt->eflags |= X86_EFLAGS_CF;
5350 ctxt->eflags &= ~X86_EFLAGS_DF;
5353 ctxt->eflags |= X86_EFLAGS_DF;
5363 if (ctxt->d & SrcWrite) {
5364 BUG_ON(ctxt->src.type == OP_MEM || ctxt->src.type == OP_MEM_STR);
5365 rc = writeback(ctxt, &ctxt->src);
5369 if (!(ctxt->d & NoWrite)) {
5370 rc = writeback(ctxt, &ctxt->dst);
5379 ctxt->dst.type = saved_dst_type;
5381 if ((ctxt->d & SrcMask) == SrcSI)
5382 string_addr_inc(ctxt, VCPU_REGS_RSI, &ctxt->src);
5384 if ((ctxt->d & DstMask) == DstDI)
5385 string_addr_inc(ctxt, VCPU_REGS_RDI, &ctxt->dst);
5387 if (ctxt->rep_prefix && (ctxt->d & String)) {
5389 struct read_cache *r = &ctxt->io_read;
5390 if ((ctxt->d & SrcMask) == SrcSI)
5391 count = ctxt->src.count;
5393 count = ctxt->dst.count;
5394 register_address_increment(ctxt, VCPU_REGS_RCX, -count);
5396 if (!string_insn_completed(ctxt)) {
5401 if ((r->end != 0 || reg_read(ctxt, VCPU_REGS_RCX) & 0x3ff) &&
5408 ctxt->mem_read.end = 0;
5409 writeback_registers(ctxt);
5414 ctxt->eflags &= ~X86_EFLAGS_RF;
5417 ctxt->eip = ctxt->_eip;
5418 if (ctxt->mode != X86EMUL_MODE_PROT64)
5419 ctxt->eip = (u32)ctxt->_eip;
5423 if (KVM_EMULATOR_BUG_ON(ctxt->exception.vector > 0x1f, ctxt))
5425 ctxt->have_exception = true;
5431 writeback_registers(ctxt);
5436 switch (ctxt->b) {
5438 (ctxt->ops->wbinvd)(ctxt);
5446 ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg);
5449 ops->get_dr(ctxt, ctxt->modrm_reg, &ctxt->dst.val);
5452 if (test_cc(ctxt->b, ctxt->eflags))
5453 ctxt->dst.val = ctxt->src.val;
5454 else if (ctxt->op_bytes != 4)
5455 ctxt->dst.type = OP_NONE; /* no writeback */
5458 if (test_cc(ctxt->b, ctxt->eflags))
5459 rc = jmp_rel(ctxt, ctxt->src.val);
5462 ctxt->dst.val = test_cc(ctxt->b, ctxt->eflags);
5465 ctxt->dst.bytes = ctxt->op_bytes;
5466 ctxt->dst.val = (ctxt->src.bytes == 1) ? (u8) ctxt->src.val
5467 : (u16) ctxt->src.val;
5470 ctxt->dst.bytes = ctxt->op_bytes;
5471 ctxt->dst.val = (ctxt->src.bytes == 1) ? (s8) ctxt->src.val :
5472 (s16) ctxt->src.val;
5489 void emulator_invalidate_register_cache(struct x86_emulate_ctxt *ctxt)
5491 invalidate_registers(ctxt);
5494 void emulator_writeback_register_cache(struct x86_emulate_ctxt *ctxt)
5496 writeback_registers(ctxt);
5499 bool emulator_can_use_gpa(struct x86_emulate_ctxt *ctxt)
5501 if (ctxt->rep_prefix && (ctxt->d & String))
5504 if (ctxt->d & TwoMemOp)