Lines Matching refs:ctxt
195 int (*execute)(struct x86_emulate_ctxt *ctxt);
204 int (*check_perm)(struct x86_emulate_ctxt *ctxt);
243 static ulong reg_read(struct x86_emulate_ctxt *ctxt, unsigned nr)
245 if (!(ctxt->regs_valid & (1 << nr))) {
246 ctxt->regs_valid |= 1 << nr;
247 ctxt->_regs[nr] = ctxt->ops->read_gpr(ctxt, nr);
249 return ctxt->_regs[nr];
252 static ulong *reg_write(struct x86_emulate_ctxt *ctxt, unsigned nr)
254 ctxt->regs_valid |= 1 << nr;
255 ctxt->regs_dirty |= 1 << nr;
256 return &ctxt->_regs[nr];
259 static ulong *reg_rmw(struct x86_emulate_ctxt *ctxt, unsigned nr)
261 reg_read(ctxt, nr);
262 return reg_write(ctxt, nr);
265 static void writeback_registers(struct x86_emulate_ctxt *ctxt)
269 for_each_set_bit(reg, (ulong *)&ctxt->regs_dirty, 16)
270 ctxt->ops->write_gpr(ctxt, reg, ctxt->_regs[reg]);
273 static void invalidate_registers(struct x86_emulate_ctxt *ctxt)
275 ctxt->regs_dirty = 0;
276 ctxt->regs_valid = 0;
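
The fragments above are the emulator's lazy GPR cache: reg_read() pulls a register from the vCPU via ops->read_gpr() on first use and marks it valid, reg_write()/reg_rmw() hand back a pointer into the cache and mark the slot dirty, and writeback_registers() flushes only the dirty slots through ops->write_gpr(). A minimal sketch of the pattern a handler would follow (em_swap_ax_cx is a hypothetical name, not part of the file):

    static int em_swap_ax_cx(struct x86_emulate_ctxt *ctxt)
    {
    	ulong ax = reg_read(ctxt, VCPU_REGS_RAX);	/* fills the cache, marks RAX valid */
    	ulong cx = reg_read(ctxt, VCPU_REGS_RCX);

    	*reg_write(ctxt, VCPU_REGS_RAX) = cx;		/* marks RAX dirty */
    	*reg_write(ctxt, VCPU_REGS_RCX) = ax;		/* marks RCX dirty */

    	/* dirty slots are pushed back to the vCPU later by writeback_registers() */
    	return X86EMUL_CONTINUE;
    }
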
309 static int fastop(struct x86_emulate_ctxt *ctxt, fastop_t fop);
503 static int emulator_check_intercept(struct x86_emulate_ctxt *ctxt,
509 .rep_prefix = ctxt->rep_prefix,
510 .modrm_mod = ctxt->modrm_mod,
511 .modrm_reg = ctxt->modrm_reg,
512 .modrm_rm = ctxt->modrm_rm,
513 .src_val = ctxt->src.val64,
514 .dst_val = ctxt->dst.val64,
515 .src_bytes = ctxt->src.bytes,
516 .dst_bytes = ctxt->dst.bytes,
517 .ad_bytes = ctxt->ad_bytes,
518 .next_rip = ctxt->eip,
521 return ctxt->ops->intercept(ctxt, &info, stage);
548 static inline unsigned long ad_mask(struct x86_emulate_ctxt *ctxt)
550 return (1UL << (ctxt->ad_bytes << 3)) - 1;
553 static ulong stack_mask(struct x86_emulate_ctxt *ctxt)
558 if (ctxt->mode == X86EMUL_MODE_PROT64)
560 ctxt->ops->get_segment(ctxt, &sel, &ss, NULL, VCPU_SREG_SS);
564 static int stack_size(struct x86_emulate_ctxt *ctxt)
566 return (__fls(stack_mask(ctxt)) + 1) >> 3;
571 address_mask(struct x86_emulate_ctxt *ctxt, unsigned long reg)
573 if (ctxt->ad_bytes == sizeof(unsigned long))
576 return reg & ad_mask(ctxt);
580 register_address(struct x86_emulate_ctxt *ctxt, int reg)
582 return address_mask(ctxt, reg_read(ctxt, reg));
591 register_address_increment(struct x86_emulate_ctxt *ctxt, int reg, int inc)
593 ulong *preg = reg_rmw(ctxt, reg);
595 assign_register(preg, *preg + inc, ctxt->ad_bytes);
598 static void rsp_increment(struct x86_emulate_ctxt *ctxt, int inc)
600 masked_increment(reg_rmw(ctxt, VCPU_REGS_RSP), stack_mask(ctxt), inc);
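
ad_mask()/address_mask() truncate effective addresses to the current address size (ctxt->ad_bytes), while stack_mask() derives the width from the cached SS descriptor (or forces 64 bits in long mode) so rsp_increment() never disturbs bits above the stack width. A small illustration, assuming 16-bit addressing (ctxt->ad_bytes == 2):

    /* With ad_bytes == 2, ad_mask(ctxt) is 0xffff, so only the low word of a
     * register contributes to the effective address. */
    ulong ea = address_mask(ctxt, 0x12345678UL);	/* yields 0x5678 */

    /* Stack adjustments go through stack_mask(), so a 16-bit SS keeps the
     * upper bits of RSP untouched. */
    rsp_increment(ctxt, -2);				/* SP -= 2, masked to the stack width */
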
610 static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
612 if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
615 return ctxt->ops->get_cached_segment_base(ctxt, seg);
618 static int emulate_exception(struct x86_emulate_ctxt *ctxt, int vec,
622 ctxt->exception.vector = vec;
623 ctxt->exception.error_code = error;
624 ctxt->exception.error_code_valid = valid;
628 static int emulate_db(struct x86_emulate_ctxt *ctxt)
630 return emulate_exception(ctxt, DB_VECTOR, 0, false);
633 static int emulate_gp(struct x86_emulate_ctxt *ctxt, int err)
635 return emulate_exception(ctxt, GP_VECTOR, err, true);
638 static int emulate_ss(struct x86_emulate_ctxt *ctxt, int err)
640 return emulate_exception(ctxt, SS_VECTOR, err, true);
643 static int emulate_ud(struct x86_emulate_ctxt *ctxt)
645 return emulate_exception(ctxt, UD_VECTOR, 0, false);
648 static int emulate_ts(struct x86_emulate_ctxt *ctxt, int err)
650 return emulate_exception(ctxt, TS_VECTOR, err, true);
653 static int emulate_de(struct x86_emulate_ctxt *ctxt)
655 return emulate_exception(ctxt, DE_VECTOR, 0, false);
658 static int emulate_nm(struct x86_emulate_ctxt *ctxt)
660 return emulate_exception(ctxt, NM_VECTOR, 0, false);
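
emulate_exception() records the pending fault in ctxt->exception (vector, error code, validity), and its return value is what the per-vector wrappers above hand back, so a handler can queue a fault and bail out in one statement. A hypothetical sketch of the usual shape (em_privileged_op is made up):

    static int em_privileged_op(struct x86_emulate_ctxt *ctxt)
    {
    	if (ctxt->ops->cpl(ctxt) > 0)
    		return emulate_gp(ctxt, 0);	/* queue #GP(0) in ctxt->exception */

    	/* ... privileged work ... */
    	return X86EMUL_CONTINUE;
    }
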
663 static u16 get_segment_selector(struct x86_emulate_ctxt *ctxt, unsigned seg)
668 ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg);
672 static void set_segment_selector(struct x86_emulate_ctxt *ctxt, u16 selector,
679 ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg);
680 ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg);
683 static inline u8 ctxt_virt_addr_bits(struct x86_emulate_ctxt *ctxt)
685 return (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_LA57) ? 57 : 48;
689 struct x86_emulate_ctxt *ctxt)
691 return !__is_canonical_address(la, ctxt_virt_addr_bits(ctxt));
703 static unsigned insn_alignment(struct x86_emulate_ctxt *ctxt, unsigned size)
705 u64 alignment = ctxt->d & AlignMask;
722 static __always_inline int __linearize(struct x86_emulate_ctxt *ctxt,
735 la = seg_base(ctxt, addr.seg) + addr.ea;
740 va_bits = ctxt_virt_addr_bits(ctxt);
750 usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL,
755 if ((((ctxt->mode != X86EMUL_MODE_REAL) && (desc.type & 8))
779 if (la & (insn_alignment(ctxt, size) - 1))
780 return emulate_gp(ctxt, 0);
784 return emulate_ss(ctxt, 0);
786 return emulate_gp(ctxt, 0);
789 static int linearize(struct x86_emulate_ctxt *ctxt,
795 return __linearize(ctxt, addr, &max_size, size, write, false,
796 ctxt->mode, linear);
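
__linearize()/linearize() turn a segment:offset pair into a linear address, applying the segment base plus limit, canonicality and alignment checks, and queuing #GP or #SS on failure. A sketch of the lookup-then-access pattern the segmented_* helpers below use (the buffer and the offset 0x1000 are made up for illustration):

    u8 buf[4];
    ulong linear;
    struct segmented_address addr = { .ea = 0x1000, .seg = VCPU_SREG_DS };
    int rc = linearize(ctxt, addr, sizeof(buf), false, &linear);

    if (rc == X86EMUL_CONTINUE)
    	rc = ctxt->ops->read_std(ctxt, linear, buf, sizeof(buf),
    				 &ctxt->exception, false);
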
799 static inline int assign_eip(struct x86_emulate_ctxt *ctxt, ulong dst)
807 if (ctxt->op_bytes != sizeof(unsigned long))
808 addr.ea = dst & ((1UL << (ctxt->op_bytes << 3)) - 1);
809 rc = __linearize(ctxt, addr, &max_size, 1, false, true, ctxt->mode, &linear);
811 ctxt->_eip = addr.ea;
815 static inline int emulator_recalc_and_set_mode(struct x86_emulate_ctxt *ctxt)
822 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
824 if (!(ctxt->ops->get_cr(ctxt, 0) & X86_CR0_PE)) {
828 ctxt->mode = X86EMUL_MODE_REAL;
832 if (ctxt->eflags & X86_EFLAGS_VM) {
836 ctxt->mode = X86EMUL_MODE_VM86;
840 if (!ctxt->ops->get_segment(ctxt, &selector, &cs, &base3, VCPU_SREG_CS))
846 ctxt->mode = X86EMUL_MODE_PROT64;
849 ctxt->mode = X86EMUL_MODE_PROT32;
851 ctxt->mode = X86EMUL_MODE_PROT16;
855 ctxt->mode = cs.d ? X86EMUL_MODE_PROT32 : X86EMUL_MODE_PROT16;
861 static inline int assign_eip_near(struct x86_emulate_ctxt *ctxt, ulong dst)
863 return assign_eip(ctxt, dst);
866 static int assign_eip_far(struct x86_emulate_ctxt *ctxt, ulong dst)
868 int rc = emulator_recalc_and_set_mode(ctxt);
873 return assign_eip(ctxt, dst);
876 static inline int jmp_rel(struct x86_emulate_ctxt *ctxt, int rel)
878 return assign_eip_near(ctxt, ctxt->_eip + rel);
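
assign_eip() masks the target to the operand size and validates it with a fetch-style __linearize() before committing ctxt->_eip; assign_eip_far() additionally recomputes the execution mode since a far transfer may have changed CS, and jmp_rel() layers relative branches on top of assign_eip_near(). A hypothetical relative-jump handler (em_jmp_rel8 is made up; real JMP rel8 decoding lives elsewhere in the file):

    static int em_jmp_rel8(struct x86_emulate_ctxt *ctxt)
    {
    	/* ctxt->src.val holds the sign-extended 8-bit displacement;
    	 * jmp_rel() queues the fault itself if the target is bad. */
    	return jmp_rel(ctxt, (s8)ctxt->src.val);
    }
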
881 static int linear_read_system(struct x86_emulate_ctxt *ctxt, ulong linear,
884 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, true);
887 static int linear_write_system(struct x86_emulate_ctxt *ctxt,
891 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, true);
894 static int segmented_read_std(struct x86_emulate_ctxt *ctxt,
902 rc = linearize(ctxt, addr, size, false, &linear);
905 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, false);
908 static int segmented_write_std(struct x86_emulate_ctxt *ctxt,
916 rc = linearize(ctxt, addr, size, true, &linear);
919 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, false);
926 static int __do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt, int op_size)
931 int cur_size = ctxt->fetch.end - ctxt->fetch.data;
933 .ea = ctxt->eip + cur_size };
945 rc = __linearize(ctxt, addr, &max_size, 0, false, true, ctxt->mode,
960 return emulate_gp(ctxt, 0);
962 rc = ctxt->ops->fetch(ctxt, linear, ctxt->fetch.end,
963 size, &ctxt->exception);
966 ctxt->fetch.end += size;
970 static __always_inline int do_insn_fetch_bytes(struct x86_emulate_ctxt *ctxt,
973 unsigned done_size = ctxt->fetch.end - ctxt->fetch.ptr;
976 return __do_insn_fetch_bytes(ctxt, size - done_size);
988 ctxt->_eip += sizeof(_type); \
989 memcpy(&_x, ctxt->fetch.ptr, sizeof(_type)); \
990 ctxt->fetch.ptr += sizeof(_type); \
999 ctxt->_eip += (_size); \
1000 memcpy(_arr, ctxt->fetch.ptr, _size); \
1001 ctxt->fetch.ptr += (_size); \
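
insn_fetch()/insn_fetch_arr() consume bytes from the ctxt->fetch window (refilled by __do_insn_fetch_bytes(), which linearizes ctxt->_eip against CS and calls ops->fetch()), advancing ctxt->_eip and fetch.ptr together. A sketch of how a decode step uses them, assuming the caller provides the rc variable and the done: label the macros expect:

    /* Inside a decode step: each insn_fetch() first makes sure enough bytes
     * are buffered (do_insn_fetch_bytes()), then copies from ctxt->fetch.ptr
     * and bumps ctxt->_eip; a failed prefetch sets rc and jumps to done:. */
    u8  modrm = insn_fetch(u8, ctxt);	/* one opcode/ModRM byte */
    s32 disp  = insn_fetch(s32, ctxt);	/* a 32-bit displacement */
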
1009 static void *decode_register(struct x86_emulate_ctxt *ctxt, u8 modrm_reg,
1013 int highbyte_regs = (ctxt->rex_prefix == 0) && byteop;
1016 p = (unsigned char *)reg_rmw(ctxt, modrm_reg & 3) + 1;
1018 p = reg_rmw(ctxt, modrm_reg);
1022 static int read_descriptor(struct x86_emulate_ctxt *ctxt,
1031 rc = segmented_read_std(ctxt, addr, size, 2);
1035 rc = segmented_read_std(ctxt, addr, address, op_bytes);
1083 static int em_bsf_c(struct x86_emulate_ctxt *ctxt)
1086 if (ctxt->src.val == 0)
1087 ctxt->dst.type = OP_NONE;
1088 return fastop(ctxt, em_bsf);
1091 static int em_bsr_c(struct x86_emulate_ctxt *ctxt)
1094 if (ctxt->src.val == 0)
1095 ctxt->dst.type = OP_NONE;
1096 return fastop(ctxt, em_bsr);
1230 static int em_fninit(struct x86_emulate_ctxt *ctxt)
1232 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1233 return emulate_nm(ctxt);
1241 static int em_fnstcw(struct x86_emulate_ctxt *ctxt)
1245 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1246 return emulate_nm(ctxt);
1252 ctxt->dst.val = fcw;
1257 static int em_fnstsw(struct x86_emulate_ctxt *ctxt)
1261 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1262 return emulate_nm(ctxt);
1268 ctxt->dst.val = fsw;
1273 static void decode_register_operand(struct x86_emulate_ctxt *ctxt,
1276 unsigned reg = ctxt->modrm_reg;
1278 if (!(ctxt->d & ModRM))
1279 reg = (ctxt->b & 7) | ((ctxt->rex_prefix & 1) << 3);
1281 if (ctxt->d & Sse) {
1288 if (ctxt->d & Mmx) {
1297 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
1298 op->addr.reg = decode_register(ctxt, reg, ctxt->d & ByteOp);
1304 static void adjust_modrm_seg(struct x86_emulate_ctxt *ctxt, int base_reg)
1307 ctxt->modrm_seg = VCPU_SREG_SS;
1310 static int decode_modrm(struct x86_emulate_ctxt *ctxt,
1318 ctxt->modrm_reg = ((ctxt->rex_prefix << 1) & 8); /* REX.R */
1319 index_reg = (ctxt->rex_prefix << 2) & 8; /* REX.X */
1320 base_reg = (ctxt->rex_prefix << 3) & 8; /* REX.B */
1322 ctxt->modrm_mod = (ctxt->modrm & 0xc0) >> 6;
1323 ctxt->modrm_reg |= (ctxt->modrm & 0x38) >> 3;
1324 ctxt->modrm_rm = base_reg | (ctxt->modrm & 0x07);
1325 ctxt->modrm_seg = VCPU_SREG_DS;
1327 if (ctxt->modrm_mod == 3 || (ctxt->d & NoMod)) {
1329 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
1330 op->addr.reg = decode_register(ctxt, ctxt->modrm_rm,
1331 ctxt->d & ByteOp);
1332 if (ctxt->d & Sse) {
1335 op->addr.xmm = ctxt->modrm_rm;
1336 read_sse_reg(&op->vec_val, ctxt->modrm_rm);
1339 if (ctxt->d & Mmx) {
1342 op->addr.mm = ctxt->modrm_rm & 7;
1351 if (ctxt->ad_bytes == 2) {
1352 unsigned bx = reg_read(ctxt, VCPU_REGS_RBX);
1353 unsigned bp = reg_read(ctxt, VCPU_REGS_RBP);
1354 unsigned si = reg_read(ctxt, VCPU_REGS_RSI);
1355 unsigned di = reg_read(ctxt, VCPU_REGS_RDI);
1358 switch (ctxt->modrm_mod) {
1360 if (ctxt->modrm_rm == 6)
1361 modrm_ea += insn_fetch(u16, ctxt);
1364 modrm_ea += insn_fetch(s8, ctxt);
1367 modrm_ea += insn_fetch(u16, ctxt);
1370 switch (ctxt->modrm_rm) {
1390 if (ctxt->modrm_mod != 0)
1397 if (ctxt->modrm_rm == 2 || ctxt->modrm_rm == 3 ||
1398 (ctxt->modrm_rm == 6 && ctxt->modrm_mod != 0))
1399 ctxt->modrm_seg = VCPU_SREG_SS;
1403 if ((ctxt->modrm_rm & 7) == 4) {
1404 sib = insn_fetch(u8, ctxt);
1409 if ((base_reg & 7) == 5 && ctxt->modrm_mod == 0)
1410 modrm_ea += insn_fetch(s32, ctxt);
1412 modrm_ea += reg_read(ctxt, base_reg);
1413 adjust_modrm_seg(ctxt, base_reg);
1415 if ((ctxt->d & IncSP) &&
1417 modrm_ea += ctxt->op_bytes;
1420 modrm_ea += reg_read(ctxt, index_reg) << scale;
1421 } else if ((ctxt->modrm_rm & 7) == 5 && ctxt->modrm_mod == 0) {
1422 modrm_ea += insn_fetch(s32, ctxt);
1423 if (ctxt->mode == X86EMUL_MODE_PROT64)
1424 ctxt->rip_relative = 1;
1426 base_reg = ctxt->modrm_rm;
1427 modrm_ea += reg_read(ctxt, base_reg);
1428 adjust_modrm_seg(ctxt, base_reg);
1430 switch (ctxt->modrm_mod) {
1432 modrm_ea += insn_fetch(s8, ctxt);
1435 modrm_ea += insn_fetch(s32, ctxt);
1440 if (ctxt->ad_bytes != 8)
1441 ctxt->memop.addr.mem.ea = (u32)ctxt->memop.addr.mem.ea;
1447 static int decode_abs(struct x86_emulate_ctxt *ctxt,
1453 switch (ctxt->ad_bytes) {
1455 op->addr.mem.ea = insn_fetch(u16, ctxt);
1458 op->addr.mem.ea = insn_fetch(u32, ctxt);
1461 op->addr.mem.ea = insn_fetch(u64, ctxt);
1468 static void fetch_bit_operand(struct x86_emulate_ctxt *ctxt)
1472 if (ctxt->dst.type == OP_MEM && ctxt->src.type == OP_REG) {
1473 mask = ~((long)ctxt->dst.bytes * 8 - 1);
1475 if (ctxt->src.bytes == 2)
1476 sv = (s16)ctxt->src.val & (s16)mask;
1477 else if (ctxt->src.bytes == 4)
1478 sv = (s32)ctxt->src.val & (s32)mask;
1480 sv = (s64)ctxt->src.val & (s64)mask;
1482 ctxt->dst.addr.mem.ea = address_mask(ctxt,
1483 ctxt->dst.addr.mem.ea + (sv >> 3));
1487 ctxt->src.val &= (ctxt->dst.bytes << 3) - 1;
1490 static int read_emulated(struct x86_emulate_ctxt *ctxt,
1494 struct read_cache *mc = &ctxt->mem_read;
1501 rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, size,
1502 &ctxt->exception);
1514 static int segmented_read(struct x86_emulate_ctxt *ctxt,
1522 rc = linearize(ctxt, addr, size, false, &linear);
1525 return read_emulated(ctxt, linear, data, size);
1528 static int segmented_write(struct x86_emulate_ctxt *ctxt,
1536 rc = linearize(ctxt, addr, size, true, &linear);
1539 return ctxt->ops->write_emulated(ctxt, linear, data, size,
1540 &ctxt->exception);
1543 static int segmented_cmpxchg(struct x86_emulate_ctxt *ctxt,
1551 rc = linearize(ctxt, addr, size, true, &linear);
1554 return ctxt->ops->cmpxchg_emulated(ctxt, linear, orig_data, data,
1555 size, &ctxt->exception);
1558 static int pio_in_emulated(struct x86_emulate_ctxt *ctxt,
1562 struct read_cache *rc = &ctxt->io_read;
1566 unsigned int count = ctxt->rep_prefix ?
1567 address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) : 1;
1568 in_page = (ctxt->eflags & X86_EFLAGS_DF) ?
1569 offset_in_page(reg_read(ctxt, VCPU_REGS_RDI)) :
1570 PAGE_SIZE - offset_in_page(reg_read(ctxt, VCPU_REGS_RDI));
1575 if (!ctxt->ops->pio_in_emulated(ctxt, size, port, rc->data, n))
1580 if (ctxt->rep_prefix && (ctxt->d & String) &&
1581 !(ctxt->eflags & X86_EFLAGS_DF)) {
1582 ctxt->dst.data = rc->data + rc->pos;
1583 ctxt->dst.type = OP_MEM_STR;
1584 ctxt->dst.count = (rc->end - rc->pos) / size;
1593 static int read_interrupt_descriptor(struct x86_emulate_ctxt *ctxt,
1599 ctxt->ops->get_idt(ctxt, &dt);
1602 return emulate_gp(ctxt, index << 3 | 0x2);
1605 return linear_read_system(ctxt, addr, desc, sizeof(*desc));
1608 static void get_descriptor_table_ptr(struct x86_emulate_ctxt *ctxt,
1611 const struct x86_emulate_ops *ops = ctxt->ops;
1619 if (!ops->get_segment(ctxt, &sel, &desc, &base3,
1626 ops->get_gdt(ctxt, dt);
1629 static int get_descriptor_ptr(struct x86_emulate_ctxt *ctxt,
1636 get_descriptor_table_ptr(ctxt, selector, &dt);
1639 return emulate_gp(ctxt, selector & 0xfffc);
1647 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1658 static int read_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1664 rc = get_descriptor_ptr(ctxt, selector, desc_addr_p);
1668 return linear_read_system(ctxt, *desc_addr_p, desc, sizeof(*desc));
1672 static int write_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1678 rc = get_descriptor_ptr(ctxt, selector, &addr);
1682 return linear_write_system(ctxt, addr, desc, sizeof(*desc));
1685 static int __load_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1702 if (ctxt->mode == X86EMUL_MODE_REAL) {
1705 ctxt->ops->get_segment(ctxt, &dummy, &seg_desc, NULL, seg);
1708 } else if (seg <= VCPU_SREG_GS && ctxt->mode == X86EMUL_MODE_VM86) {
1731 if (ctxt->mode != X86EMUL_MODE_PROT64 || rpl != cpl)
1735 * ctxt->ops->set_segment expects the CPL to be in
1750 ret = read_segment_descriptor(ctxt, selector, &seg_desc, &desc_addr);
1793 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1831 ret = write_segment_descriptor(ctxt, selector,
1836 } else if (ctxt->mode == X86EMUL_MODE_PROT64) {
1837 ret = linear_read_system(ctxt, desc_addr+8, &base3, sizeof(base3));
1841 ((u64)base3 << 32), ctxt))
1842 return emulate_gp(ctxt, err_code);
1848 ret = ctxt->ops->cmpxchg_emulated(ctxt, desc_addr, &old_desc, &seg_desc,
1849 sizeof(seg_desc), &ctxt->exception);
1854 ctxt->ops->set_segment(ctxt, selector, &seg_desc, base3, seg);
1859 return emulate_exception(ctxt, err_vec, err_code, true);
1862 static int load_segment_descriptor(struct x86_emulate_ctxt *ctxt,
1865 u8 cpl = ctxt->ops->cpl(ctxt);
1878 ctxt->mode == X86EMUL_MODE_PROT64)
1879 return emulate_exception(ctxt, GP_VECTOR, 0, true);
1881 return __load_segment_descriptor(ctxt, selector, seg, cpl,
1890 static int writeback(struct x86_emulate_ctxt *ctxt, struct operand *op)
1897 if (ctxt->lock_prefix)
1898 return segmented_cmpxchg(ctxt,
1904 return segmented_write(ctxt,
1910 return segmented_write(ctxt,
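
writeback() commits ctxt->dst after a handler returns: plain memory destinations go through segmented_write(), and a LOCK prefix routes the store through segmented_cmpxchg() against the original value. Handlers therefore usually just fill in dst.val; a hypothetical example (em_store_answer and the value 42 are made up):

    static int em_store_answer(struct x86_emulate_ctxt *ctxt)
    {
    	/* Committed to the decoded register/memory destination by writeback();
    	 * setting ctxt->dst.type = OP_NONE instead would suppress it. */
    	ctxt->dst.val = 42;
    	return X86EMUL_CONTINUE;
    }
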
1930 static int push(struct x86_emulate_ctxt *ctxt, void *data, int bytes)
1934 rsp_increment(ctxt, -bytes);
1935 addr.ea = reg_read(ctxt, VCPU_REGS_RSP) & stack_mask(ctxt);
1938 return segmented_write(ctxt, addr, data, bytes);
1941 static int em_push(struct x86_emulate_ctxt *ctxt)
1944 ctxt->dst.type = OP_NONE;
1945 return push(ctxt, &ctxt->src.val, ctxt->op_bytes);
1948 static int emulate_pop(struct x86_emulate_ctxt *ctxt,
1954 addr.ea = reg_read(ctxt, VCPU_REGS_RSP) & stack_mask(ctxt);
1956 rc = segmented_read(ctxt, addr, dest, len);
1960 rsp_increment(ctxt, len);
1964 static int em_pop(struct x86_emulate_ctxt *ctxt)
1966 return emulate_pop(ctxt, &ctxt->dst.val, ctxt->op_bytes);
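
push() pre-decrements RSP via rsp_increment(), masks the new RSP with stack_mask() to build the SS-relative address and writes with segmented_write(); emulate_pop() mirrors this, reading before it advances RSP. A hypothetical helper built on em_push(), shaped like the far-call sequences later in the file (push_far_frame is made up):

    static int push_far_frame(struct x86_emulate_ctxt *ctxt, u16 sel, ulong ret_ip)
    {
    	int rc;

    	ctxt->src.val = sel;		/* em_push() pushes ctxt->op_bytes bytes of src.val */
    	rc = em_push(ctxt);
    	if (rc != X86EMUL_CONTINUE)
    		return rc;

    	ctxt->src.val = ret_ip;
    	return em_push(ctxt);
    }
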
1969 static int emulate_popf(struct x86_emulate_ctxt *ctxt,
1974 int iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT;
1975 int cpl = ctxt->ops->cpl(ctxt);
1977 rc = emulate_pop(ctxt, &val, len);
1986 switch(ctxt->mode) {
1997 return emulate_gp(ctxt, 0);
2006 (ctxt->eflags & ~change_mask) | (val & change_mask);
2011 static int em_popf(struct x86_emulate_ctxt *ctxt)
2013 ctxt->dst.type = OP_REG;
2014 ctxt->dst.addr.reg = &ctxt->eflags;
2015 ctxt->dst.bytes = ctxt->op_bytes;
2016 return emulate_popf(ctxt, &ctxt->dst.val, ctxt->op_bytes);
2019 static int em_enter(struct x86_emulate_ctxt *ctxt)
2022 unsigned frame_size = ctxt->src.val;
2023 unsigned nesting_level = ctxt->src2.val & 31;
2029 rbp = reg_read(ctxt, VCPU_REGS_RBP);
2030 rc = push(ctxt, &rbp, stack_size(ctxt));
2033 assign_masked(reg_rmw(ctxt, VCPU_REGS_RBP), reg_read(ctxt, VCPU_REGS_RSP),
2034 stack_mask(ctxt));
2035 assign_masked(reg_rmw(ctxt, VCPU_REGS_RSP),
2036 reg_read(ctxt, VCPU_REGS_RSP) - frame_size,
2037 stack_mask(ctxt));
2041 static int em_leave(struct x86_emulate_ctxt *ctxt)
2043 assign_masked(reg_rmw(ctxt, VCPU_REGS_RSP), reg_read(ctxt, VCPU_REGS_RBP),
2044 stack_mask(ctxt));
2045 return emulate_pop(ctxt, reg_rmw(ctxt, VCPU_REGS_RBP), ctxt->op_bytes);
2048 static int em_push_sreg(struct x86_emulate_ctxt *ctxt)
2050 int seg = ctxt->src2.val;
2052 ctxt->src.val = get_segment_selector(ctxt, seg);
2053 if (ctxt->op_bytes == 4) {
2054 rsp_increment(ctxt, -2);
2055 ctxt->op_bytes = 2;
2058 return em_push(ctxt);
2061 static int em_pop_sreg(struct x86_emulate_ctxt *ctxt)
2063 int seg = ctxt->src2.val;
2067 rc = emulate_pop(ctxt, &selector, 2);
2072 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS;
2073 if (ctxt->op_bytes > 2)
2074 rsp_increment(ctxt, ctxt->op_bytes - 2);
2076 rc = load_segment_descriptor(ctxt, (u16)selector, seg);
2080 static int em_pusha(struct x86_emulate_ctxt *ctxt)
2082 unsigned long old_esp = reg_read(ctxt, VCPU_REGS_RSP);
2088 (ctxt->src.val = old_esp) : (ctxt->src.val = reg_read(ctxt, reg));
2090 rc = em_push(ctxt);
2100 static int em_pushf(struct x86_emulate_ctxt *ctxt)
2102 ctxt->src.val = (unsigned long)ctxt->eflags & ~X86_EFLAGS_VM;
2103 return em_push(ctxt);
2106 static int em_popa(struct x86_emulate_ctxt *ctxt)
2114 rsp_increment(ctxt, ctxt->op_bytes);
2118 rc = emulate_pop(ctxt, &val, ctxt->op_bytes);
2121 assign_register(reg_rmw(ctxt, reg), val, ctxt->op_bytes);
2127 static int __emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq)
2129 const struct x86_emulate_ops *ops = ctxt->ops;
2137 ctxt->src.val = ctxt->eflags;
2138 rc = em_push(ctxt);
2142 ctxt->eflags &= ~(X86_EFLAGS_IF | X86_EFLAGS_TF | X86_EFLAGS_AC);
2144 ctxt->src.val = get_segment_selector(ctxt, VCPU_SREG_CS);
2145 rc = em_push(ctxt);
2149 ctxt->src.val = ctxt->_eip;
2150 rc = em_push(ctxt);
2154 ops->get_idt(ctxt, &dt);
2159 rc = linear_read_system(ctxt, cs_addr, &cs, 2);
2163 rc = linear_read_system(ctxt, eip_addr, &eip, 2);
2167 rc = load_segment_descriptor(ctxt, cs, VCPU_SREG_CS);
2171 ctxt->_eip = eip;
2176 int emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq)
2180 invalidate_registers(ctxt);
2181 rc = __emulate_int_real(ctxt, irq);
2183 writeback_registers(ctxt);
2187 static int emulate_int(struct x86_emulate_ctxt *ctxt, int irq)
2189 switch(ctxt->mode) {
2191 return __emulate_int_real(ctxt, irq);
2202 static int emulate_iret_real(struct x86_emulate_ctxt *ctxt)
2219 rc = emulate_pop(ctxt, &temp_eip, ctxt->op_bytes);
2225 return emulate_gp(ctxt, 0);
2227 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes);
2232 rc = emulate_pop(ctxt, &temp_eflags, ctxt->op_bytes);
2237 rc = load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS);
2242 ctxt->_eip = temp_eip;
2244 if (ctxt->op_bytes == 4)
2245 ctxt->eflags = ((temp_eflags & mask) | (ctxt->eflags & vm86_mask));
2246 else if (ctxt->op_bytes == 2) {
2247 ctxt->eflags &= ~0xffff;
2248 ctxt->eflags |= temp_eflags;
2251 ctxt->eflags &= ~EFLG_RESERVED_ZEROS_MASK; /* Clear reserved zeros */
2252 ctxt->eflags |= X86_EFLAGS_FIXED;
2253 ctxt->ops->set_nmi_mask(ctxt, false);
2258 static int em_iret(struct x86_emulate_ctxt *ctxt)
2260 switch(ctxt->mode) {
2262 return emulate_iret_real(ctxt);
2273 static int em_jmp_far(struct x86_emulate_ctxt *ctxt)
2278 u8 cpl = ctxt->ops->cpl(ctxt);
2280 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
2282 rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl,
2288 rc = assign_eip_far(ctxt, ctxt->src.val);
2296 static int em_jmp_abs(struct x86_emulate_ctxt *ctxt)
2298 return assign_eip_near(ctxt, ctxt->src.val);
2301 static int em_call_near_abs(struct x86_emulate_ctxt *ctxt)
2306 old_eip = ctxt->_eip;
2307 rc = assign_eip_near(ctxt, ctxt->src.val);
2310 ctxt->src.val = old_eip;
2311 rc = em_push(ctxt);
2315 static int em_cmpxchg8b(struct x86_emulate_ctxt *ctxt)
2317 u64 old = ctxt->dst.orig_val64;
2319 if (ctxt->dst.bytes == 16)
2322 if (((u32) (old >> 0) != (u32) reg_read(ctxt, VCPU_REGS_RAX)) ||
2323 ((u32) (old >> 32) != (u32) reg_read(ctxt, VCPU_REGS_RDX))) {
2324 *reg_write(ctxt, VCPU_REGS_RAX) = (u32) (old >> 0);
2325 *reg_write(ctxt, VCPU_REGS_RDX) = (u32) (old >> 32);
2326 ctxt->eflags &= ~X86_EFLAGS_ZF;
2328 ctxt->dst.val64 = ((u64)reg_read(ctxt, VCPU_REGS_RCX) << 32) |
2329 (u32) reg_read(ctxt, VCPU_REGS_RBX);
2331 ctxt->eflags |= X86_EFLAGS_ZF;
2336 static int em_ret(struct x86_emulate_ctxt *ctxt)
2341 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes);
2345 return assign_eip_near(ctxt, eip);
2348 static int em_ret_far(struct x86_emulate_ctxt *ctxt)
2352 int cpl = ctxt->ops->cpl(ctxt);
2355 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes);
2358 rc = emulate_pop(ctxt, &cs, ctxt->op_bytes);
2362 if (ctxt->mode >= X86EMUL_MODE_PROT16 && (cs & 3) > cpl)
2364 rc = __load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS, cpl,
2369 rc = assign_eip_far(ctxt, eip);
2377 static int em_ret_far_imm(struct x86_emulate_ctxt *ctxt)
2381 rc = em_ret_far(ctxt);
2384 rsp_increment(ctxt, ctxt->src.val);
2388 static int em_cmpxchg(struct x86_emulate_ctxt *ctxt)
2391 ctxt->dst.orig_val = ctxt->dst.val;
2392 ctxt->dst.val = reg_read(ctxt, VCPU_REGS_RAX);
2393 ctxt->src.orig_val = ctxt->src.val;
2394 ctxt->src.val = ctxt->dst.orig_val;
2395 fastop(ctxt, em_cmp);
2397 if (ctxt->eflags & X86_EFLAGS_ZF) {
2399 ctxt->src.type = OP_NONE;
2400 ctxt->dst.val = ctxt->src.orig_val;
2403 ctxt->src.type = OP_REG;
2404 ctxt->src.addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX);
2405 ctxt->src.val = ctxt->dst.orig_val;
2407 ctxt->dst.val = ctxt->dst.orig_val;
2412 static int em_lseg(struct x86_emulate_ctxt *ctxt)
2414 int seg = ctxt->src2.val;
2418 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
2420 rc = load_segment_descriptor(ctxt, sel, seg);
2424 ctxt->dst.val = ctxt->src.val;
2428 static int emulator_has_longmode(struct x86_emulate_ctxt *ctxt)
2431 return ctxt->ops->guest_has_long_mode(ctxt);
2449 static int rsm_load_seg_32(struct x86_emulate_ctxt *ctxt, const char *smstate,
2466 ctxt->ops->set_segment(ctxt, selector, &desc, 0, n);
2471 static int rsm_load_seg_64(struct x86_emulate_ctxt *ctxt, const char *smstate,
2487 ctxt->ops->set_segment(ctxt, selector, &desc, base3, n);
2492 static int rsm_enter_protected_mode(struct x86_emulate_ctxt *ctxt,
2505 bad = ctxt->ops->set_cr(ctxt, 3, cr3);
2514 bad = ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PCIDE);
2518 bad = ctxt->ops->set_cr(ctxt, 0, cr0);
2523 bad = ctxt->ops->set_cr(ctxt, 4, cr4);
2527 bad = ctxt->ops->set_cr(ctxt, 3, cr3 | pcid);
2537 static int rsm_load_state_32(struct x86_emulate_ctxt *ctxt,
2548 ctxt->eflags = GET_SMSTATE(u32, smstate, 0x7ff4) | X86_EFLAGS_FIXED;
2549 ctxt->_eip = GET_SMSTATE(u32, smstate, 0x7ff0);
2552 *reg_write(ctxt, i) = GET_SMSTATE(u32, smstate, 0x7fd0 + i * 4);
2556 if (ctxt->ops->set_dr(ctxt, 6, (val & DR6_VOLATILE) | DR6_FIXED_1))
2561 if (ctxt->ops->set_dr(ctxt, 7, (val & DR7_VOLATILE) | DR7_FIXED_1))
2568 ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_TR);
2574 ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_LDTR);
2578 ctxt->ops->set_gdt(ctxt, &dt);
2582 ctxt->ops->set_idt(ctxt, &dt);
2585 int r = rsm_load_seg_32(ctxt, smstate, i);
2592 ctxt->ops->set_smbase(ctxt, GET_SMSTATE(u32, smstate, 0x7ef8));
2594 return rsm_enter_protected_mode(ctxt, cr0, cr3, cr4);
2598 static int rsm_load_state_64(struct x86_emulate_ctxt *ctxt,
2609 *reg_write(ctxt, i) = GET_SMSTATE(u64, smstate, 0x7ff8 - i * 8);
2611 ctxt->_eip = GET_SMSTATE(u64, smstate, 0x7f78);
2612 ctxt->eflags = GET_SMSTATE(u32, smstate, 0x7f70) | X86_EFLAGS_FIXED;
2616 if (ctxt->ops->set_dr(ctxt, 6, (val & DR6_VOLATILE) | DR6_FIXED_1))
2621 if (ctxt->ops->set_dr(ctxt, 7, (val & DR7_VOLATILE) | DR7_FIXED_1))
2627 ctxt->ops->set_smbase(ctxt, GET_SMSTATE(u32, smstate, 0x7f00));
2630 if (ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA))
2638 ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_TR);
2642 ctxt->ops->set_idt(ctxt, &dt);
2649 ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_LDTR);
2653 ctxt->ops->set_gdt(ctxt, &dt);
2655 r = rsm_enter_protected_mode(ctxt, cr0, cr3, cr4);
2660 r = rsm_load_seg_64(ctxt, smstate, i);
2669 static int em_rsm(struct x86_emulate_ctxt *ctxt)
2676 if ((ctxt->ops->get_hflags(ctxt) & X86EMUL_SMM_MASK) == 0)
2677 return emulate_ud(ctxt);
2679 smbase = ctxt->ops->get_smbase(ctxt);
2681 ret = ctxt->ops->read_phys(ctxt, smbase + 0xfe00, buf, sizeof(buf));
2685 if ((ctxt->ops->get_hflags(ctxt) & X86EMUL_SMM_INSIDE_NMI_MASK) == 0)
2686 ctxt->ops->set_nmi_mask(ctxt, false);
2688 ctxt->ops->set_hflags(ctxt, ctxt->ops->get_hflags(ctxt) &
2696 if (emulator_has_longmode(ctxt)) {
2700 cr4 = ctxt->ops->get_cr(ctxt, 4);
2702 ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PCIDE);
2708 ctxt->ops->set_segment(ctxt, 0, &cs_desc, 0, VCPU_SREG_CS);
2712 cr0 = ctxt->ops->get_cr(ctxt, 0);
2714 ctxt->ops->set_cr(ctxt, 0, cr0 & ~(X86_CR0_PG | X86_CR0_PE));
2716 if (emulator_has_longmode(ctxt)) {
2718 cr4 = ctxt->ops->get_cr(ctxt, 4);
2720 ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PAE);
2724 ctxt->ops->set_msr(ctxt, MSR_EFER, efer);
2732 if (ctxt->ops->pre_leave_smm(ctxt, buf))
2736 if (emulator_has_longmode(ctxt))
2737 ret = rsm_load_state_64(ctxt, buf);
2740 ret = rsm_load_state_32(ctxt, buf);
2747 ctxt->ops->post_leave_smm(ctxt);
2753 setup_syscalls_segments(struct x86_emulate_ctxt *ctxt,
2779 static bool vendor_intel(struct x86_emulate_ctxt *ctxt)
2784 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
2788 static bool em_syscall_is_enabled(struct x86_emulate_ctxt *ctxt)
2790 const struct x86_emulate_ops *ops = ctxt->ops;
2797 if (ctxt->mode == X86EMUL_MODE_PROT64)
2802 ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
2823 static int em_syscall(struct x86_emulate_ctxt *ctxt)
2825 const struct x86_emulate_ops *ops = ctxt->ops;
2832 if (ctxt->mode == X86EMUL_MODE_REAL ||
2833 ctxt->mode == X86EMUL_MODE_VM86)
2834 return emulate_ud(ctxt);
2836 if (!(em_syscall_is_enabled(ctxt)))
2837 return emulate_ud(ctxt);
2839 ops->get_msr(ctxt, MSR_EFER, &efer);
2841 return emulate_ud(ctxt);
2843 setup_syscalls_segments(ctxt, &cs, &ss);
2844 ops->get_msr(ctxt, MSR_STAR, &msr_data);
2853 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2854 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2856 *reg_write(ctxt, VCPU_REGS_RCX) = ctxt->_eip;
2859 *reg_write(ctxt, VCPU_REGS_R11) = ctxt->eflags;
2861 ops->get_msr(ctxt,
2862 ctxt->mode == X86EMUL_MODE_PROT64 ?
2864 ctxt->_eip = msr_data;
2866 ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data);
2867 ctxt->eflags &= ~msr_data;
2868 ctxt->eflags |= X86_EFLAGS_FIXED;
2872 ops->get_msr(ctxt, MSR_STAR, &msr_data);
2873 ctxt->_eip = (u32)msr_data;
2875 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF);
2878 ctxt->tf = (ctxt->eflags & X86_EFLAGS_TF) != 0;
2882 static int em_sysenter(struct x86_emulate_ctxt *ctxt)
2884 const struct x86_emulate_ops *ops = ctxt->ops;
2890 ops->get_msr(ctxt, MSR_EFER, &efer);
2892 if (ctxt->mode == X86EMUL_MODE_REAL)
2893 return emulate_gp(ctxt, 0);
2899 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA)
2900 && !vendor_intel(ctxt))
2901 return emulate_ud(ctxt);
2904 if (ctxt->mode == X86EMUL_MODE_PROT64)
2907 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
2909 return emulate_gp(ctxt, 0);
2911 setup_syscalls_segments(ctxt, &cs, &ss);
2912 ctxt->eflags &= ~(X86_EFLAGS_VM | X86_EFLAGS_IF);
2920 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2921 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2923 ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data);
2924 ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data;
2926 ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data);
2927 *reg_write(ctxt, VCPU_REGS_RSP) = (efer & EFER_LMA) ? msr_data :
2930 ctxt->mode = X86EMUL_MODE_PROT64;
2935 static int em_sysexit(struct x86_emulate_ctxt *ctxt)
2937 const struct x86_emulate_ops *ops = ctxt->ops;
2944 if (ctxt->mode == X86EMUL_MODE_REAL ||
2945 ctxt->mode == X86EMUL_MODE_VM86)
2946 return emulate_gp(ctxt, 0);
2948 setup_syscalls_segments(ctxt, &cs, &ss);
2950 if ((ctxt->rex_prefix & 0x8) != 0x0)
2955 rcx = reg_read(ctxt, VCPU_REGS_RCX);
2956 rdx = reg_read(ctxt, VCPU_REGS_RDX);
2960 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
2965 return emulate_gp(ctxt, 0);
2973 return emulate_gp(ctxt, 0);
2977 if (emul_is_noncanonical_address(rcx, ctxt) ||
2978 emul_is_noncanonical_address(rdx, ctxt))
2979 return emulate_gp(ctxt, 0);
2985 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2986 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2988 ctxt->_eip = rdx;
2989 ctxt->mode = usermode;
2990 *reg_write(ctxt, VCPU_REGS_RSP) = rcx;
2995 static bool emulator_bad_iopl(struct x86_emulate_ctxt *ctxt)
2998 if (ctxt->mode == X86EMUL_MODE_REAL)
3000 if (ctxt->mode == X86EMUL_MODE_VM86)
3002 iopl = (ctxt->eflags & X86_EFLAGS_IOPL) >> X86_EFLAGS_IOPL_BIT;
3003 return ctxt->ops->cpl(ctxt) > iopl;
3009 static bool emulator_io_port_access_allowed(struct x86_emulate_ctxt *ctxt,
3012 const struct x86_emulate_ops *ops = ctxt->ops;
3028 ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR);
3037 r = ops->read_std(ctxt, base + 102, &io_bitmap_ptr, 2, NULL, true);
3042 r = ops->read_std(ctxt, base + io_bitmap_ptr + port/8, &perm, 2, NULL, true);
3050 static bool emulator_io_permited(struct x86_emulate_ctxt *ctxt,
3053 if (ctxt->perm_ok)
3056 if (emulator_bad_iopl(ctxt))
3057 if (!emulator_io_port_access_allowed(ctxt, port, len))
3060 ctxt->perm_ok = true;
3065 static void string_registers_quirk(struct x86_emulate_ctxt *ctxt)
3072 if (ctxt->ad_bytes != 4 || !vendor_intel(ctxt))
3075 *reg_write(ctxt, VCPU_REGS_RCX) = 0;
3077 switch (ctxt->b) {
3080 *reg_rmw(ctxt, VCPU_REGS_RSI) &= (u32)-1;
3084 *reg_rmw(ctxt, VCPU_REGS_RDI) &= (u32)-1;
3089 static void save_state_to_tss16(struct x86_emulate_ctxt *ctxt,
3092 tss->ip = ctxt->_eip;
3093 tss->flag = ctxt->eflags;
3094 tss->ax = reg_read(ctxt, VCPU_REGS_RAX);
3095 tss->cx = reg_read(ctxt, VCPU_REGS_RCX);
3096 tss->dx = reg_read(ctxt, VCPU_REGS_RDX);
3097 tss->bx = reg_read(ctxt, VCPU_REGS_RBX);
3098 tss->sp = reg_read(ctxt, VCPU_REGS_RSP);
3099 tss->bp = reg_read(ctxt, VCPU_REGS_RBP);
3100 tss->si = reg_read(ctxt, VCPU_REGS_RSI);
3101 tss->di = reg_read(ctxt, VCPU_REGS_RDI);
3103 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES);
3104 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS);
3105 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS);
3106 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS);
3107 tss->ldt = get_segment_selector(ctxt, VCPU_SREG_LDTR);
3110 static int load_state_from_tss16(struct x86_emulate_ctxt *ctxt,
3116 ctxt->_eip = tss->ip;
3117 ctxt->eflags = tss->flag | 2;
3118 *reg_write(ctxt, VCPU_REGS_RAX) = tss->ax;
3119 *reg_write(ctxt, VCPU_REGS_RCX) = tss->cx;
3120 *reg_write(ctxt, VCPU_REGS_RDX) = tss->dx;
3121 *reg_write(ctxt, VCPU_REGS_RBX) = tss->bx;
3122 *reg_write(ctxt, VCPU_REGS_RSP) = tss->sp;
3123 *reg_write(ctxt, VCPU_REGS_RBP) = tss->bp;
3124 *reg_write(ctxt, VCPU_REGS_RSI) = tss->si;
3125 *reg_write(ctxt, VCPU_REGS_RDI) = tss->di;
3131 set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR);
3132 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
3133 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
3134 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
3135 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
3143 ret = __load_segment_descriptor(ctxt, tss->ldt, VCPU_SREG_LDTR, cpl,
3147 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl,
3151 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl,
3155 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl,
3159 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl,
3167 static int task_switch_16(struct x86_emulate_ctxt *ctxt,
3175 ret = linear_read_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg));
3179 save_state_to_tss16(ctxt, &tss_seg);
3181 ret = linear_write_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg));
3185 ret = linear_read_system(ctxt, new_tss_base, &tss_seg, sizeof(tss_seg));
3192 ret = linear_write_system(ctxt, new_tss_base,
3199 return load_state_from_tss16(ctxt, &tss_seg);
3202 static void save_state_to_tss32(struct x86_emulate_ctxt *ctxt,
3206 tss->eip = ctxt->_eip;
3207 tss->eflags = ctxt->eflags;
3208 tss->eax = reg_read(ctxt, VCPU_REGS_RAX);
3209 tss->ecx = reg_read(ctxt, VCPU_REGS_RCX);
3210 tss->edx = reg_read(ctxt, VCPU_REGS_RDX);
3211 tss->ebx = reg_read(ctxt, VCPU_REGS_RBX);
3212 tss->esp = reg_read(ctxt, VCPU_REGS_RSP);
3213 tss->ebp = reg_read(ctxt, VCPU_REGS_RBP);
3214 tss->esi = reg_read(ctxt, VCPU_REGS_RSI);
3215 tss->edi = reg_read(ctxt, VCPU_REGS_RDI);
3217 tss->es = get_segment_selector(ctxt, VCPU_SREG_ES);
3218 tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS);
3219 tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS);
3220 tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS);
3221 tss->fs = get_segment_selector(ctxt, VCPU_SREG_FS);
3222 tss->gs = get_segment_selector(ctxt, VCPU_SREG_GS);
3225 static int load_state_from_tss32(struct x86_emulate_ctxt *ctxt,
3231 if (ctxt->ops->set_cr(ctxt, 3, tss->cr3))
3232 return emulate_gp(ctxt, 0);
3233 ctxt->_eip = tss->eip;
3234 ctxt->eflags = tss->eflags | 2;
3237 *reg_write(ctxt, VCPU_REGS_RAX) = tss->eax;
3238 *reg_write(ctxt, VCPU_REGS_RCX) = tss->ecx;
3239 *reg_write(ctxt, VCPU_REGS_RDX) = tss->edx;
3240 *reg_write(ctxt, VCPU_REGS_RBX) = tss->ebx;
3241 *reg_write(ctxt, VCPU_REGS_RSP) = tss->esp;
3242 *reg_write(ctxt, VCPU_REGS_RBP) = tss->ebp;
3243 *reg_write(ctxt, VCPU_REGS_RSI) = tss->esi;
3244 *reg_write(ctxt, VCPU_REGS_RDI) = tss->edi;
3251 set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR);
3252 set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
3253 set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
3254 set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
3255 set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
3256 set_segment_selector(ctxt, tss->fs, VCPU_SREG_FS);
3257 set_segment_selector(ctxt, tss->gs, VCPU_SREG_GS);
3264 if (ctxt->eflags & X86_EFLAGS_VM) {
3265 ctxt->mode = X86EMUL_MODE_VM86;
3268 ctxt->mode = X86EMUL_MODE_PROT32;
3276 ret = __load_segment_descriptor(ctxt, tss->ldt_selector, VCPU_SREG_LDTR,
3280 ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl,
3284 ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl,
3288 ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl,
3292 ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl,
3296 ret = __load_segment_descriptor(ctxt, tss->fs, VCPU_SREG_FS, cpl,
3300 ret = __load_segment_descriptor(ctxt, tss->gs, VCPU_SREG_GS, cpl,
3306 static int task_switch_32(struct x86_emulate_ctxt *ctxt,
3316 ret = linear_read_system(ctxt, old_tss_base, &tss_seg, sizeof(tss_seg));
3320 save_state_to_tss32(ctxt, &tss_seg);
3323 ret = linear_write_system(ctxt, old_tss_base + eip_offset, &tss_seg.eip,
3328 ret = linear_read_system(ctxt, new_tss_base, &tss_seg, sizeof(tss_seg));
3335 ret = linear_write_system(ctxt, new_tss_base,
3342 return load_state_from_tss32(ctxt, &tss_seg);
3345 static int emulator_do_task_switch(struct x86_emulate_ctxt *ctxt,
3349 const struct x86_emulate_ops *ops = ctxt->ops;
3352 u16 old_tss_sel = get_segment_selector(ctxt, VCPU_SREG_TR);
3354 ops->get_cached_segment_base(ctxt, VCPU_SREG_TR);
3360 ret = read_segment_descriptor(ctxt, tss_selector, &next_tss_desc, &desc_addr);
3363 ret = read_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc, &desc_addr);
3383 ret = read_interrupt_descriptor(ctxt, idt_index,
3389 if ((tss_selector & 3) > dpl || ops->cpl(ctxt) > dpl)
3390 return emulate_gp(ctxt, (idt_index << 3) | 0x2);
3398 return emulate_ts(ctxt, tss_selector & 0xfffc);
3403 write_segment_descriptor(ctxt, old_tss_sel, &curr_tss_desc);
3407 ctxt->eflags = ctxt->eflags & ~X86_EFLAGS_NT;
3415 ret = task_switch_32(ctxt, tss_selector, old_tss_sel,
3418 ret = task_switch_16(ctxt, tss_selector, old_tss_sel,
3424 ctxt->eflags = ctxt->eflags | X86_EFLAGS_NT;
3428 write_segment_descriptor(ctxt, tss_selector, &next_tss_desc);
3431 ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS);
3432 ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR);
3435 ctxt->op_bytes = ctxt->ad_bytes = (next_tss_desc.type & 8) ? 4 : 2;
3436 ctxt->lock_prefix = 0;
3437 ctxt->src.val = (unsigned long) error_code;
3438 ret = em_push(ctxt);
3441 ops->get_dr(ctxt, 7, &dr7);
3442 ops->set_dr(ctxt, 7, dr7 & ~(DR_LOCAL_ENABLE_MASK | DR_LOCAL_SLOWDOWN));
3447 int emulator_task_switch(struct x86_emulate_ctxt *ctxt,
3453 invalidate_registers(ctxt);
3454 ctxt->_eip = ctxt->eip;
3455 ctxt->dst.type = OP_NONE;
3457 rc = emulator_do_task_switch(ctxt, tss_selector, idt_index, reason,
3461 ctxt->eip = ctxt->_eip;
3462 writeback_registers(ctxt);
3468 static void string_addr_inc(struct x86_emulate_ctxt *ctxt, int reg,
3471 int df = (ctxt->eflags & X86_EFLAGS_DF) ? -op->count : op->count;
3473 register_address_increment(ctxt, reg, df * op->bytes);
3474 op->addr.mem.ea = register_address(ctxt, reg);
3477 static int em_das(struct x86_emulate_ctxt *ctxt)
3482 cf = ctxt->eflags & X86_EFLAGS_CF;
3483 al = ctxt->dst.val;
3488 af = ctxt->eflags & X86_EFLAGS_AF;
3501 ctxt->dst.val = al;
3503 ctxt->src.type = OP_IMM;
3504 ctxt->src.val = 0;
3505 ctxt->src.bytes = 1;
3506 fastop(ctxt, em_or);
3507 ctxt->eflags &= ~(X86_EFLAGS_AF | X86_EFLAGS_CF);
3509 ctxt->eflags |= X86_EFLAGS_CF;
3511 ctxt->eflags |= X86_EFLAGS_AF;
3515 static int em_aam(struct x86_emulate_ctxt *ctxt)
3519 if (ctxt->src.val == 0)
3520 return emulate_de(ctxt);
3522 al = ctxt->dst.val & 0xff;
3523 ah = al / ctxt->src.val;
3524 al %= ctxt->src.val;
3526 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al | (ah << 8);
3529 ctxt->src.type = OP_IMM;
3530 ctxt->src.val = 0;
3531 ctxt->src.bytes = 1;
3532 fastop(ctxt, em_or);
3537 static int em_aad(struct x86_emulate_ctxt *ctxt)
3539 u8 al = ctxt->dst.val & 0xff;
3540 u8 ah = (ctxt->dst.val >> 8) & 0xff;
3542 al = (al + (ah * ctxt->src.val)) & 0xff;
3544 ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al;
3547 ctxt->src.type = OP_IMM;
3548 ctxt->src.val = 0;
3549 ctxt->src.bytes = 1;
3550 fastop(ctxt, em_or);
3555 static int em_call(struct x86_emulate_ctxt *ctxt)
3558 long rel = ctxt->src.val;
3560 ctxt->src.val = (unsigned long)ctxt->_eip;
3561 rc = jmp_rel(ctxt, rel);
3564 return em_push(ctxt);
3567 static int em_call_far(struct x86_emulate_ctxt *ctxt)
3573 const struct x86_emulate_ops *ops = ctxt->ops;
3574 int cpl = ctxt->ops->cpl(ctxt);
3575 enum x86emul_mode prev_mode = ctxt->mode;
3577 old_eip = ctxt->_eip;
3578 ops->get_segment(ctxt, &old_cs, &old_desc, NULL, VCPU_SREG_CS);
3580 memcpy(&sel, ctxt->src.valptr + ctxt->op_bytes, 2);
3581 rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl,
3586 rc = assign_eip_far(ctxt, ctxt->src.val);
3590 ctxt->src.val = old_cs;
3591 rc = em_push(ctxt);
3595 ctxt->src.val = old_eip;
3596 rc = em_push(ctxt);
3605 ops->set_segment(ctxt, old_cs, &old_desc, 0, VCPU_SREG_CS);
3606 ctxt->mode = prev_mode;
3611 static int em_ret_near_imm(struct x86_emulate_ctxt *ctxt)
3616 rc = emulate_pop(ctxt, &eip, ctxt->op_bytes);
3619 rc = assign_eip_near(ctxt, eip);
3622 rsp_increment(ctxt, ctxt->src.val);
3626 static int em_xchg(struct x86_emulate_ctxt *ctxt)
3629 ctxt->src.val = ctxt->dst.val;
3630 write_register_operand(&ctxt->src);
3633 ctxt->dst.val = ctxt->src.orig_val;
3634 ctxt->lock_prefix = 1;
3638 static int em_imul_3op(struct x86_emulate_ctxt *ctxt)
3640 ctxt->dst.val = ctxt->src2.val;
3641 return fastop(ctxt, em_imul);
3644 static int em_cwd(struct x86_emulate_ctxt *ctxt)
3646 ctxt->dst.type = OP_REG;
3647 ctxt->dst.bytes = ctxt->src.bytes;
3648 ctxt->dst.addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX);
3649 ctxt->dst.val = ~((ctxt->src.val >> (ctxt->src.bytes * 8 - 1)) - 1);
3654 static int em_rdpid(struct x86_emulate_ctxt *ctxt)
3658 if (!ctxt->ops->guest_has_rdpid(ctxt))
3659 return emulate_ud(ctxt);
3661 ctxt->ops->get_msr(ctxt, MSR_TSC_AUX, &tsc_aux);
3662 ctxt->dst.val = tsc_aux;
3666 static int em_rdtsc(struct x86_emulate_ctxt *ctxt)
3670 ctxt->ops->get_msr(ctxt, MSR_IA32_TSC, &tsc);
3671 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)tsc;
3672 *reg_write(ctxt, VCPU_REGS_RDX) = tsc >> 32;
3676 static int em_rdpmc(struct x86_emulate_ctxt *ctxt)
3680 if (ctxt->ops->read_pmc(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &pmc))
3681 return emulate_gp(ctxt, 0);
3682 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)pmc;
3683 *reg_write(ctxt, VCPU_REGS_RDX) = pmc >> 32;
3687 static int em_mov(struct x86_emulate_ctxt *ctxt)
3689 memcpy(ctxt->dst.valptr, ctxt->src.valptr, sizeof(ctxt->src.valptr));
3693 static int em_movbe(struct x86_emulate_ctxt *ctxt)
3697 if (!ctxt->ops->guest_has_movbe(ctxt))
3698 return emulate_ud(ctxt);
3700 switch (ctxt->op_bytes) {
3710 tmp = (u16)ctxt->src.val;
3711 ctxt->dst.val &= ~0xffffUL;
3712 ctxt->dst.val |= (unsigned long)swab16(tmp);
3715 ctxt->dst.val = swab32((u32)ctxt->src.val);
3718 ctxt->dst.val = swab64(ctxt->src.val);
3726 static int em_cr_write(struct x86_emulate_ctxt *ctxt)
3728 int cr_num = ctxt->modrm_reg;
3731 if (ctxt->ops->set_cr(ctxt, cr_num, ctxt->src.val))
3732 return emulate_gp(ctxt, 0);
3735 ctxt->dst.type = OP_NONE;
3742 r = emulator_recalc_and_set_mode(ctxt);
3750 static int em_dr_write(struct x86_emulate_ctxt *ctxt)
3754 if (ctxt->mode == X86EMUL_MODE_PROT64)
3755 val = ctxt->src.val & ~0ULL;
3757 val = ctxt->src.val & ~0U;
3760 if (ctxt->ops->set_dr(ctxt, ctxt->modrm_reg, val) < 0)
3761 return emulate_gp(ctxt, 0);
3764 ctxt->dst.type = OP_NONE;
3768 static int em_wrmsr(struct x86_emulate_ctxt *ctxt)
3770 u64 msr_index = reg_read(ctxt, VCPU_REGS_RCX);
3774 msr_data = (u32)reg_read(ctxt, VCPU_REGS_RAX)
3775 | ((u64)reg_read(ctxt, VCPU_REGS_RDX) << 32);
3776 r = ctxt->ops->set_msr(ctxt, msr_index, msr_data);
3782 return emulate_gp(ctxt, 0);
3787 static int em_rdmsr(struct x86_emulate_ctxt *ctxt)
3789 u64 msr_index = reg_read(ctxt, VCPU_REGS_RCX);
3793 r = ctxt->ops->get_msr(ctxt, msr_index, &msr_data);
3799 return emulate_gp(ctxt, 0);
3801 *reg_write(ctxt, VCPU_REGS_RAX) = (u32)msr_data;
3802 *reg_write(ctxt, VCPU_REGS_RDX) = msr_data >> 32;
3806 static int em_store_sreg(struct x86_emulate_ctxt *ctxt, int segment)
3809 (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3810 ctxt->ops->cpl(ctxt) > 0)
3811 return emulate_gp(ctxt, 0);
3813 ctxt->dst.val = get_segment_selector(ctxt, segment);
3814 if (ctxt->dst.bytes == 4 && ctxt->dst.type == OP_MEM)
3815 ctxt->dst.bytes = 2;
3819 static int em_mov_rm_sreg(struct x86_emulate_ctxt *ctxt)
3821 if (ctxt->modrm_reg > VCPU_SREG_GS)
3822 return emulate_ud(ctxt);
3824 return em_store_sreg(ctxt, ctxt->modrm_reg);
3827 static int em_mov_sreg_rm(struct x86_emulate_ctxt *ctxt)
3829 u16 sel = ctxt->src.val;
3831 if (ctxt->modrm_reg == VCPU_SREG_CS || ctxt->modrm_reg > VCPU_SREG_GS)
3832 return emulate_ud(ctxt);
3834 if (ctxt->modrm_reg == VCPU_SREG_SS)
3835 ctxt->interruptibility = KVM_X86_SHADOW_INT_MOV_SS;
3838 ctxt->dst.type = OP_NONE;
3839 return load_segment_descriptor(ctxt, sel, ctxt->modrm_reg);
3842 static int em_sldt(struct x86_emulate_ctxt *ctxt)
3844 return em_store_sreg(ctxt, VCPU_SREG_LDTR);
3847 static int em_lldt(struct x86_emulate_ctxt *ctxt)
3849 u16 sel = ctxt->src.val;
3852 ctxt->dst.type = OP_NONE;
3853 return load_segment_descriptor(ctxt, sel, VCPU_SREG_LDTR);
3856 static int em_str(struct x86_emulate_ctxt *ctxt)
3858 return em_store_sreg(ctxt, VCPU_SREG_TR);
3861 static int em_ltr(struct x86_emulate_ctxt *ctxt)
3863 u16 sel = ctxt->src.val;
3866 ctxt->dst.type = OP_NONE;
3867 return load_segment_descriptor(ctxt, sel, VCPU_SREG_TR);
3870 static int em_invlpg(struct x86_emulate_ctxt *ctxt)
3875 rc = linearize(ctxt, ctxt->src.addr.mem, 1, false, &linear);
3877 ctxt->ops->invlpg(ctxt, linear);
3879 ctxt->dst.type = OP_NONE;
3883 static int em_clts(struct x86_emulate_ctxt *ctxt)
3887 cr0 = ctxt->ops->get_cr(ctxt, 0);
3889 ctxt->ops->set_cr(ctxt, 0, cr0);
3893 static int em_hypercall(struct x86_emulate_ctxt *ctxt)
3895 int rc = ctxt->ops->fix_hypercall(ctxt);
3901 ctxt->_eip = ctxt->eip;
3903 ctxt->dst.type = OP_NONE;
3907 static int emulate_store_desc_ptr(struct x86_emulate_ctxt *ctxt,
3908 void (*get)(struct x86_emulate_ctxt *ctxt,
3913 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3914 ctxt->ops->cpl(ctxt) > 0)
3915 return emulate_gp(ctxt, 0);
3917 if (ctxt->mode == X86EMUL_MODE_PROT64)
3918 ctxt->op_bytes = 8;
3919 get(ctxt, &desc_ptr);
3920 if (ctxt->op_bytes == 2) {
3921 ctxt->op_bytes = 4;
3925 ctxt->dst.type = OP_NONE;
3926 return segmented_write_std(ctxt, ctxt->dst.addr.mem,
3927 &desc_ptr, 2 + ctxt->op_bytes);
3930 static int em_sgdt(struct x86_emulate_ctxt *ctxt)
3932 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_gdt);
3935 static int em_sidt(struct x86_emulate_ctxt *ctxt)
3937 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_idt);
3940 static int em_lgdt_lidt(struct x86_emulate_ctxt *ctxt, bool lgdt)
3945 if (ctxt->mode == X86EMUL_MODE_PROT64)
3946 ctxt->op_bytes = 8;
3947 rc = read_descriptor(ctxt, ctxt->src.addr.mem,
3949 ctxt->op_bytes);
3952 if (ctxt->mode == X86EMUL_MODE_PROT64 &&
3953 emul_is_noncanonical_address(desc_ptr.address, ctxt))
3954 return emulate_gp(ctxt, 0);
3956 ctxt->ops->set_gdt(ctxt, &desc_ptr);
3958 ctxt->ops->set_idt(ctxt, &desc_ptr);
3960 ctxt->dst.type = OP_NONE;
3964 static int em_lgdt(struct x86_emulate_ctxt *ctxt)
3966 return em_lgdt_lidt(ctxt, true);
3969 static int em_lidt(struct x86_emulate_ctxt *ctxt)
3971 return em_lgdt_lidt(ctxt, false);
3974 static int em_smsw(struct x86_emulate_ctxt *ctxt)
3976 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3977 ctxt->ops->cpl(ctxt) > 0)
3978 return emulate_gp(ctxt, 0);
3980 if (ctxt->dst.type == OP_MEM)
3981 ctxt->dst.bytes = 2;
3982 ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0);
3986 static int em_lmsw(struct x86_emulate_ctxt *ctxt)
3988 ctxt->ops->set_cr(ctxt, 0, (ctxt->ops->get_cr(ctxt, 0) & ~0x0eul)
3989 | (ctxt->src.val & 0x0f));
3990 ctxt->dst.type = OP_NONE;
3994 static int em_loop(struct x86_emulate_ctxt *ctxt)
3998 register_address_increment(ctxt, VCPU_REGS_RCX, -1);
3999 if ((address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) != 0) &&
4000 (ctxt->b == 0xe2 || test_cc(ctxt->b ^ 0x5, ctxt->eflags)))
4001 rc = jmp_rel(ctxt, ctxt->src.val);
4006 static int em_jcxz(struct x86_emulate_ctxt *ctxt)
4010 if (address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) == 0)
4011 rc = jmp_rel(ctxt, ctxt->src.val);
4016 static int em_in(struct x86_emulate_ctxt *ctxt)
4018 if (!pio_in_emulated(ctxt, ctxt->dst.bytes, ctxt->src.val,
4019 &ctxt->dst.val))
4025 static int em_out(struct x86_emulate_ctxt *ctxt)
4027 ctxt->ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val,
4028 &ctxt->src.val, 1);
4030 ctxt->dst.type = OP_NONE;
4034 static int em_cli(struct x86_emulate_ctxt *ctxt)
4036 if (emulator_bad_iopl(ctxt))
4037 return emulate_gp(ctxt, 0);
4039 ctxt->eflags &= ~X86_EFLAGS_IF;
4043 static int em_sti(struct x86_emulate_ctxt *ctxt)
4045 if (emulator_bad_iopl(ctxt))
4046 return emulate_gp(ctxt, 0);
4048 ctxt->interruptibility = KVM_X86_SHADOW_INT_STI;
4049 ctxt->eflags |= X86_EFLAGS_IF;
4053 static int em_cpuid(struct x86_emulate_ctxt *ctxt)
4058 ctxt->ops->get_msr(ctxt, MSR_MISC_FEATURES_ENABLES, &msr);
4060 ctxt->ops->cpl(ctxt)) {
4061 return emulate_gp(ctxt, 0);
4064 eax = reg_read(ctxt, VCPU_REGS_RAX);
4065 ecx = reg_read(ctxt, VCPU_REGS_RCX);
4066 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false);
4067 *reg_write(ctxt, VCPU_REGS_RAX) = eax;
4068 *reg_write(ctxt, VCPU_REGS_RBX) = ebx;
4069 *reg_write(ctxt, VCPU_REGS_RCX) = ecx;
4070 *reg_write(ctxt, VCPU_REGS_RDX) = edx;
4074 static int em_sahf(struct x86_emulate_ctxt *ctxt)
4080 flags &= *reg_rmw(ctxt, VCPU_REGS_RAX) >> 8;
4082 ctxt->eflags &= ~0xffUL;
4083 ctxt->eflags |= flags | X86_EFLAGS_FIXED;
4087 static int em_lahf(struct x86_emulate_ctxt *ctxt)
4089 *reg_rmw(ctxt, VCPU_REGS_RAX) &= ~0xff00UL;
4090 *reg_rmw(ctxt, VCPU_REGS_RAX) |= (ctxt->eflags & 0xff) << 8;
4094 static int em_bswap(struct x86_emulate_ctxt *ctxt)
4096 switch (ctxt->op_bytes) {
4099 asm("bswap %0" : "+r"(ctxt->dst.val));
4103 asm("bswap %0" : "+r"(*(u32 *)&ctxt->dst.val));
4109 static int em_clflush(struct x86_emulate_ctxt *ctxt)
4115 static int em_clflushopt(struct x86_emulate_ctxt *ctxt)
4121 static int em_movsxd(struct x86_emulate_ctxt *ctxt)
4123 ctxt->dst.val = (s32) ctxt->src.val;
4127 static int check_fxsr(struct x86_emulate_ctxt *ctxt)
4129 if (!ctxt->ops->guest_has_fxsr(ctxt))
4130 return emulate_ud(ctxt);
4132 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
4133 return emulate_nm(ctxt);
4139 if (ctxt->mode >= X86EMUL_MODE_PROT64)
4154 static inline size_t fxstate_size(struct x86_emulate_ctxt *ctxt)
4157 if (ctxt->mode == X86EMUL_MODE_PROT64)
4160 cr4_osfxsr = ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR;
4182 static int em_fxsave(struct x86_emulate_ctxt *ctxt)
4187 rc = check_fxsr(ctxt);
4200 return segmented_write_std(ctxt, ctxt->memop.addr.mem, &fx_state,
4201 fxstate_size(ctxt));
4224 static int em_fxrstor(struct x86_emulate_ctxt *ctxt)
4230 rc = check_fxsr(ctxt);
4234 size = fxstate_size(ctxt);
4235 rc = segmented_read_std(ctxt, ctxt->memop.addr.mem, &fx_state, size);
4248 rc = emulate_gp(ctxt, 0);
4261 static int em_xsetbv(struct x86_emulate_ctxt *ctxt)
4265 eax = reg_read(ctxt, VCPU_REGS_RAX);
4266 edx = reg_read(ctxt, VCPU_REGS_RDX);
4267 ecx = reg_read(ctxt, VCPU_REGS_RCX);
4269 if (ctxt->ops->set_xcr(ctxt, ecx, ((u64)edx << 32) | eax))
4270 return emulate_gp(ctxt, 0);
4287 static int check_cr_access(struct x86_emulate_ctxt *ctxt)
4289 if (!valid_cr(ctxt->modrm_reg))
4290 return emulate_ud(ctxt);
4295 static int check_dr7_gd(struct x86_emulate_ctxt *ctxt)
4299 ctxt->ops->get_dr(ctxt, 7, &dr7);
4305 static int check_dr_read(struct x86_emulate_ctxt *ctxt)
4307 int dr = ctxt->modrm_reg;
4311 return emulate_ud(ctxt);
4313 cr4 = ctxt->ops->get_cr(ctxt, 4);
4315 return emulate_ud(ctxt);
4317 if (check_dr7_gd(ctxt)) {
4320 ctxt->ops->get_dr(ctxt, 6, &dr6);
4323 ctxt->ops->set_dr(ctxt, 6, dr6);
4324 return emulate_db(ctxt);
4330 static int check_dr_write(struct x86_emulate_ctxt *ctxt)
4332 u64 new_val = ctxt->src.val64;
4333 int dr = ctxt->modrm_reg;
4336 return emulate_gp(ctxt, 0);
4338 return check_dr_read(ctxt);
4341 static int check_svme(struct x86_emulate_ctxt *ctxt)
4345 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
4348 return emulate_ud(ctxt);
4353 static int check_svme_pa(struct x86_emulate_ctxt *ctxt)
4355 u64 rax = reg_read(ctxt, VCPU_REGS_RAX);
4359 return emulate_gp(ctxt, 0);
4361 return check_svme(ctxt);
4364 static int check_rdtsc(struct x86_emulate_ctxt *ctxt)
4366 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
4368 if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt))
4369 return emulate_ud(ctxt);
4374 static int check_rdpmc(struct x86_emulate_ctxt *ctxt)
4376 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
4377 u64 rcx = reg_read(ctxt, VCPU_REGS_RCX);
4386 if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) ||
4387 ctxt->ops->check_pmc(ctxt, rcx))
4388 return emulate_gp(ctxt, 0);
4393 static int check_perm_in(struct x86_emulate_ctxt *ctxt)
4395 ctxt->dst.bytes = min(ctxt->dst.bytes, 4u);
4396 if (!emulator_io_permited(ctxt, ctxt->src.val, ctxt->dst.bytes))
4397 return emulate_gp(ctxt, 0);
4402 static int check_perm_out(struct x86_emulate_ctxt *ctxt)
4404 ctxt->src.bytes = min(ctxt->src.bytes, 4u);
4405 if (!emulator_io_permited(ctxt, ctxt->dst.val, ctxt->src.bytes))
4406 return emulate_gp(ctxt, 0);
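
The check_* routines above are the per-opcode permission hooks installed in the opcode tables and reached through the check_perm callback listed near the top of this cross-reference; they run before the instruction is allowed to execute. A hypothetical hook in the same style (check_osfxsr is made up; X86_CR4_OSFXSR is the bit also tested by fxstate_size() above):

    static int check_osfxsr(struct x86_emulate_ctxt *ctxt)
    {
    	if (!(ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))
    		return emulate_ud(ctxt);

    	return X86EMUL_CONTINUE;
    }
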
4962 static unsigned imm_size(struct x86_emulate_ctxt *ctxt)
4966 size = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
4972 static int decode_imm(struct x86_emulate_ctxt *ctxt, struct operand *op,
4979 op->addr.mem.ea = ctxt->_eip;
4983 op->val = insn_fetch(s8, ctxt);
4986 op->val = insn_fetch(s16, ctxt);
4989 op->val = insn_fetch(s32, ctxt);
4992 op->val = insn_fetch(s64, ctxt);
5012 static int decode_operand(struct x86_emulate_ctxt *ctxt, struct operand *op,
5019 decode_register_operand(ctxt, op);
5022 rc = decode_imm(ctxt, op, 1, false);
5025 ctxt->memop.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
5027 *op = ctxt->memop;
5028 ctxt->memopp = op;
5029 if (ctxt->d & BitOp)
5030 fetch_bit_operand(ctxt);
5034 ctxt->memop.bytes = (ctxt->op_bytes == 8) ? 16 : 8;
5038 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
5039 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX);
5045 op->bytes = (ctxt->d & ByteOp) ? 2 : ctxt->op_bytes;
5046 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RAX);
5051 if (ctxt->d & ByteOp) {
5056 op->bytes = ctxt->op_bytes;
5057 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX);
5063 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
5065 register_address(ctxt, VCPU_REGS_RDI);
5073 op->addr.reg = reg_rmw(ctxt, VCPU_REGS_RDX);
5079 op->val = reg_read(ctxt, VCPU_REGS_RCX) & 0xff;
5082 rc = decode_imm(ctxt, op, 1, true);
5090 rc = decode_imm(ctxt, op, imm_size(ctxt), true);
5093 rc = decode_imm(ctxt, op, ctxt->op_bytes, true);
5096 ctxt->memop.bytes = 1;
5097 if (ctxt->memop.type == OP_REG) {
5098 ctxt->memop.addr.reg = decode_register(ctxt,
5099 ctxt->modrm_rm, true);
5100 fetch_register_operand(&ctxt->memop);
5104 ctxt->memop.bytes = 2;
5107 ctxt->memop.bytes = 4;
5110 rc = decode_imm(ctxt, op, 2, false);
5113 rc = decode_imm(ctxt, op, imm_size(ctxt), false);
5117 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
5119 register_address(ctxt, VCPU_REGS_RSI);
5120 op->addr.mem.seg = ctxt->seg_override;
5126 op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
5128 address_mask(ctxt,
5129 reg_read(ctxt, VCPU_REGS_RBX) +
5130 (reg_read(ctxt, VCPU_REGS_RAX) & 0xff));
5131 op->addr.mem.seg = ctxt->seg_override;
5136 op->addr.mem.ea = ctxt->_eip;
5137 op->bytes = ctxt->op_bytes + 2;
5138 insn_fetch_arr(op->valptr, op->bytes, ctxt);
5141 ctxt->memop.bytes = ctxt->op_bytes + 2;
5178 int x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len)
5181 int mode = ctxt->mode;
5189 ctxt->memop.type = OP_NONE;
5190 ctxt->memopp = NULL;
5191 ctxt->_eip = ctxt->eip;
5192 ctxt->fetch.ptr = ctxt->fetch.data;
5193 ctxt->fetch.end = ctxt->fetch.data + insn_len;
5194 ctxt->opcode_len = 1;
5195 ctxt->intercept = x86_intercept_none;
5197 memcpy(ctxt->fetch.data, insn, insn_len);
5199 rc = __do_insn_fetch_bytes(ctxt, 1);
5208 ctxt->ops->get_segment(ctxt, &dummy, &desc, NULL, VCPU_SREG_CS);
5228 ctxt->op_bytes = def_op_bytes;
5229 ctxt->ad_bytes = def_ad_bytes;
5233 switch (ctxt->b = insn_fetch(u8, ctxt)) {
5237 ctxt->op_bytes = def_op_bytes ^ 6;
5242 ctxt->ad_bytes = def_ad_bytes ^ 12;
5245 ctxt->ad_bytes = def_ad_bytes ^ 6;
5249 ctxt->seg_override = VCPU_SREG_ES;
5253 ctxt->seg_override = VCPU_SREG_CS;
5257 ctxt->seg_override = VCPU_SREG_SS;
5261 ctxt->seg_override = VCPU_SREG_DS;
5265 ctxt->seg_override = VCPU_SREG_FS;
5269 ctxt->seg_override = VCPU_SREG_GS;
5274 ctxt->rex_prefix = ctxt->b;
5277 ctxt->lock_prefix = 1;
5281 ctxt->rep_prefix = ctxt->b;
5289 ctxt->rex_prefix = 0;
5295 if (ctxt->rex_prefix & 8)
5296 ctxt->op_bytes = 8; /* REX.W */
5299 opcode = opcode_table[ctxt->b];
5301 if (ctxt->b == 0x0f) {
5302 ctxt->opcode_len = 2;
5303 ctxt->b = insn_fetch(u8, ctxt);
5304 opcode = twobyte_table[ctxt->b];
5307 if (ctxt->b == 0x38) {
5308 ctxt->opcode_len = 3;
5309 ctxt->b = insn_fetch(u8, ctxt);
5310 opcode = opcode_map_0f_38[ctxt->b];
5313 ctxt->d = opcode.flags;
5315 if (ctxt->d & ModRM)
5316 ctxt->modrm = insn_fetch(u8, ctxt);
5319 if (ctxt->opcode_len == 1 && (ctxt->b == 0xc5 || ctxt->b == 0xc4) &&
5320 (mode == X86EMUL_MODE_PROT64 || (ctxt->modrm & 0xc0) == 0xc0)) {
5321 ctxt->d = NotImpl;
5324 while (ctxt->d & GroupMask) {
5325 switch (ctxt->d & GroupMask) {
5327 goffset = (ctxt->modrm >> 3) & 7;
5331 goffset = (ctxt->modrm >> 3) & 7;
5332 if ((ctxt->modrm >> 6) == 3)
5338 goffset = ctxt->modrm & 7;
5342 if (ctxt->rep_prefix && op_prefix)
5344 simd_prefix = op_prefix ? 0x66 : ctxt->rep_prefix;
5353 if (ctxt->modrm > 0xbf) {
5356 ctxt->modrm - 0xc0, size);
5360 opcode = opcode.u.esc->op[(ctxt->modrm >> 3) & 7];
5364 if ((ctxt->modrm >> 6) == 3)
5370 if (ctxt->mode == X86EMUL_MODE_PROT64)
5379 ctxt->d &= ~(u64)GroupMask;
5380 ctxt->d |= opcode.flags;
5384 if (ctxt->d == 0)
5387 ctxt->execute = opcode.u.execute;
5389 if (unlikely(ctxt->ud) && likely(!(ctxt->d & EmulateOnUD)))
5392 if (unlikely(ctxt->d &
5399 ctxt->check_perm = opcode.check_perm;
5400 ctxt->intercept = opcode.intercept;
5402 if (ctxt->d & NotImpl)
5406 if (ctxt->op_bytes == 4 && (ctxt->d & Stack))
5407 ctxt->op_bytes = 8;
5408 else if (ctxt->d & NearBranch)
5409 ctxt->op_bytes = 8;
5412 if (ctxt->d & Op3264) {
5414 ctxt->op_bytes = 8;
5416 ctxt->op_bytes = 4;
5419 if ((ctxt->d & No16) && ctxt->op_bytes == 2)
5420 ctxt->op_bytes = 4;
5422 if (ctxt->d & Sse)
5423 ctxt->op_bytes = 16;
5424 else if (ctxt->d & Mmx)
5425 ctxt->op_bytes = 8;
5429 if (ctxt->d & ModRM) {
5430 rc = decode_modrm(ctxt, &ctxt->memop);
5433 ctxt->seg_override = ctxt->modrm_seg;
5435 } else if (ctxt->d & MemAbs)
5436 rc = decode_abs(ctxt, &ctxt->memop);
5441 ctxt->seg_override = VCPU_SREG_DS;
5443 ctxt->memop.addr.mem.seg = ctxt->seg_override;
5449 rc = decode_operand(ctxt, &ctxt->src, (ctxt->d >> SrcShift) & OpMask);
5457 rc = decode_operand(ctxt, &ctxt->src2, (ctxt->d >> Src2Shift) & OpMask);
5462 rc = decode_operand(ctxt, &ctxt->dst, (ctxt->d >> DstShift) & OpMask);
5464 if (ctxt->rip_relative && likely(ctxt->memopp))
5465 ctxt->memopp->addr.mem.ea = address_mask(ctxt,
5466 ctxt->memopp->addr.mem.ea + ctxt->_eip);
5470 ctxt->have_exception = true;
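Note: within the prefix loop of x86_decode_insn() above, the operand- and address-size override prefixes flip the mode's default width with an XOR (def_op_bytes ^ 6; def_ad_bytes ^ 12 in long mode, ^ 6 otherwise). A tiny standalone check that the toggle behaves as described:

        #include <assert.h>
        #include <stdio.h>

        /* Illustrative only: the size-override prefixes in the decode loop
         * flip the default width with an XOR:
         *   op_bytes = def_op_bytes ^ 6    toggles 2 <-> 4
         *   ad_bytes = def_ad_bytes ^ 12   toggles 4 <-> 8 (long mode)
         *   ad_bytes = def_ad_bytes ^ 6    toggles 2 <-> 4 (16/32-bit modes)
         */
        int main(void)
        {
                assert((2 ^ 6) == 4 && (4 ^ 6) == 2);
                assert((4 ^ 12) == 8 && (8 ^ 12) == 4);
                printf("prefix XOR toggles hold\n");
                return 0;
        }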
5474 bool x86_page_table_writing_insn(struct x86_emulate_ctxt *ctxt)
5476 return ctxt->d & PageTable;
5479 static bool string_insn_completed(struct x86_emulate_ctxt *ctxt)
5488 if (((ctxt->b == 0xa6) || (ctxt->b == 0xa7) ||
5489 (ctxt->b == 0xae) || (ctxt->b == 0xaf))
5490 && (((ctxt->rep_prefix == REPE_PREFIX) &&
5491 ((ctxt->eflags & X86_EFLAGS_ZF) == 0))
5492 || ((ctxt->rep_prefix == REPNE_PREFIX) &&
5493 ((ctxt->eflags & X86_EFLAGS_ZF) == X86_EFLAGS_ZF))))
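Note: string_insn_completed() above stops a repeated CMPS/SCAS (opcodes 0xa6/0xa7/0xae/0xaf) when the REPE/REPNE condition on ZF fails. A standalone restatement of that termination test; the prefix and flag names here are stand-ins, not the emulator's constants:

        #include <stdbool.h>
        #include <stdint.h>
        #include <stdio.h>

        #define MY_EFLAGS_ZF    (1u << 6)       /* ZF bit position in EFLAGS */

        /* Illustrative only: under REPE the loop stops once ZF is clear,
         * under REPNE once ZF is set. */
        enum rep { REP_E, REP_NE };

        static bool rep_terminates(enum rep prefix, uint32_t eflags)
        {
                bool zf = eflags & MY_EFLAGS_ZF;

                return (prefix == REP_E && !zf) || (prefix == REP_NE && zf);
        }

        int main(void)
        {
                printf("repe,  ZF=0 -> stop: %d\n", rep_terminates(REP_E, 0));
                printf("repne, ZF=1 -> stop: %d\n", rep_terminates(REP_NE, MY_EFLAGS_ZF));
                return 0;
        }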
5499 static int flush_pending_x87_faults(struct x86_emulate_ctxt *ctxt)
5508 return emulate_exception(ctxt, MF_VECTOR, 0, false);
5519 static int fastop(struct x86_emulate_ctxt *ctxt, fastop_t fop)
5521 ulong flags = (ctxt->eflags & EFLAGS_MASK) | X86_EFLAGS_IF;
5523 if (!(ctxt->d & ByteOp))
5524 fop += __ffs(ctxt->dst.bytes) * FASTOP_SIZE;
5527 : "+a"(ctxt->dst.val), "+d"(ctxt->src.val), [flags]"+D"(flags),
5529 : "c"(ctxt->src2.val));
5531 ctxt->eflags = (ctxt->eflags & ~EFLAGS_MASK) | (flags & EFLAGS_MASK);
5533 return emulate_de(ctxt);
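Note: fastop() above advances the stub pointer by __ffs(ctxt->dst.bytes) * FASTOP_SIZE so that one entry point per operand width is selected. The same size-indexed dispatch, sketched with an ordinary function-pointer table instead of fixed-size asm stubs (all names invented):

        #include <stdint.h>
        #include <stdio.h>

        /* Illustrative only: pick a width-specific handler by log2(bytes),
         * the same indexing idea as fop += __ffs(dst.bytes) * FASTOP_SIZE,
         * but with a plain function-pointer table. */
        static uint64_t add8(uint64_t a, uint64_t b)  { return (uint8_t)(a + b); }
        static uint64_t add16(uint64_t a, uint64_t b) { return (uint16_t)(a + b); }
        static uint64_t add32(uint64_t a, uint64_t b) { return (uint32_t)(a + b); }
        static uint64_t add64(uint64_t a, uint64_t b) { return a + b; }

        static uint64_t (*const add_tab[4])(uint64_t, uint64_t) = {
                add8, add16, add32, add64       /* index = log2(bytes) */
        };

        static unsigned log2_bytes(unsigned bytes)
        {
                unsigned i = 0;

                while (bytes >>= 1)
                        i++;
                return i;                       /* 1->0, 2->1, 4->2, 8->3 */
        }

        int main(void)
        {
                /* 32-bit add wraps to 0, as the width-specific handler should. */
                printf("%llu\n", (unsigned long long)add_tab[log2_bytes(4)](0xffffffffull, 1));
                return 0;
        }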
5537 void init_decode_cache(struct x86_emulate_ctxt *ctxt)
5539 memset(&ctxt->rip_relative, 0,
5540 (void *)&ctxt->modrm - (void *)&ctxt->rip_relative);
5542 ctxt->io_read.pos = 0;
5543 ctxt->io_read.end = 0;
5544 ctxt->mem_read.end = 0;
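Note: init_decode_cache() above wipes only the decoder-owned fields with a single memset spanning from ctxt->rip_relative up to (but not including) ctxt->modrm. The same field-range reset, sketched with offsetof() on an invented struct:

        #include <stddef.h>
        #include <stdio.h>
        #include <string.h>

        /* Illustrative only: clear a contiguous run of struct fields with one
         * memset; the trick relies on the fields being laid out back to back. */
        struct demo_ctxt {
                int keep_me;            /* not part of the cleared range */
                int a;                  /* first cleared field */
                int b;
                int c;                  /* cleared range ends before 'stop' */
                int stop;               /* first field to preserve */
        };

        static void reset_decode_fields(struct demo_ctxt *d)
        {
                memset(&d->a, 0,
                       offsetof(struct demo_ctxt, stop) - offsetof(struct demo_ctxt, a));
        }

        int main(void)
        {
                struct demo_ctxt d = { 1, 2, 3, 4, 5 };

                reset_decode_fields(&d);
                printf("%d %d %d %d %d\n", d.keep_me, d.a, d.b, d.c, d.stop); /* 1 0 0 0 5 */
                return 0;
        }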
5547 int x86_emulate_insn(struct x86_emulate_ctxt *ctxt)
5549 const struct x86_emulate_ops *ops = ctxt->ops;
5551 int saved_dst_type = ctxt->dst.type;
5554 ctxt->mem_read.pos = 0;
5557 if (ctxt->lock_prefix && (!(ctxt->d & Lock) || ctxt->dst.type != OP_MEM)) {
5558 rc = emulate_ud(ctxt);
5562 if ((ctxt->d & SrcMask) == SrcMemFAddr && ctxt->src.type != OP_MEM) {
5563 rc = emulate_ud(ctxt);
5567 emul_flags = ctxt->ops->get_hflags(ctxt);
5568 if (unlikely(ctxt->d &
5570 if ((ctxt->mode == X86EMUL_MODE_PROT64 && (ctxt->d & No64)) ||
5571 (ctxt->d & Undefined)) {
5572 rc = emulate_ud(ctxt);
5576 if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM)))
5577 || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) {
5578 rc = emulate_ud(ctxt);
5582 if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) {
5583 rc = emulate_nm(ctxt);
5587 if (ctxt->d & Mmx) {
5588 rc = flush_pending_x87_faults(ctxt);
5595 fetch_possible_mmx_operand(&ctxt->src);
5596 fetch_possible_mmx_operand(&ctxt->src2);
5597 if (!(ctxt->d & Mov))
5598 fetch_possible_mmx_operand(&ctxt->dst);
5601 if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && ctxt->intercept) {
5602 rc = emulator_check_intercept(ctxt, ctxt->intercept,
5609 if ((ctxt->d & Prot) && ctxt->mode < X86EMUL_MODE_PROT16) {
5610 rc = emulate_ud(ctxt);
5615 if ((ctxt->d & Priv) && ops->cpl(ctxt)) {
5616 if (ctxt->d & PrivUD)
5617 rc = emulate_ud(ctxt);
5619 rc = emulate_gp(ctxt, 0);
5624 if (ctxt->d & CheckPerm) {
5625 rc = ctxt->check_perm(ctxt);
5630 if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && (ctxt->d & Intercept)) {
5631 rc = emulator_check_intercept(ctxt, ctxt->intercept,
5637 if (ctxt->rep_prefix && (ctxt->d & String)) {
5639 if (address_mask(ctxt, reg_read(ctxt, VCPU_REGS_RCX)) == 0) {
5640 string_registers_quirk(ctxt);
5641 ctxt->eip = ctxt->_eip;
5642 ctxt->eflags &= ~X86_EFLAGS_RF;
5648 if ((ctxt->src.type == OP_MEM) && !(ctxt->d & NoAccess)) {
5649 rc = segmented_read(ctxt, ctxt->src.addr.mem,
5650 ctxt->src.valptr, ctxt->src.bytes);
5653 ctxt->src.orig_val64 = ctxt->src.val64;
5656 if (ctxt->src2.type == OP_MEM) {
5657 rc = segmented_read(ctxt, ctxt->src2.addr.mem,
5658 &ctxt->src2.val, ctxt->src2.bytes);
5663 if ((ctxt->d & DstMask) == ImplicitOps)
5667 if ((ctxt->dst.type == OP_MEM) && !(ctxt->d & Mov)) {
5669 rc = segmented_read(ctxt, ctxt->dst.addr.mem,
5670 &ctxt->dst.val, ctxt->dst.bytes);
5672 if (!(ctxt->d & NoWrite) &&
5674 ctxt->exception.vector == PF_VECTOR)
5675 ctxt->exception.error_code |= PFERR_WRITE_MASK;
5680 ctxt->dst.orig_val64 = ctxt->dst.val64;
5684 if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && (ctxt->d & Intercept)) {
5685 rc = emulator_check_intercept(ctxt, ctxt->intercept,
5691 if (ctxt->rep_prefix && (ctxt->d & String))
5692 ctxt->eflags |= X86_EFLAGS_RF;
5694 ctxt->eflags &= ~X86_EFLAGS_RF;
5696 if (ctxt->execute) {
5697 if (ctxt->d & Fastop)
5698 rc = fastop(ctxt, ctxt->fop);
5700 rc = ctxt->execute(ctxt);
5706 if (ctxt->opcode_len == 2)
5708 else if (ctxt->opcode_len == 3)
5711 switch (ctxt->b) {
5713 if (test_cc(ctxt->b, ctxt->eflags))
5714 rc = jmp_rel(ctxt, ctxt->src.val);
5717 ctxt->dst.val = ctxt->src.addr.mem.ea;
5720 if (ctxt->dst.addr.reg == reg_rmw(ctxt, VCPU_REGS_RAX))
5721 ctxt->dst.type = OP_NONE;
5723 rc = em_xchg(ctxt);
5726 switch (ctxt->op_bytes) {
5727 case 2: ctxt->dst.val = (s8)ctxt->dst.val; break;
5728 case 4: ctxt->dst.val = (s16)ctxt->dst.val; break;
5729 case 8: ctxt->dst.val = (s32)ctxt->dst.val; break;
5733 rc = emulate_int(ctxt, 3);
5736 rc = emulate_int(ctxt, ctxt->src.val);
5739 if (ctxt->eflags & X86_EFLAGS_OF)
5740 rc = emulate_int(ctxt, 4);
5744 rc = jmp_rel(ctxt, ctxt->src.val);
5745 ctxt->dst.type = OP_NONE; /* Disable writeback. */
5748 ctxt->ops->halt(ctxt);
5752 ctxt->eflags ^= X86_EFLAGS_CF;
5755 ctxt->eflags &= ~X86_EFLAGS_CF;
5758 ctxt->eflags |= X86_EFLAGS_CF;
5761 ctxt->eflags &= ~X86_EFLAGS_DF;
5764 ctxt->eflags |= X86_EFLAGS_DF;
5774 if (ctxt->d & SrcWrite) {
5775 BUG_ON(ctxt->src.type == OP_MEM || ctxt->src.type == OP_MEM_STR);
5776 rc = writeback(ctxt, &ctxt->src);
5780 if (!(ctxt->d & NoWrite)) {
5781 rc = writeback(ctxt, &ctxt->dst);
5790 ctxt->dst.type = saved_dst_type;
5792 if ((ctxt->d & SrcMask) == SrcSI)
5793 string_addr_inc(ctxt, VCPU_REGS_RSI, &ctxt->src);
5795 if ((ctxt->d & DstMask) == DstDI)
5796 string_addr_inc(ctxt, VCPU_REGS_RDI, &ctxt->dst);
5798 if (ctxt->rep_prefix && (ctxt->d & String)) {
5800 struct read_cache *r = &ctxt->io_read;
5801 if ((ctxt->d & SrcMask) == SrcSI)
5802 count = ctxt->src.count;
5804 count = ctxt->dst.count;
5805 register_address_increment(ctxt, VCPU_REGS_RCX, -count);
5807 if (!string_insn_completed(ctxt)) {
5812 if ((r->end != 0 || reg_read(ctxt, VCPU_REGS_RCX) & 0x3ff) &&
5819 ctxt->mem_read.end = 0;
5820 writeback_registers(ctxt);
5825 ctxt->eflags &= ~X86_EFLAGS_RF;
5828 ctxt->eip = ctxt->_eip;
5829 if (ctxt->mode != X86EMUL_MODE_PROT64)
5830 ctxt->eip = (u32)ctxt->_eip;
5834 WARN_ON(ctxt->exception.vector > 0x1f);
5835 ctxt->have_exception = true;
5841 writeback_registers(ctxt);
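Note: among the one-byte opcodes handled above, the 0x98 (cbw/cwde/cdqe) case widens the accumulator by casting through the narrower signed type, as in the (s8)/(s16)/(s32) switch on ctxt->op_bytes. A standalone sketch of that widening (names invented):

        #include <stdint.h>
        #include <stdio.h>

        /* Illustrative only: sign-extend the low half of the accumulator
         * into the full operand width, as the cbw/cwde/cdqe case does. */
        static uint64_t sign_extend_accum(uint64_t val, unsigned op_bytes)
        {
                switch (op_bytes) {
                case 2: return (uint16_t)(int8_t)val;   /* cbw:  AL  -> AX  */
                case 4: return (uint32_t)(int16_t)val;  /* cwde: AX  -> EAX */
                case 8: return (uint64_t)(int32_t)val;  /* cdqe: EAX -> RAX */
                }
                return val;
        }

        int main(void)
        {
                printf("%#llx\n", (unsigned long long)sign_extend_accum(0x80, 2));            /* 0xff80 */
                printf("%#llx\n", (unsigned long long)sign_extend_accum(0x8000, 4));          /* 0xffff8000 */
                printf("%#llx\n", (unsigned long long)sign_extend_accum(0x80000000ull, 8));   /* 0xffffffff80000000 */
                return 0;
        }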
5846 switch (ctxt->b) {
5848 (ctxt->ops->wbinvd)(ctxt);
5856 ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg);
5859 ops->get_dr(ctxt, ctxt->modrm_reg, &ctxt->dst.val);
5862 if (test_cc(ctxt->b, ctxt->eflags))
5863 ctxt->dst.val = ctxt->src.val;
5864 else if (ctxt->op_bytes != 4)
5865 ctxt->dst.type = OP_NONE; /* no writeback */
5868 if (test_cc(ctxt->b, ctxt->eflags))
5869 rc = jmp_rel(ctxt, ctxt->src.val);
5872 ctxt->dst.val = test_cc(ctxt->b, ctxt->eflags);
5875 ctxt->dst.bytes = ctxt->op_bytes;
5876 ctxt->dst.val = (ctxt->src.bytes == 1) ? (u8) ctxt->src.val
5877 : (u16) ctxt->src.val;
5880 ctxt->dst.bytes = ctxt->op_bytes;
5881 ctxt->dst.val = (ctxt->src.bytes == 1) ? (s8) ctxt->src.val :
5882 (s16) ctxt->src.val;
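Note: the movzx/movsx cases above pick a zero- or sign-extending cast based on the source width ((u8)/(u16) versus (s8)/(s16)). A minimal sketch of the two widenings (helper names invented):

        #include <stdint.h>
        #include <stdio.h>

        /* Illustrative only: movzx zero-extends the source, movsx
         * sign-extends it, with the cast chosen from the source width. */
        static uint64_t do_movzx(uint64_t src, unsigned src_bytes)
        {
                return src_bytes == 1 ? (uint8_t)src : (uint16_t)src;
        }

        static uint64_t do_movsx(uint64_t src, unsigned src_bytes)
        {
                return src_bytes == 1 ? (uint64_t)(int8_t)src : (uint64_t)(int16_t)src;
        }

        int main(void)
        {
                printf("movzx: %#llx\n", (unsigned long long)do_movzx(0xff, 1)); /* 0xff */
                printf("movsx: %#llx\n", (unsigned long long)do_movsx(0xff, 1)); /* 0xffffffffffffffff */
                return 0;
        }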
5899 void emulator_invalidate_register_cache(struct x86_emulate_ctxt *ctxt)
5901 invalidate_registers(ctxt);
5904 void emulator_writeback_register_cache(struct x86_emulate_ctxt *ctxt)
5906 writeback_registers(ctxt);
5909 bool emulator_can_use_gpa(struct x86_emulate_ctxt *ctxt)
5911 if (ctxt->rep_prefix && (ctxt->d & String))
5914 if (ctxt->d & TwoMemOp)