Lines Matching defs:ops
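
All of the call sites indexed below are from KVM's x86 instruction emulator (arch/x86/kvm/emulate.c). Every one of them dispatches through the ctxt->ops pointer, a struct x86_emulate_ops table of host callbacks, so the emulator core reads registers, segments, MSRs, and memory only through this indirection and never touches vCPU state directly. As a minimal sketch of that callback-table pattern: the shape mirrors the emulate.c call sites, but the names (emu_ops, emu_ctxt, demo_read_gpr, demo_write_gpr) are invented for illustration and are not kernel identifiers.

	#include <stdio.h>

	struct emu_ctxt;

	/* Callback table: the "emulator" calls host services only through
	 * these function pointers, never directly (invented demo API). */
	struct emu_ops {
		unsigned long (*read_gpr)(struct emu_ctxt *ctxt, unsigned int reg);
		void (*write_gpr)(struct emu_ctxt *ctxt, unsigned int reg,
				  unsigned long val);
	};

	struct emu_ctxt {
		const struct emu_ops *ops;	/* host-supplied callbacks */
		unsigned long regs[2];		/* toy register file */
	};

	static unsigned long demo_read_gpr(struct emu_ctxt *ctxt, unsigned int reg)
	{
		return ctxt->regs[reg];
	}

	static void demo_write_gpr(struct emu_ctxt *ctxt, unsigned int reg,
				   unsigned long val)
	{
		ctxt->regs[reg] = val;
	}

	static const struct emu_ops demo_ops = {
		.read_gpr  = demo_read_gpr,
		.write_gpr = demo_write_gpr,
	};

	int main(void)
	{
		struct emu_ctxt ctxt = { .ops = &demo_ops, .regs = { 42, 0 } };

		/* Same shape as e.g. line 247 below:
		 * ctxt->_regs[nr] = ctxt->ops->read_gpr(ctxt, nr); */
		ctxt.ops->write_gpr(&ctxt, 1, ctxt.ops->read_gpr(&ctxt, 0));
		printf("reg1 = %lu\n", ctxt.regs[1]);
		return 0;
	}

The indirection keeps emulate.c decoupled from the rest of KVM: both the VMX and SVM paths supply the same ops table, and the emulator itself stays free of direct vCPU accesses.
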

247 ctxt->_regs[nr] = ctxt->ops->read_gpr(ctxt, nr);
270 ctxt->ops->write_gpr(ctxt, reg, ctxt->_regs[reg]);
521 return ctxt->ops->intercept(ctxt, &info, stage);
560 ctxt->ops->get_segment(ctxt, &sel, &ss, NULL, VCPU_SREG_SS);
615 return ctxt->ops->get_cached_segment_base(ctxt, seg);
668 ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg);
679 ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg);
680 ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg);
685 return (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_LA57) ? 57 : 48;
750 usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL,
822 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
824 if (!(ctxt->ops->get_cr(ctxt, 0) & X86_CR0_PE)) {
840 if (!ctxt->ops->get_segment(ctxt, &selector, &cs, &base3, VCPU_SREG_CS))
884 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, true);
891 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, true);
905 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, false);
919 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, false);
962 rc = ctxt->ops->fetch(ctxt, linear, ctxt->fetch.end,
1232 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1245 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1261 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1501 rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, size,
1539 return ctxt->ops->write_emulated(ctxt, linear, data, size,
1554 return ctxt->ops->cmpxchg_emulated(ctxt, linear, orig_data, data,
1575 if (!ctxt->ops->pio_in_emulated(ctxt, size, port, rc->data, n))
1599 ctxt->ops->get_idt(ctxt, &dt);
1611 const struct x86_emulate_ops *ops = ctxt->ops;
1619 if (!ops->get_segment(ctxt, &sel, &desc, &base3,
1626 ops->get_gdt(ctxt, dt);
1647 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1705 ctxt->ops->get_segment(ctxt, &dummy, &seg_desc, NULL, seg);
1735 * ctxt->ops->set_segment expects the CPL to be in
1793 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1848 ret = ctxt->ops->cmpxchg_emulated(ctxt, desc_addr, &old_desc, &seg_desc,
1854 ctxt->ops->set_segment(ctxt, selector, &seg_desc, base3, seg);
1865 u8 cpl = ctxt->ops->cpl(ctxt);
1975 int cpl = ctxt->ops->cpl(ctxt);
2129 const struct x86_emulate_ops *ops = ctxt->ops;
2154 ops->get_idt(ctxt, &dt);
2253 ctxt->ops->set_nmi_mask(ctxt, false);
2278 u8 cpl = ctxt->ops->cpl(ctxt);
2352 int cpl = ctxt->ops->cpl(ctxt);
2431 return ctxt->ops->guest_has_long_mode(ctxt);
2466 ctxt->ops->set_segment(ctxt, selector, &desc, 0, n);
2487 ctxt->ops->set_segment(ctxt, selector, &desc, base3, n);
2505 bad = ctxt->ops->set_cr(ctxt, 3, cr3);
2514 bad = ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PCIDE);
2518 bad = ctxt->ops->set_cr(ctxt, 0, cr0);
2523 bad = ctxt->ops->set_cr(ctxt, 4, cr4);
2527 bad = ctxt->ops->set_cr(ctxt, 3, cr3 | pcid);
2556 if (ctxt->ops->set_dr(ctxt, 6, (val & DR6_VOLATILE) | DR6_FIXED_1))
2561 if (ctxt->ops->set_dr(ctxt, 7, (val & DR7_VOLATILE) | DR7_FIXED_1))
2568 ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_TR);
2574 ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_LDTR);
2578 ctxt->ops->set_gdt(ctxt, &dt);
2582 ctxt->ops->set_idt(ctxt, &dt);
2592 ctxt->ops->set_smbase(ctxt, GET_SMSTATE(u32, smstate, 0x7ef8));
2616 if (ctxt->ops->set_dr(ctxt, 6, (val & DR6_VOLATILE) | DR6_FIXED_1))
2621 if (ctxt->ops->set_dr(ctxt, 7, (val & DR7_VOLATILE) | DR7_FIXED_1))
2627 ctxt->ops->set_smbase(ctxt, GET_SMSTATE(u32, smstate, 0x7f00));
2630 if (ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA))
2638 ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_TR);
2642 ctxt->ops->set_idt(ctxt, &dt);
2649 ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_LDTR);
2653 ctxt->ops->set_gdt(ctxt, &dt);
2676 if ((ctxt->ops->get_hflags(ctxt) & X86EMUL_SMM_MASK) == 0)
2679 smbase = ctxt->ops->get_smbase(ctxt);
2681 ret = ctxt->ops->read_phys(ctxt, smbase + 0xfe00, buf, sizeof(buf));
2685 if ((ctxt->ops->get_hflags(ctxt) & X86EMUL_SMM_INSIDE_NMI_MASK) == 0)
2686 ctxt->ops->set_nmi_mask(ctxt, false);
2688 ctxt->ops->set_hflags(ctxt, ctxt->ops->get_hflags(ctxt) &
2700 cr4 = ctxt->ops->get_cr(ctxt, 4);
2702 ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PCIDE);
2708 ctxt->ops->set_segment(ctxt, 0, &cs_desc, 0, VCPU_SREG_CS);
2712 cr0 = ctxt->ops->get_cr(ctxt, 0);
2714 ctxt->ops->set_cr(ctxt, 0, cr0 & ~(X86_CR0_PG | X86_CR0_PE));
2718 cr4 = ctxt->ops->get_cr(ctxt, 4);
2720 ctxt->ops->set_cr(ctxt, 4, cr4 & ~X86_CR4_PAE);
2724 ctxt->ops->set_msr(ctxt, MSR_EFER, efer);
2732 if (ctxt->ops->pre_leave_smm(ctxt, buf))
2747 ctxt->ops->post_leave_smm(ctxt);
2784 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
2790 const struct x86_emulate_ops *ops = ctxt->ops;
2802 ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
2825 const struct x86_emulate_ops *ops = ctxt->ops;
2839 ops->get_msr(ctxt, MSR_EFER, &efer);
2844 ops->get_msr(ctxt, MSR_STAR, &msr_data);
2853 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2854 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2861 ops->get_msr(ctxt,
2866 ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data);
2872 ops->get_msr(ctxt, MSR_STAR, &msr_data);
2884 const struct x86_emulate_ops *ops = ctxt->ops;
2890 ops->get_msr(ctxt, MSR_EFER, &efer);
2907 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
2920 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2921 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2923 ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data);
2926 ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data);
2937 const struct x86_emulate_ops *ops = ctxt->ops;
2960 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
2985 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2986 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
3003 return ctxt->ops->cpl(ctxt) > iopl;
3012 const struct x86_emulate_ops *ops = ctxt->ops;
3028 ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR);
3037 r = ops->read_std(ctxt, base + 102, &io_bitmap_ptr, 2, NULL, true);
3042 r = ops->read_std(ctxt, base + io_bitmap_ptr + port/8, &perm, 2, NULL, true);
3231 if (ctxt->ops->set_cr(ctxt, 3, tss->cr3))
3349 const struct x86_emulate_ops *ops = ctxt->ops;
3354 ops->get_cached_segment_base(ctxt, VCPU_SREG_TR);
3389 if ((tss_selector & 3) > dpl || ops->cpl(ctxt) > dpl)
3431 ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS);
3432 ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR);
3441 ops->get_dr(ctxt, 7, &dr7);
3442 ops->set_dr(ctxt, 7, dr7 & ~(DR_LOCAL_ENABLE_MASK | DR_LOCAL_SLOWDOWN));
3573 const struct x86_emulate_ops *ops = ctxt->ops;
3574 int cpl = ctxt->ops->cpl(ctxt);
3578 ops->get_segment(ctxt, &old_cs, &old_desc, NULL, VCPU_SREG_CS);
3605 ops->set_segment(ctxt, old_cs, &old_desc, 0, VCPU_SREG_CS);
3658 if (!ctxt->ops->guest_has_rdpid(ctxt))
3661 ctxt->ops->get_msr(ctxt, MSR_TSC_AUX, &tsc_aux);
3670 ctxt->ops->get_msr(ctxt, MSR_IA32_TSC, &tsc);
3680 if (ctxt->ops->read_pmc(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &pmc))
3697 if (!ctxt->ops->guest_has_movbe(ctxt))
3731 if (ctxt->ops->set_cr(ctxt, cr_num, ctxt->src.val))
3760 if (ctxt->ops->set_dr(ctxt, ctxt->modrm_reg, val) < 0)
3776 r = ctxt->ops->set_msr(ctxt, msr_index, msr_data);
3793 r = ctxt->ops->get_msr(ctxt, msr_index, &msr_data);
3809 (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3810 ctxt->ops->cpl(ctxt) > 0)
3877 ctxt->ops->invlpg(ctxt, linear);
3887 cr0 = ctxt->ops->get_cr(ctxt, 0);
3889 ctxt->ops->set_cr(ctxt, 0, cr0);
3895 int rc = ctxt->ops->fix_hypercall(ctxt);
3913 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3914 ctxt->ops->cpl(ctxt) > 0)
3932 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_gdt);
3937 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_idt);
3956 ctxt->ops->set_gdt(ctxt, &desc_ptr);
3958 ctxt->ops->set_idt(ctxt, &desc_ptr);
3976 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3977 ctxt->ops->cpl(ctxt) > 0)
3982 ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0);
3988 ctxt->ops->set_cr(ctxt, 0, (ctxt->ops->get_cr(ctxt, 0) & ~0x0eul)
4027 ctxt->ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val,
4058 ctxt->ops->get_msr(ctxt, MSR_MISC_FEATURES_ENABLES, &msr);
4060 ctxt->ops->cpl(ctxt)) {
4066 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false);
4129 if (!ctxt->ops->guest_has_fxsr(ctxt))
4132 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
4160 cr4_osfxsr = ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR;
4269 if (ctxt->ops->set_xcr(ctxt, ecx, ((u64)edx << 32) | eax))
4299 ctxt->ops->get_dr(ctxt, 7, &dr7);
4313 cr4 = ctxt->ops->get_cr(ctxt, 4);
4320 ctxt->ops->get_dr(ctxt, 6, &dr6);
4323 ctxt->ops->set_dr(ctxt, 6, dr6);
4345 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
4366 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
4368 if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt))
4376 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
4386 if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) ||
4387 ctxt->ops->check_pmc(ctxt, rcx))
5208 ctxt->ops->get_segment(ctxt, &dummy, &desc, NULL, VCPU_SREG_CS);
5549 const struct x86_emulate_ops *ops = ctxt->ops;
5567 emul_flags = ctxt->ops->get_hflags(ctxt);
5576 if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM)))
5577 || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) {
5582 if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) {
5615 if ((ctxt->d & Priv) && ops->cpl(ctxt)) {
5748 ctxt->ops->halt(ctxt);
5848 (ctxt->ops->wbinvd)(ctxt);
5856 ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg);
5859 ops->get_dr(ctxt, ctxt->modrm_reg, &ctxt->dst.val);