Lines matching defs:ops — call sites of the ctxt->ops callback table (struct x86_emulate_ops), through which the KVM x86 instruction emulator reaches back into the host for registers, segments, MSRs, control registers and I/O.
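
Every match below is an indirect call through that table, which decouples instruction emulation from the code that actually touches vCPU state. A minimal sketch of the pattern in plain C; the emu_ops/emu_ctxt names and the fake backend are illustrative, not the real kvm_emulate.h layout:

#include <stdint.h>
#include <stdio.h>

struct emu_ctxt;

struct emu_ops {			/* hypothetical stand-in vtable */
	uint64_t (*get_cr)(struct emu_ctxt *ctxt, int cr);
	int      (*cpl)(struct emu_ctxt *ctxt);
};

struct emu_ctxt {
	const struct emu_ops *ops;	/* backend supplied by the host */
};

static uint64_t fake_get_cr(struct emu_ctxt *ctxt, int cr)
{
	(void)ctxt; (void)cr;
	return 0;
}

static int fake_cpl(struct emu_ctxt *ctxt)
{
	(void)ctxt;
	return 0;
}

static const struct emu_ops fake_ops = { fake_get_cr, fake_cpl };

int main(void)
{
	struct emu_ctxt ctxt = { .ops = &fake_ops };

	/* same shape as the matches below, e.g. line 650's get_cr(ctxt, 4) */
	printf("CR4=%llu CPL=%d\n",
	       (unsigned long long)ctxt.ops->get_cr(&ctxt, 4),
	       ctxt.ops->cpl(&ctxt));
	return 0;
}
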
252 ctxt->ops->write_gpr(ctxt, reg, ctxt->_regs[reg]);
484 return ctxt->ops->intercept(ctxt, &info, stage);
523 ctxt->ops->get_segment(ctxt, &sel, &ss, NULL, VCPU_SREG_SS);
578 return ctxt->ops->get_cached_segment_base(ctxt, seg);
633 ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg);
644 ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg);
645 ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg);
650 return (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_LA57) ? 57 : 48;
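
Line 650 picks the implemented virtual-address width: 57 bits when 5-level paging is enabled (CR4.LA57), else 48. Such a width feeds a canonicality test; a sketch, assuming arithmetic right shift on signed integers (which the kernel's supported compilers provide):

#include <stdbool.h>
#include <stdint.h>

static bool is_canonical(uint64_t la, unsigned int va_bits)	/* 48 or 57 */
{
	/* canonical <=> bits 63..va_bits-1 all replicate bit va_bits-1 */
	return (uint64_t)((int64_t)(la << (64 - va_bits)) >> (64 - va_bits)) == la;
}
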
715 usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL,
787 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
789 if (!(ctxt->ops->get_cr(ctxt, 0) & X86_CR0_PE)) {
805 if (!ctxt->ops->get_segment(ctxt, &selector, &cs, &base3, VCPU_SREG_CS))
849 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, true);
856 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, true);
870 return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception, false);
884 return ctxt->ops->write_std(ctxt, linear, data, size, &ctxt->exception, false);
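
Lines 849-884 are two pairs of one-line wrappers over ->read_std/->write_std; the trailing bool is assumed here to mark a "system" access (descriptor-table and TSS reads done on the CPU's behalf, lines 849/856) as opposed to an ordinary CPL-checked access (870/884). The wrapper shape, with illustrative types:

#include <stdbool.h>

struct exception { int vector; };
struct emu_ctxt;
struct emu_ops {
	int (*read_std)(struct emu_ctxt *ctxt, unsigned long addr, void *val,
			unsigned int bytes, struct exception *fault, bool system);
};
struct emu_ctxt { const struct emu_ops *ops; struct exception exception; };

static int linear_read_system(struct emu_ctxt *ctxt, unsigned long linear,
			      void *data, unsigned int size)
{
	return ctxt->ops->read_std(ctxt, linear, data, size,
				   &ctxt->exception, true);
}
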
927 rc = ctxt->ops->fetch(ctxt, linear, ctxt->fetch.end,
1095 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1108 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1124 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
1367 rc = ctxt->ops->read_emulated(ctxt, addr, mc->data + mc->end, size,
1405 return ctxt->ops->write_emulated(ctxt, linear, data, size,
1420 return ctxt->ops->cmpxchg_emulated(ctxt, linear, orig_data, data,
1441 if (!ctxt->ops->pio_in_emulated(ctxt, size, port, rc->data, n))
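
Lines 1367 and 1441 fill the emulator's operand caches: ->read_emulated appends to mc->data so that an instruction restarted after a fault replays identical bytes, and rep-string IN fetches n units per backend call. The append-and-replay idea in miniature; cached_read and the 0xAA fill are stand-ins for the real fetch:

#include <stdint.h>
#include <string.h>

struct read_cache { uint8_t data[1024]; unsigned int pos, end; };

static void cached_read(struct read_cache *mc, void *dst, unsigned int size)
{
	if (mc->pos + size > mc->end) {
		/* stand-in for ->read_emulated into mc->data + mc->end */
		memset(mc->data + mc->end, 0xAA, size);
		mc->end += size;
	}
	memcpy(dst, mc->data + mc->pos, size);	/* replay path */
	mc->pos += size;
}
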
1465 ctxt->ops->get_idt(ctxt, &dt);
1477 const struct x86_emulate_ops *ops = ctxt->ops;
1485 if (!ops->get_segment(ctxt, &sel, &desc, &base3,
1492 ops->get_gdt(ctxt, dt);
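
Lines 1485-1492 choose the descriptor table for a selector: TI (bit 2) set means the current LDT, clear means the GDT, and the high 13 bits index 8-byte entries. The address math, with illustrative parameters:

#include <stdint.h>

static uint64_t desc_addr(uint64_t gdt_base, uint64_t ldt_base, uint16_t sel)
{
	uint64_t base = (sel & (1u << 2)) ? ldt_base : gdt_base;

	return base + (sel & ~0x7u);	/* == base + (sel >> 3) * 8 */
}
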
1513 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1571 ctxt->ops->get_segment(ctxt, &dummy, &seg_desc, NULL, seg);
1601 * ctxt->ops->set_segment expects the CPL to be in
1687 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1742 ret = ctxt->ops->cmpxchg_emulated(ctxt, desc_addr, &old_desc, &seg_desc,
1748 ctxt->ops->set_segment(ctxt, selector, &seg_desc, base3, seg);
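
The cmpxchg at 1742 is assumed to set the descriptor's accessed flag atomically: the write succeeds only if the GDT/LDT entry still matches the copy that was validated, so a concurrent guest update forces a retry rather than a silent overwrite. A user-space model with C11 atomics:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

#define DESC_ACCESSED (1ULL << 40)	/* accessed flag: byte 5, bit 0 */

static bool mark_accessed(_Atomic uint64_t *slot, uint64_t seen)
{
	return atomic_compare_exchange_strong(slot, &seen,
					      seen | DESC_ACCESSED);
}
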
1759 u8 cpl = ctxt->ops->cpl(ctxt);
1867 int cpl = ctxt->ops->cpl(ctxt);
2021 const struct x86_emulate_ops *ops = ctxt->ops;
2046 ops->get_idt(ctxt, &dt);
2145 ctxt->ops->set_nmi_mask(ctxt, false);
2170 u8 cpl = ctxt->ops->cpl(ctxt);
2244 int cpl = ctxt->ops->cpl(ctxt);
2319 if (!ctxt->ops->is_smm(ctxt))
2322 if (ctxt->ops->leave_smm(ctxt))
2323 ctxt->ops->triple_fault(ctxt);
2359 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
2365 const struct x86_emulate_ops *ops = ctxt->ops;
2377 ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, true);
2400 const struct x86_emulate_ops *ops = ctxt->ops;
2414 ops->get_msr(ctxt, MSR_EFER, &efer);
2419 ops->get_msr(ctxt, MSR_STAR, &msr_data);
2428 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2429 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2436 ops->get_msr(ctxt,
2441 ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data);
2447 ops->get_msr(ctxt, MSR_STAR, &msr_data);
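
Lines 2419-2447 read the SYSCALL machinery: STAR carries the kernel CS selector in bits 47:32 with SS architecturally defined as CS + 8, the split call at 2436 picks the entry RIP MSR by mode, and SYSCALL_MASK names the RFLAGS bits to clear. The selector and flags math, which is architectural:

#include <stdint.h>

struct sys_target { uint16_t cs, ss; uint64_t rip, rflags; };

static struct sys_target syscall_entry(uint64_t star, uint64_t entry_rip,
				       uint64_t fmask, uint64_t rflags)
{
	struct sys_target t;

	t.cs     = ((uint16_t)(star >> 32)) & 0xfffc;	/* RPL forced to 0 */
	t.ss     = t.cs + 8;				/* next GDT slot */
	t.rip    = entry_rip;				/* LSTAR or CSTAR */
	t.rflags = rflags & ~fmask;			/* MSR_SYSCALL_MASK */
	return t;
}
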
2459 const struct x86_emulate_ops *ops = ctxt->ops;
2465 ops->get_msr(ctxt, MSR_EFER, &efer);
2482 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
2495 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2496 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
2498 ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data);
2501 ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data);
2512 const struct x86_emulate_ops *ops = ctxt->ops;
2535 ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
2560 ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
2561 ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
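
SYSENTER/SYSEXIT (lines 2459-2561) hang off three MSRs: IA32_SYSENTER_CS anchors a flat selector block (entry CS, with SS = CS + 8; SYSEXIT returns through higher slots in the same block), while IA32_SYSENTER_EIP/ESP supply the kernel entry point and stack. Just the entry selectors:

#include <stdint.h>

static void sysenter_selectors(uint64_t sysenter_cs,
			       uint16_t *cs, uint16_t *ss)
{
	*cs = (uint16_t)sysenter_cs & 0xfffc;	/* CPL0 code segment */
	*ss = *cs + 8;				/* CPL0 stack segment */
}
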
2578 return ctxt->ops->cpl(ctxt) > iopl;
2587 const struct x86_emulate_ops *ops = ctxt->ops;
2603 ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR);
2612 r = ops->read_std(ctxt, base + 102, &io_bitmap_ptr, 2, NULL, true);
2617 r = ops->read_std(ctxt, base + io_bitmap_ptr + port/8, &perm, 2, NULL, true);
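
Lines 2603-2617 walk the TSS I/O permission bitmap for IOPL-restricted port access: the bitmap offset sits at byte 102 of the 32-bit TSS, and a port range is permitted only if every covering bit is clear. Two bytes are read so a range straddling a byte boundary is still one test. A self-contained model:

#include <stdbool.h>
#include <stdint.h>

static bool io_permitted(const uint8_t *tss, uint16_t port, unsigned int len)
{
	uint16_t io_base = tss[102] | (uint16_t)tss[103] << 8;
	uint16_t perm = tss[io_base + port / 8] |
			(uint16_t)tss[io_base + port / 8 + 1] << 8;

	return ((perm >> (port & 7)) & ((1u << len) - 1)) == 0;
}
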
2805 if (ctxt->ops->set_cr(ctxt, 3, tss->cr3))
2922 const struct x86_emulate_ops *ops = ctxt->ops;
2927 ops->get_cached_segment_base(ctxt, VCPU_SREG_TR);
2962 if ((tss_selector & 3) > dpl || ops->cpl(ctxt) > dpl)
3003 ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS);
3004 ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR);
3013 ops->get_dr(ctxt, 7, &dr7);
3014 ops->set_dr(ctxt, 7, dr7 & ~(DR_LOCAL_ENABLE_MASK | DR_LOCAL_SLOWDOWN));
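
Lines 3013-3014 apply an architectural side effect of a hardware task switch: the outgoing task's local breakpoint enables L0-L3 (DR7 bits 0, 2, 4, 6) and the LE bit are dropped, while global enables survive. In mask form, with the kernel's constants spelled out:

#include <stdint.h>

#define DR_LOCAL_ENABLE_MASK 0x55ULL	/* L0..L3 */
#define DR_LOCAL_SLOWDOWN    0x100ULL	/* LE */

static uint64_t dr7_after_task_switch(uint64_t dr7)
{
	return dr7 & ~(DR_LOCAL_ENABLE_MASK | DR_LOCAL_SLOWDOWN);
}
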
3145 const struct x86_emulate_ops *ops = ctxt->ops;
3146 int cpl = ctxt->ops->cpl(ctxt);
3150 ops->get_segment(ctxt, &old_cs, &old_desc, NULL, VCPU_SREG_CS);
3177 ops->set_segment(ctxt, old_cs, &old_desc, 0, VCPU_SREG_CS);
3230 if (!ctxt->ops->guest_has_rdpid(ctxt))
3233 ctxt->ops->get_msr(ctxt, MSR_TSC_AUX, &tsc_aux);
3242 ctxt->ops->get_msr(ctxt, MSR_IA32_TSC, &tsc);
3252 if (ctxt->ops->read_pmc(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &pmc))
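
RDPID, RDTSC and RDPMC (lines 3230-3252) all fetch a 64-bit value from the backend; RDTSC and RDPMC then split it across EDX:EAX, zero-extending each half. The split:

#include <stdint.h>

static void write_edx_eax(uint64_t *rax, uint64_t *rdx, uint64_t val)
{
	*rax = (uint32_t)val;		/* low half, upper 32 bits zeroed */
	*rdx = (uint32_t)(val >> 32);	/* high half */
}
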
3269 if (!ctxt->ops->guest_has_movbe(ctxt))
3303 if (ctxt->ops->set_cr(ctxt, cr_num, ctxt->src.val))
3332 if (ctxt->ops->set_dr(ctxt, ctxt->modrm_reg, val) < 0)
3348 r = ctxt->ops->set_msr_with_filter(ctxt, msr_index, msr_data);
3362 r = ctxt->ops->get_msr_with_filter(ctxt, msr_index, &msr_data);
3377 (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3378 ctxt->ops->cpl(ctxt) > 0)
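
The guard at 3377-3378 recurs at 3481-3482 and 3544-3545: once CR4.UMIP is set, the descriptor-table-exposing instructions (SGDT/SIDT/SLDT/SMSW/STR) fault at any CPL above 0. As a predicate:

#include <stdbool.h>
#include <stdint.h>

#define X86_CR4_UMIP (1ULL << 11)

static bool umip_blocks(uint64_t cr4, int cpl)
{
	return (cr4 & X86_CR4_UMIP) && cpl > 0;
}
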
3445 ctxt->ops->invlpg(ctxt, linear);
3455 cr0 = ctxt->ops->get_cr(ctxt, 0);
3457 ctxt->ops->set_cr(ctxt, 0, cr0);
3463 int rc = ctxt->ops->fix_hypercall(ctxt);
3481 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3482 ctxt->ops->cpl(ctxt) > 0)
3500 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_gdt);
3505 return emulate_store_desc_ptr(ctxt, ctxt->ops->get_idt);
3524 ctxt->ops->set_gdt(ctxt, &desc_ptr);
3526 ctxt->ops->set_idt(ctxt, &desc_ptr);
3544 if ((ctxt->ops->get_cr(ctxt, 4) & X86_CR4_UMIP) &&
3545 ctxt->ops->cpl(ctxt) > 0)
3550 ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0);
3556 ctxt->ops->set_cr(ctxt, 0, (ctxt->ops->get_cr(ctxt, 0) & ~0x0eul)
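
Line 3556 is LMSW: bits 3:1 of CR0 (TS, EM, MP) are replaced from the source, while PE (bit 0) can be set but never cleared, because the old value keeps bit 0 and the new low nibble is OR-merged on top. Equivalently:

#include <stdint.h>

static uint64_t lmsw(uint64_t cr0, uint16_t msw)
{
	return (cr0 & ~0x0eULL) | (msw & 0x0f);
}
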
3595 ctxt->ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val,
3626 ctxt->ops->get_msr(ctxt, MSR_MISC_FEATURES_ENABLES, &msr);
3628 ctxt->ops->cpl(ctxt)) {
3634 ctxt->ops->get_cpuid(ctxt, &eax, &ebx, &ecx, &edx, false);
3697 if (!ctxt->ops->guest_has_fxsr(ctxt))
3700 if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
3728 cr4_osfxsr = ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR;
3833 if (!(ctxt->ops->get_cr(ctxt, 4) & X86_CR4_OSXSAVE))
3840 if (ctxt->ops->set_xcr(ctxt, ecx, ((u64)edx << 32) | eax))
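
XSETBV (lines 3833-3840) is gated on CR4.OSXSAVE; the target register index comes from ECX and the 64-bit value arrives split across EDX:EAX, the inverse of the rd* split above:

#include <stdint.h>

static uint64_t xsetbv_value(uint32_t eax, uint32_t edx)
{
	return (uint64_t)edx << 32 | eax;
}
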
3870 ctxt->ops->get_dr(ctxt, 7, &dr7);
3883 cr4 = ctxt->ops->get_cr(ctxt, 4);
3890 ctxt->ops->get_dr(ctxt, 6, &dr6);
3893 ctxt->ops->set_dr(ctxt, 6, dr6);
3915 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
3936 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
3938 if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt))
3946 u64 cr4 = ctxt->ops->get_cr(ctxt, 4);
3961 if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) ||
3962 ctxt->ops->check_pmc(ctxt, rcx))
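
Lines 3936-3962 gate the counter reads: CR4.TSD makes RDTSC CPL0-only, and RDPMC needs either CPL 0 or CR4.PCE, plus a counter index (taken from RCX) that the backend accepts. The two predicates; pmc_ok stands in for that backend check_pmc call:

#include <stdbool.h>
#include <stdint.h>

#define X86_CR4_TSD (1ULL << 2)
#define X86_CR4_PCE (1ULL << 8)

static bool rdtsc_faults(uint64_t cr4, int cpl)
{
	return (cr4 & X86_CR4_TSD) && cpl > 0;
}

static bool rdpmc_faults(uint64_t cr4, int cpl, bool pmc_ok)
{
	return (!(cr4 & X86_CR4_PCE) && cpl > 0) || !pmc_ok;
}
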
4790 ctxt->ops->get_segment(ctxt, &dummy, &desc, NULL, VCPU_SREG_CS);
5139 const struct x86_emulate_ops *ops = ctxt->ops;
5142 bool is_guest_mode = ctxt->ops->is_guest_mode(ctxt);
5165 if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM)))
5166 || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) {
5171 if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) {
5204 if ((ctxt->d & Priv) && ops->cpl(ctxt)) {
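
The decode-stage guards at 5165-5204 order the SIMD/privilege exceptions: CR0.EM, or an SSE opcode without CR4.OSFXSR, yields #UD; CR0.TS yields #NM so FPU state can be restored lazily; a Priv-tagged opcode at CPL > 0 yields #GP. One predicate, with illustrative flag parameters standing in for the ctxt->d bits:

#include <stdbool.h>
#include <stdint.h>

#define X86_CR0_EM     (1ULL << 2)
#define X86_CR0_TS     (1ULL << 3)
#define X86_CR4_OSFXSR (1ULL << 9)

enum fault { FAULT_NONE, FAULT_UD, FAULT_NM, FAULT_GP };

static enum fault decode_checks(bool sse, bool mmx, bool priv,
				uint64_t cr0, uint64_t cr4, int cpl)
{
	if (((sse || mmx) && (cr0 & X86_CR0_EM)) ||
	    (sse && !(cr4 & X86_CR4_OSFXSR)))
		return FAULT_UD;
	if ((sse || mmx) && (cr0 & X86_CR0_TS))
		return FAULT_NM;
	if (priv && cpl > 0)
		return FAULT_GP;
	return FAULT_NONE;
}
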
5337 ctxt->ops->halt(ctxt);
5438 (ctxt->ops->wbinvd)(ctxt);
5446 ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg);
5449 ops->get_dr(ctxt, ctxt->modrm_reg, &ctxt->dst.val);