/kernel/linux/linux-5.10/arch/x86/realmode/rm/
    wakeup_asm.S
        53: orb $X86_CR0_PE, %al
        63: andb $~X86_CR0_PE, %al
    trampoline_64.S
        73: movl $X86_CR0_PE, %eax  # protected mode (PE) bit
        152: movl $(X86_CR0_PG | X86_CR0_WP | X86_CR0_PE), %eax

/kernel/linux/linux-6.6/arch/x86/realmode/rm/
    wakeup_asm.S
        53: orb $X86_CR0_PE, %al
        63: andb $~X86_CR0_PE, %al

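The wakeup_asm.S matches above flip a single bit: setting CR0.PE (bit 0) moves the CPU from real mode into protected mode, and clearing it drops back to real mode on the ACPI wakeup path. A minimal user-space sketch of the same bit operations on a plain integer (CR0 itself is only writable from ring 0; 0x60000010 is the architectural CR0 reset value used here only as a starting point):

    #include <stdio.h>
    #include <stdint.h>

    #define X86_CR0_PE (1UL << 0)              /* Protection Enable, CR0 bit 0 */

    int main(void)
    {
        uint64_t cr0 = 0x60000010;             /* CR0 value after CPU reset */

        cr0 |= X86_CR0_PE;                     /* orb  $X86_CR0_PE, %al  -> protected mode */
        printf("PE set:     %#llx\n", (unsigned long long)cr0);

        cr0 &= ~(uint64_t)X86_CR0_PE;          /* andb $~X86_CR0_PE, %al -> back to real mode */
        printf("PE cleared: %#llx\n", (unsigned long long)cr0);
        return 0;
    }
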
/kernel/linux/linux-6.6/arch/x86/platform/pvh/
    head.S
        88: mov $(X86_CR0_PG | X86_CR0_PE), %eax
        119: or $(X86_CR0_PG | X86_CR0_PE), %eax

/kernel/linux/linux-5.10/arch/x86/platform/pvh/
    head.S
        88: mov $(X86_CR0_PG | X86_CR0_PE), %eax
        129: or $(X86_CR0_PG | X86_CR0_PE), %eax

/kernel/linux/linux-5.10/arch/x86/include/uapi/asm/
    processor-flags.h
        52: #define X86_CR0_PE _BITUL(X86_CR0_PE_BIT)
        162: #define CR0_STATE (X86_CR0_PE | X86_CR0_MP | X86_CR0_ET | \

/kernel/linux/linux-6.6/arch/x86/include/uapi/asm/
    processor-flags.h
        52: #define X86_CR0_PE _BITUL(X86_CR0_PE_BIT)
        170: #define CR0_STATE (X86_CR0_PE | X86_CR0_MP | X86_CR0_ET | \

/kernel/linux/patches/linux-5.10/prebuilts/usr/include/asm-x86/asm/
    processor-flags.h
        59: #define X86_CR0_PE _BITUL(X86_CR0_PE_BIT)
        146: #define CR0_STATE (X86_CR0_PE | X86_CR0_MP | X86_CR0_ET | X86_CR0_NE | X86_CR0_WP | X86_CR0_AM | X86_CR0_PG)

/kernel/linux/patches/linux-6.6/prebuilts/usr/include/asm-x86/asm/
    processor-flags.h
        59: #define X86_CR0_PE _BITUL(X86_CR0_PE_BIT)
        146: #define CR0_STATE (X86_CR0_PE | X86_CR0_MP | X86_CR0_ET | X86_CR0_NE | X86_CR0_WP | X86_CR0_AM | X86_CR0_PG)

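The four processor-flags.h entries above are the canonical definition: X86_CR0_PE is bit 0, built with _BITUL(), and CR0_STATE is the CR0 value the kernel expects once it is fully up (protected mode, paging, write protection, alignment mask and numeric-error handling enabled). A compilable sketch that rebuilds those macros from the architectural bit positions (the _BITUL definition below is a stand-in for the uapi const.h one):

    #include <stdio.h>

    #define _BITUL(x)       (1UL << (x))       /* stand-in for <asm/const.h> */

    #define X86_CR0_PE_BIT  0                  /* Protection Enable */
    #define X86_CR0_PE      _BITUL(X86_CR0_PE_BIT)
    #define X86_CR0_MP_BIT  1                  /* Monitor Coprocessor */
    #define X86_CR0_MP      _BITUL(X86_CR0_MP_BIT)
    #define X86_CR0_ET_BIT  4                  /* Extension Type */
    #define X86_CR0_ET      _BITUL(X86_CR0_ET_BIT)
    #define X86_CR0_NE_BIT  5                  /* Numeric Error */
    #define X86_CR0_NE      _BITUL(X86_CR0_NE_BIT)
    #define X86_CR0_WP_BIT  16                 /* Write Protect */
    #define X86_CR0_WP      _BITUL(X86_CR0_WP_BIT)
    #define X86_CR0_AM_BIT  18                 /* Alignment Mask */
    #define X86_CR0_AM      _BITUL(X86_CR0_AM_BIT)
    #define X86_CR0_PG_BIT  31                 /* Paging */
    #define X86_CR0_PG      _BITUL(X86_CR0_PG_BIT)

    #define CR0_STATE (X86_CR0_PE | X86_CR0_MP | X86_CR0_ET | \
                       X86_CR0_NE | X86_CR0_WP | X86_CR0_AM | X86_CR0_PG)

    int main(void)
    {
        printf("X86_CR0_PE = %#lx\n", X86_CR0_PE);   /* 0x1 */
        printf("CR0_STATE  = %#lx\n", CR0_STATE);    /* 0x80050033 */
        return 0;
    }
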
/kernel/linux/linux-5.10/arch/x86/boot/compressed/
    head_64.S
        281: movl $(X86_CR0_PG | X86_CR0_PE), %eax  /* Enable Paging and Protected mode */
        668: movl $(X86_CR0_PG | X86_CR0_PE), %eax
        865: movl $(X86_CR0_PG | X86_CR0_PE), %ecx  /* Enable Paging and Protected mode */

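The pvh/head.S and compressed-boot head_64.S stubs above, and the relocate_kernel stubs further down, all load X86_CR0_PG and X86_CR0_PE into CR0 together: paging is only architecturally valid with protected mode enabled, so both bits are switched on in one write when the boot path jumps into paged protected or long mode. A trivial sketch of the constant those movl/orl instructions build:

    #include <stdio.h>

    #define X86_CR0_PE (1UL << 0)              /* Protection Enable */
    #define X86_CR0_PG (1UL << 31)             /* Paging */

    int main(void)
    {
        unsigned long cr0 = X86_CR0_PG | X86_CR0_PE;
        printf("CR0 loaded by the boot stubs: %#lx\n", cr0);   /* 0x80000001 */
        return 0;
    }
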
/kernel/linux/linux-6.6/arch/x86/kvm/
    smm.c
        325: cr0 = vcpu->arch.cr0 & ~(X86_CR0_PE | X86_CR0_EM | X86_CR0_TS | X86_CR0_PG);  (in enter_smm())
        616: if (cr0 & X86_CR0_PE)  (in emulator_leave_smm())
        617: kvm_set_cr0(vcpu, cr0 & ~(X86_CR0_PG | X86_CR0_PE));  (in emulator_leave_smm())
    x86.h
        140: return kvm_is_cr0_bit_set(vcpu, X86_CR0_PE);  (in is_protmode())
    pmu.c
        538: kvm_is_cr0_bit_set(vcpu, X86_CR0_PE))  (in kvm_pmu_rdpmc())

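The smm.c and x86.h matches above show the two directions KVM cares about: on SMM entry it strips PE, EM, TS and PG so the vCPU starts the SMM handler in a real-mode-like state, and is_protmode() is simply a test of CR0.PE. A sketch of the same logic on plain values; the real code goes through kvm_set_cr0() and kvm_is_cr0_bit_set(), and the helper names below are only illustrative:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define X86_CR0_PE (1ULL << 0)
    #define X86_CR0_EM (1ULL << 2)
    #define X86_CR0_TS (1ULL << 3)
    #define X86_CR0_PG (1ULL << 31)

    /* CR0 as forced on SMM entry: PE, EM, TS and PG cleared (cf. enter_smm()) */
    static uint64_t smm_entry_cr0(uint64_t cr0)
    {
        return cr0 & ~(X86_CR0_PE | X86_CR0_EM | X86_CR0_TS | X86_CR0_PG);
    }

    /* "protected mode" just means CR0.PE is set (cf. is_protmode()) */
    static bool is_protmode(uint64_t cr0)
    {
        return cr0 & X86_CR0_PE;
    }

    int main(void)
    {
        uint64_t guest_cr0 = 0x80050033;                 /* typical fully-up CR0 */
        uint64_t smm_cr0   = smm_entry_cr0(guest_cr0);

        printf("guest CR0 %#llx, protmode=%d\n",
               (unsigned long long)guest_cr0, is_protmode(guest_cr0));
        printf("SMM   CR0 %#llx, protmode=%d\n",
               (unsigned long long)smm_cr0, is_protmode(smm_cr0));
        return 0;
    }
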
/kernel/linux/linux-5.10/arch/x86/kvm/vmx/
    nested.h
        265: fixed0 &= ~(X86_CR0_PE | X86_CR0_PG);  (in nested_guest_cr0_valid())

/kernel/linux/linux-6.6/arch/x86/kvm/vmx/
    nested.h
        265: fixed0 &= ~(X86_CR0_PE | X86_CR0_PG);  (in nested_guest_cr0_valid())

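Both nested.h entries implement the VMX CR0 validity rule: a guest CR0 must have every bit set that the CPU reports as fixed to 1 and every bit clear that is fixed to 0, except that with the "unrestricted guest" control CR0.PE and CR0.PG may legitimately be 0, so they are masked out of the fixed-to-1 requirement. A sketch of that check with hypothetical fixed0/fixed1 inputs (in KVM these come from the cached MSR_IA32_VMX_CR0_FIXED0/FIXED1 values):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define X86_CR0_PE (1ULL << 0)
    #define X86_CR0_PG (1ULL << 31)

    /* cr0 is valid if all fixed-to-1 bits (fixed0) are set and no bit outside
     * the allowed-to-be-1 mask (fixed1) is set; with unrestricted guest, PE and
     * PG drop out of the fixed-to-1 set (cf. nested_guest_cr0_valid()). */
    static bool guest_cr0_valid(uint64_t cr0, uint64_t fixed0, uint64_t fixed1,
                                bool unrestricted_guest)
    {
        if (unrestricted_guest)
            fixed0 &= ~(X86_CR0_PE | X86_CR0_PG);

        return ((cr0 & fixed0) == fixed0) && !(cr0 & ~fixed1);
    }

    int main(void)
    {
        /* illustrative fixed values: PE, NE (bit 5) and PG must normally be 1 */
        uint64_t fixed0 = X86_CR0_PE | (1ULL << 5) | X86_CR0_PG;
        uint64_t fixed1 = 0xffffffffULL;

        uint64_t real_mode_cr0 = (1ULL << 5);            /* PE=0, PG=0 */

        printf("without unrestricted guest: %d\n",
               guest_cr0_valid(real_mode_cr0, fixed0, fixed1, false));  /* 0 */
        printf("with    unrestricted guest: %d\n",
               guest_cr0_valid(real_mode_cr0, fixed0, fixed1, true));   /* 1 */
        return 0;
    }
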
/kernel/linux/linux-5.10/arch/x86/kernel/
    relocate_kernel_64.S
        131: orl $(X86_CR0_PG | X86_CR0_PE), %eax
    relocate_kernel_32.S
        118: orl $(X86_CR0_PE), %eax

/kernel/linux/linux-5.10/tools/testing/selftests/kvm/include/x86_64/
    processor.h
        380: #define X86_CR0_PE (1UL<<0) /* Protection Enable */

/kernel/linux/linux-6.6/arch/x86/kernel/
    relocate_kernel_64.S
        141: orl $(X86_CR0_PG | X86_CR0_PE), %eax
    relocate_kernel_32.S
        118: orl $(X86_CR0_PE), %eax

/kernel/linux/linux-5.10/arch/x86/kvm/
    x86.h
        82: return kvm_read_cr0_bits(vcpu, X86_CR0_PE);  (in is_protmode())
    pmu.c
        379: (kvm_read_cr0(vcpu) & X86_CR0_PE))  (in kvm_pmu_rdpmc())

/kernel/linux/linux-6.6/arch/x86/kvm/svm/
    nested.c
        298: CC(!(save->cr0 & X86_CR0_PE)) ||  (in __nested_vmcb_check_save())
        1111: svm_set_cr0(vcpu, vmcb01->save.cr0 | X86_CR0_PE);  (in nested_svm_vmexit())
        1724: !(save->cr0 & X86_CR0_PE) ||  (in svm_set_nested_state())

/kernel/linux/linux-5.10/arch/x86/kvm/svm/
    nested.c
        277: !(vmcb12->save.cr0 & X86_CR0_PE) ||  (in nested_vmcb_check_save())
        704: svm_set_cr0(&svm->vcpu, hsave->save.cr0 | X86_CR0_PE);  (in nested_svm_vmexit())

/kernel/linux/linux-5.10/arch/x86/include/asm/
    kvm_host.h
        93: (~(unsigned long)(X86_CR0_PE | X86_CR0_MP | X86_CR0_EM | X86_CR0_TS \