/kernel/linux/linux-6.6/tools/testing/selftests/kvm/x86_64/
    pmu_event_filter_test.c
        160  check_msr(MSR_CORE_PERF_GLOBAL_CTRL, 1);  in intel_guest_code()
        166  wrmsr(MSR_CORE_PERF_GLOBAL_CTRL, 0);  in intel_guest_code()
        171  wrmsr(MSR_CORE_PERF_GLOBAL_CTRL, 0x3);  in intel_guest_code()
        523  wrmsr(MSR_CORE_PERF_GLOBAL_CTRL, 0);  in intel_masked_events_guest_code()
        532  wrmsr(MSR_CORE_PERF_GLOBAL_CTRL, 0x7);  in intel_masked_events_guest_code()
        815  wrmsr(MSR_CORE_PERF_GLOBAL_CTRL, 0);  in intel_run_fixed_counter_guest_code()
        820  wrmsr(MSR_CORE_PERF_GLOBAL_CTRL,  in intel_run_fixed_counter_guest_code()
        823  wrmsr(MSR_CORE_PERF_GLOBAL_CTRL, 0);  in intel_run_fixed_counter_guest_code()
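
The guest code above drives the counters entirely through this one register: writing 0 stops everything, while 0x3 and 0x7 enable the first two or three general-purpose counters. As a reference for how such a mask is composed, here is a minimal sketch (not code from the selftest; the helper name is made up): the architectural layout of IA32_PERF_GLOBAL_CTRL puts the general-purpose enable bits at bit 0 and up, and the fixed-counter enable bits at bit 32 and up.

#include <stdint.h>

#define GLOBAL_CTRL_EN_PMC(i)    (1ULL << (i))        /* GP counter i    */
#define GLOBAL_CTRL_EN_FIXED(i)  (1ULL << (32 + (i))) /* fixed counter i */

/* Build an enable mask for the first nr_gp GP and nr_fixed fixed counters. */
static inline uint64_t global_ctrl_mask(unsigned int nr_gp,
                                        unsigned int nr_fixed)
{
        uint64_t val = 0;
        unsigned int i;

        for (i = 0; i < nr_gp; i++)
                val |= GLOBAL_CTRL_EN_PMC(i);
        for (i = 0; i < nr_fixed; i++)
                val |= GLOBAL_CTRL_EN_FIXED(i);
        return val;
}

With this layout, global_ctrl_mask(2, 0) yields the 0x3 written at line 171 and global_ctrl_mask(3, 0) the 0x7 written at line 532.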

/kernel/linux/linux-5.10/arch/x86/kvm/vmx/
    pmu_intel.c
        188  case MSR_CORE_PERF_GLOBAL_CTRL:  in intel_is_valid_msr()
        227  case MSR_CORE_PERF_GLOBAL_CTRL:  in intel_pmu_get_msr()
        276  case MSR_CORE_PERF_GLOBAL_CTRL:  in intel_pmu_set_msr()
    nested.c
        2638  WARN_ON_ONCE(kvm_set_msr(vcpu, MSR_CORE_PERF_GLOBAL_CTRL,  in prepare_vmcs02()
        4288  WARN_ON_ONCE(kvm_set_msr(vcpu, MSR_CORE_PERF_GLOBAL_CTRL,  in load_vmcs12_host_state()
        4792  if (kvm_x86_ops.pmu_ops->is_valid_msr(vcpu, MSR_CORE_PERF_GLOBAL_CTRL)) {  in nested_vmx_pmu_entry_exit_ctls_update()
    vmx.c
        988  case MSR_CORE_PERF_GLOBAL_CTRL:  in clear_atomic_switch_msr()
        1044  case MSR_CORE_PERF_GLOBAL_CTRL:  in add_atomic_switch_msr()

/kernel/linux/linux-5.10/arch/x86/events/zhaoxin/
    core.c
        257  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0);  in zhaoxin_pmu_disable_all()
        262  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, x86_pmu.intel_ctrl);  in zhaoxin_pmu_enable_all()

/kernel/linux/linux-5.10/arch/x86/xen/
    pmu.c
        154  case MSR_CORE_PERF_GLOBAL_CTRL:  in is_intel_pmu_msr()
        212  case MSR_CORE_PERF_GLOBAL_CTRL:  in xen_intel_pmu_emulate()

/kernel/linux/linux-6.6/arch/x86/events/zhaoxin/
    core.c
        257  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0);  in zhaoxin_pmu_disable_all()
        262  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, x86_pmu.intel_ctrl);  in zhaoxin_pmu_enable_all()

/kernel/linux/linux-6.6/arch/x86/xen/
    pmu.c
        163  case MSR_CORE_PERF_GLOBAL_CTRL:  in is_intel_pmu_msr()
        221  case MSR_CORE_PERF_GLOBAL_CTRL:  in xen_intel_pmu_emulate()

/kernel/linux/linux-6.6/arch/x86/kvm/
    pmu.c
        557  case MSR_CORE_PERF_GLOBAL_CTRL:  in kvm_pmu_is_valid_msr()
        587  case MSR_CORE_PERF_GLOBAL_CTRL:  in kvm_pmu_get_msr()
        630  case MSR_CORE_PERF_GLOBAL_CTRL:  in kvm_pmu_set_msr()
    x86.c
        1473  MSR_CORE_PERF_GLOBAL_CTRL, MSR_CORE_PERF_GLOBAL_OVF_CTRL,
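
The kvm/pmu.c hits above are the usual MSR-emulation dispatch: one switch per operation (validity check, read, write) keyed on the MSR index. A schematic sketch of that pattern follows; struct vpmu, its field names, and the reserved-bit check are illustrative assumptions, not KVM's actual data structures.

#include <stdbool.h>
#include <stdint.h>

#define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f

/* Hypothetical per-vCPU PMU state, for illustration only. */
struct vpmu {
        uint64_t global_ctrl;       /* guest view of IA32_PERF_GLOBAL_CTRL */
        uint64_t global_ctrl_rsvd;  /* bits the guest may not set          */
};

static bool vpmu_is_valid_msr(uint32_t msr)
{
        switch (msr) {
        case MSR_CORE_PERF_GLOBAL_CTRL:
                return true;
        default:
                return false;
        }
}

static int vpmu_set_msr(struct vpmu *pmu, uint32_t msr, uint64_t data)
{
        switch (msr) {
        case MSR_CORE_PERF_GLOBAL_CTRL:
                if (data & pmu->global_ctrl_rsvd)
                        return -1;      /* reserved bit set: refuse the write */
                pmu->global_ctrl = data;
                return 0;
        default:
                return -1;
        }
}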

/kernel/linux/linux-6.6/tools/testing/selftests/kvm/lib/x86_64/
    vmx.c
        269  rdmsr(MSR_CORE_PERF_GLOBAL_CTRL));  in init_vmcs_host_state()

/kernel/linux/linux-5.10/tools/testing/selftests/kvm/lib/x86_64/
    vmx.c
        284  rdmsr(MSR_CORE_PERF_GLOBAL_CTRL));  in init_vmcs_host_state()

/kernel/linux/linux-5.10/arch/x86/events/intel/
    core.c
        1973  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0);  in __intel_pmu_disable_all()
        1991  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL,  in __intel_pmu_enable_all()
        2039  * 1) Clear MSR_IA32_PEBS_ENABLE and MSR_CORE_PERF_GLOBAL_CTRL;  in intel_pmu_nhm_workaround()
        2042  * 3) set bit0~bit3 of MSR_CORE_PERF_GLOBAL_CTRL;  in intel_pmu_nhm_workaround()
        2043  * 4) Clear MSR_CORE_PERF_GLOBAL_CTRL;  in intel_pmu_nhm_workaround()
        2070  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0xf);  in intel_pmu_nhm_workaround()
        2071  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0x0);  in intel_pmu_nhm_workaround()
        2573  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0);  in intel_pmu_reset()
        3707  arr[0].msr = MSR_CORE_PERF_GLOBAL_CTRL;  in intel_guest_get_msrs()

/kernel/linux/linux-6.6/arch/x86/events/intel/
    core.c
        2214  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0);  in __intel_pmu_disable_all()
        2239  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL,  in __intel_pmu_enable_all()
        2328  * 1) Clear MSR_IA32_PEBS_ENABLE and MSR_CORE_PERF_GLOBAL_CTRL;  in intel_pmu_nhm_workaround()
        2331  * 3) set bit0~bit3 of MSR_CORE_PERF_GLOBAL_CTRL;  in intel_pmu_nhm_workaround()
        2332  * 4) Clear MSR_CORE_PERF_GLOBAL_CTRL;  in intel_pmu_nhm_workaround()
        2359  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0xf);  in intel_pmu_nhm_workaround()
        2360  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0x0);  in intel_pmu_nhm_workaround()
        2907  wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0);  in intel_pmu_reset()
        4073  .msr = MSR_CORE_PERF_GLOBAL_CTRL,  in intel_guest_get_msrs()
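
Both generations of intel/core.c (and the Zhaoxin copies above) use the register as a global switch: writing 0 to MSR_CORE_PERF_GLOBAL_CTRL freezes every counter at once, and writing back the cached enable mask (x86_pmu.intel_ctrl in the real driver) resumes them. A condensed kernel-context sketch of that pattern, with a hypothetical active_ctrl_mask standing in for the driver's cached bits:

#include <linux/types.h>
#include <asm/msr.h>
#include <asm/msr-index.h>

/* Stand-in for the driver's cached enable bits (x86_pmu.intel_ctrl). */
static u64 active_ctrl_mask;

static void pmu_global_disable_all(void)
{
        /* One write gates every enabled counter at once. */
        wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, 0);
}

static void pmu_global_enable_all(void)
{
        /* Restore whatever set of counters was enabled before. */
        wrmsrl(MSR_CORE_PERF_GLOBAL_CTRL, active_ctrl_mask);
}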

/kernel/linux/linux-5.10/arch/x86/include/asm/
    msr-index.h
        948  #define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f  (macro definition)

/kernel/linux/linux-5.10/tools/arch/x86/include/asm/
    msr-index.h
        914  #define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f  (macro definition)

/kernel/linux/linux-6.6/arch/x86/include/asm/
    msr-index.h
        1063  #define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f  (macro definition)

/kernel/linux/linux-6.6/tools/arch/x86/include/asm/
    msr-index.h
        1049  #define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f  (macro definition)
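
All four copies of msr-index.h pin the architectural address of this register at 0x0000038f. For inspecting it from userspace, one option (a sketch assuming root privileges and the msr kernel module, so that /dev/cpu/0/msr exists) is the msr character device, which takes the MSR address as the file offset:

#include <fcntl.h>
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

#define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f

int main(void)
{
        uint64_t val;
        int fd = open("/dev/cpu/0/msr", O_RDONLY);

        if (fd < 0) {
                perror("open /dev/cpu/0/msr (root + msr module required)");
                return 1;
        }
        /* The msr driver reads the MSR whose address equals the file offset. */
        if (pread(fd, &val, sizeof(val), MSR_CORE_PERF_GLOBAL_CTRL) !=
            (ssize_t)sizeof(val)) {
                perror("pread");
                close(fd);
                return 1;
        }
        printf("IA32_PERF_GLOBAL_CTRL = 0x%016" PRIx64 "\n", val);
        close(fd);
        return 0;
}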

/kernel/linux/linux-5.10/arch/x86/events/
    core.c
        1502  rdmsrl(MSR_CORE_PERF_GLOBAL_CTRL, ctrl);  in perf_event_print_debug()

/kernel/linux/linux-6.6/arch/x86/kvm/vmx/
    nested.c
        2652  WARN_ON_ONCE(kvm_set_msr(vcpu, MSR_CORE_PERF_GLOBAL_CTRL,  in prepare_vmcs02()
        4527  WARN_ON_ONCE(kvm_set_msr(vcpu, MSR_CORE_PERF_GLOBAL_CTRL,  in load_vmcs12_host_state()
    vmx.c
        1001  case MSR_CORE_PERF_GLOBAL_CTRL:  in clear_atomic_switch_msr()
        1057  case MSR_CORE_PERF_GLOBAL_CTRL:  in add_atomic_switch_msr()

/kernel/linux/linux-6.6/arch/x86/events/
    core.c
        1537  rdmsrl(MSR_CORE_PERF_GLOBAL_CTRL, ctrl);  in perf_event_print_debug()

/kernel/linux/linux-5.10/arch/x86/kvm/
    x86.c
        1245  MSR_CORE_PERF_GLOBAL_CTRL, MSR_CORE_PERF_GLOBAL_OVF_CTRL,