
Searched refs:eventsel (Results 1 - 25 of 28) sorted by relevance


/kernel/linux/linux-5.10/arch/x86/kvm/
pmu.c 181 void reprogram_gp_counter(struct kvm_pmc *pmc, u64 eventsel) in reprogram_gp_counter() argument
190 if (eventsel & ARCH_PERFMON_EVENTSEL_PIN_CONTROL) in reprogram_gp_counter()
193 pmc->eventsel = eventsel; in reprogram_gp_counter()
197 if (!(eventsel & ARCH_PERFMON_EVENTSEL_ENABLE) || !pmc_is_enabled(pmc)) in reprogram_gp_counter()
202 __u64 key = eventsel & AMD64_RAW_EVENT_MASK_NB; in reprogram_gp_counter()
213 if (!(eventsel & (ARCH_PERFMON_EVENTSEL_EDGE | in reprogram_gp_counter()
224 config = eventsel & pmu->raw_event_mask; in reprogram_gp_counter()
226 if (pmc->current_config == eventsel && pmc_resume_counter(pmc)) in reprogram_gp_counter()
231 pmc->current_config = eventsel; in reprogram_gp_counter()
[all...]
pmu.h 21 u8 eventsel; member
142 void reprogram_gp_counter(struct kvm_pmc *pmc, u64 eventsel);
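The hits above show reprogram_gp_counter() caching the guest-written eventsel and deriving the perf config from it via the ARCH_PERFMON_EVENTSEL_* masks. As a rough standalone illustration of the field layout those masks imply (not kernel code; the mask values mirror arch/x86/include/asm/perf_event.h as I understand them):

#include <stdint.h>
#include <stdio.h>

/* Field layout of IA32_PERFEVTSELx / AMD PERF_CTL, mirroring the kernel masks. */
#define EVENTSEL_EVENT   0x000000FFULL   /* event select, bits 0-7   */
#define EVENTSEL_UMASK   0x0000FF00ULL   /* unit mask, bits 8-15     */
#define EVENTSEL_EDGE    (1ULL << 18)
#define EVENTSEL_PIN     (1ULL << 19)    /* pin control: rejected by reprogram_gp_counter() */
#define EVENTSEL_ENABLE  (1ULL << 22)    /* counter enable           */
#define EVENTSEL_CMASK   0xFF000000ULL   /* counter mask, bits 24-31 */

static void decode_eventsel(uint64_t eventsel)
{
    printf("event=0x%02llx umask=0x%02llx enable=%d edge=%d cmask=%llu\n",
           (unsigned long long)(eventsel & EVENTSEL_EVENT),
           (unsigned long long)((eventsel & EVENTSEL_UMASK) >> 8),
           (int)!!(eventsel & EVENTSEL_ENABLE),
           (int)!!(eventsel & EVENTSEL_EDGE),
           (unsigned long long)((eventsel & EVENTSEL_CMASK) >> 24));
}

int main(void)
{
    decode_eventsel(0x4100c0);   /* USR + ENABLE + event 0xc0 (retired instructions) */
    return 0;
}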
/kernel/linux/linux-6.6/arch/x86/kvm/
pmu.c 319 static bool filter_contains_match(u64 *events, u64 nevents, u64 eventsel) in filter_contains_match() argument
321 u64 event_select = eventsel & kvm_pmu_ops.EVENTSEL_EVENT; in filter_contains_match()
322 u64 umask = eventsel & ARCH_PERFMON_EVENTSEL_UMASK; in filter_contains_match()
353 u64 eventsel) in is_gp_event_allowed()
355 if (filter_contains_match(f->includes, f->nr_includes, eventsel) && in is_gp_event_allowed()
356 !filter_contains_match(f->excludes, f->nr_excludes, eventsel)) in is_gp_event_allowed()
387 return is_gp_event_allowed(filter, pmc->eventsel); in check_pmu_event_filter()
402 u64 eventsel = pmc->eventsel; in reprogram_counter() local
403 u64 new_config = eventsel; in reprogram_counter()
352 is_gp_event_allowed(struct kvm_x86_pmu_event_filter *f, u64 eventsel) is_gp_event_allowed() argument
[all...]
pmu.h 158 return pmc->eventsel & ARCH_PERFMON_EVENTSEL_ENABLE; in pmc_speculative_in_use()
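The linux-6.6 hits show the PMU event filter: is_gp_event_allowed() accepts an eventsel only if it matches an include entry and no exclude entry. A simplified sketch of that shape (linear search over the event byte only; the kernel also folds in the unit mask and bsearches a sorted array):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Toy version of the include/exclude decision visible in is_gp_event_allowed(). */
static bool contains_match(const uint64_t *events, size_t nevents, uint64_t eventsel)
{
    for (size_t i = 0; i < nevents; i++)
        if (events[i] == (eventsel & 0xFFULL))
            return true;
    return false;
}

static bool gp_event_allowed(const uint64_t *includes, size_t nr_includes,
                             const uint64_t *excludes, size_t nr_excludes,
                             uint64_t eventsel)
{
    return contains_match(includes, nr_includes, eventsel) &&
           !contains_match(excludes, nr_excludes, eventsel);
}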
/kernel/linux/linux-5.10/arch/x86/kvm/svm/
pmu.c 148 u8 event_select = pmc->eventsel & ARCH_PERFMON_EVENTSEL_EVENT; in amd_pmc_perf_hw_id()
149 u8 unit_mask = (pmc->eventsel & ARCH_PERFMON_EVENTSEL_UMASK) >> 8; in amd_pmc_perf_hw_id()
158 if (event_mapping[i].eventsel == event_select in amd_pmc_perf_hw_id()
255 msr_info->data = pmc->eventsel; in amd_pmu_get_msr()
279 if (data != pmc->eventsel) in amd_pmu_set_msr()
331 pmc->counter = pmc->eventsel = 0; in amd_pmu_reset()
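amd_pmc_perf_hw_id() walks a small table comparing the guest's event-select and unit-mask bytes against known encodings. A sketch of that table walk; the two entries are the well-known AMD encodings for cycles and retired instructions, while the real kernel map is larger:

#include <stdint.h>

enum hw_id { HW_CPU_CYCLES, HW_INSTRUCTIONS, HW_UNKNOWN };

struct event_map { uint8_t eventsel; uint8_t unit_mask; enum hw_id id; };

static const struct event_map map[] = {
    { 0x76, 0x00, HW_CPU_CYCLES },
    { 0xc0, 0x00, HW_INSTRUCTIONS },
};

static enum hw_id perf_hw_id(uint64_t eventsel_msr)
{
    uint8_t event = eventsel_msr & 0xFF;
    uint8_t umask = (eventsel_msr >> 8) & 0xFF;

    for (unsigned i = 0; i < sizeof(map) / sizeof(map[0]); i++)
        if (map[i].eventsel == event && map[i].unit_mask == umask)
            return map[i].id;
    return HW_UNKNOWN;   /* the kernel falls back to a raw event here */
}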
/kernel/linux/linux-6.6/arch/x86/kvm/vmx/
pmu_intel.c 51 u8 eventsel; member
107 u8 event_select = pmc->eventsel & ARCH_PERFMON_EVENTSEL_EVENT; in intel_hw_event_available()
108 u8 unit_mask = (pmc->eventsel & ARCH_PERFMON_EVENTSEL_UMASK) >> 8; in intel_hw_event_available()
118 if (intel_arch_events[i].eventsel != event_select || in intel_hw_event_available()
380 msr_info->data = pmc->eventsel; in intel_pmu_get_msr()
454 if (data != pmc->eventsel) { in intel_pmu_set_msr()
455 pmc->eventsel = data; in intel_pmu_set_msr()
480 pmc->eventsel = (intel_arch_events[event].unit_mask << 8) | in setup_fixed_pmc_eventsel()
481 intel_arch_events[event].eventsel; in setup_fixed_pmc_eventsel()
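setup_fixed_pmc_eventsel() at lines 480-481 gives each fixed counter a synthetic eventsel built from the architectural event table, so later code can treat fixed and general-purpose counters uniformly. A sketch under the assumption that the fixed counters map to the usual instructions / core cycles / reference cycles events (the encodings are the standard Intel architectural ones):

#include <stdint.h>

struct arch_event { uint8_t eventsel; uint8_t unit_mask; };

static const struct arch_event fixed_events[] = {
    { 0xc0, 0x00 },  /* fixed ctr 0: instructions retired        */
    { 0x3c, 0x00 },  /* fixed ctr 1: unhalted core cycles         */
    { 0x3c, 0x01 },  /* fixed ctr 2: unhalted reference cycles    */
};

static uint64_t fixed_pmc_eventsel(unsigned idx)
{
    /* Same packing as the snippet: unit mask in bits 8-15, event in bits 0-7. */
    return ((uint64_t)fixed_events[idx].unit_mask << 8) | fixed_events[idx].eventsel;
}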
/kernel/linux/linux-5.10/arch/x86/kvm/vmx/
pmu_intel.c 74 u8 event_select = pmc->eventsel & ARCH_PERFMON_EVENTSEL_EVENT; in intel_pmc_perf_hw_id()
75 u8 unit_mask = (pmc->eventsel & ARCH_PERFMON_EVENTSEL_UMASK) >> 8; in intel_pmc_perf_hw_id()
79 if (intel_arch_events[i].eventsel == event_select in intel_pmc_perf_hw_id()
246 msr_info->data = pmc->eventsel; in intel_pmu_get_msr()
313 if (data == pmc->eventsel) in intel_pmu_set_msr()
435 pmc->counter = pmc->eventsel = 0; in intel_pmu_reset()
/kernel/linux/linux-5.10/arch/arm64/kvm/
pmu-emul.c 110 u64 eventsel, reg; in kvm_pmu_idx_has_chain_evtype() local
118 eventsel = __vcpu_sys_reg(vcpu, reg) & kvm_pmu_event_mask(vcpu->kvm); in kvm_pmu_idx_has_chain_evtype()
120 return eventsel == ARMV8_PMUV3_PERFCTR_CHAIN; in kvm_pmu_idx_has_chain_evtype()
604 u64 eventsel, counter, reg, data; in kvm_pmu_create_perf_event() local
619 eventsel = ARMV8_PMUV3_PERFCTR_CPU_CYCLES; in kvm_pmu_create_perf_event()
621 eventsel = data & kvm_pmu_event_mask(vcpu->kvm); in kvm_pmu_create_perf_event()
624 if (eventsel == ARMV8_PMUV3_PERFCTR_SW_INCR) in kvm_pmu_create_perf_event()
632 !test_bit(eventsel, vcpu->kvm->arch.pmu_filter)) in kvm_pmu_create_perf_event()
644 attr.config = eventsel; in kvm_pmu_create_perf_event()
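On arm64, kvm_pmu_create_perf_event() masks the guest's event number with kvm_pmu_event_mask() and drops it straight into attr.config. Illustration only: the KVM code runs in the kernel via perf_event_create_kernel_counter(), but the essential step is the same one a userspace profiler performs with a raw event, sketched here with perf_event_open():

#include <linux/perf_event.h>
#include <sys/syscall.h>
#include <sys/types.h>
#include <string.h>
#include <unistd.h>
#include <stdint.h>

static int open_raw_event(uint64_t eventsel, pid_t pid, int cpu)
{
    struct perf_event_attr attr;

    memset(&attr, 0, sizeof(attr));
    attr.type = PERF_TYPE_RAW;
    attr.size = sizeof(attr);
    attr.config = eventsel;        /* e.g. 0x11 = CPU_CYCLES on ARMv8 PMUv3 */
    attr.disabled = 1;

    return syscall(__NR_perf_event_open, &attr, pid, cpu, -1, 0);
}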
/kernel/linux/linux-6.6/arch/x86/kvm/svm/
pmu.c 146 msr_info->data = pmc->eventsel; in amd_pmu_get_msr()
171 if (data != pmc->eventsel) { in amd_pmu_set_msr()
172 pmc->eventsel = data; in amd_pmu_set_msr()
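Both the AMD and Intel set_msr paths follow the same pattern: a write that does not change the eventsel is a no-op, otherwise the new value is stored and the counter is reprogrammed. A hedged sketch of that pattern, with reserved_bits and the reprogram callback standing in for the real kernel helpers (the vendor paths differ in whether reserved bits are rejected or masked):

#include <stdbool.h>
#include <stdint.h>

struct pmc { uint64_t eventsel; };

static bool write_eventsel(struct pmc *pmc, uint64_t data, uint64_t reserved_bits,
                           void (*reprogram)(struct pmc *))
{
    if (data & reserved_bits)
        return false;              /* the guest would see #GP (or the bits masked) */

    if (data != pmc->eventsel) {   /* only reprogram on an actual change */
        pmc->eventsel = data;
        reprogram(pmc);
    }
    return true;
}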
/kernel/linux/linux-6.6/arch/arm64/kvm/
pmu-emul.c 586 u64 eventsel, reg, data; in kvm_pmu_create_perf_event() local
593 eventsel = ARMV8_PMUV3_PERFCTR_CPU_CYCLES; in kvm_pmu_create_perf_event()
595 eventsel = data & kvm_pmu_event_mask(vcpu->kvm); in kvm_pmu_create_perf_event()
601 if (eventsel == ARMV8_PMUV3_PERFCTR_SW_INCR || in kvm_pmu_create_perf_event()
602 eventsel == ARMV8_PMUV3_PERFCTR_CHAIN) in kvm_pmu_create_perf_event()
610 !test_bit(eventsel, vcpu->kvm->arch.pmu_filter)) in kvm_pmu_create_perf_event()
622 attr.config = eventsel; in kvm_pmu_create_perf_event()
/kernel/linux/linux-5.10/arch/x86/events/amd/
core.c 281 static inline int amd_pmu_addr_offset(int index, bool eventsel) in amd_pmu_addr_offset() argument
288 if (eventsel) in amd_pmu_addr_offset()
301 if (eventsel) in amd_pmu_addr_offset()
917 .eventsel = MSR_K7_EVNTSEL0,
957 x86_pmu.eventsel = MSR_F15H_PERF_CTL; in amd_core_pmu_init()
/kernel/linux/linux-6.6/arch/x86/events/amd/
core.c 286 static inline int amd_pmu_addr_offset(int index, bool eventsel) in amd_pmu_addr_offset() argument
293 if (eventsel) in amd_pmu_addr_offset()
306 if (eventsel) in amd_pmu_addr_offset()
1251 .eventsel = MSR_K7_EVNTSEL0,
1353 x86_pmu.eventsel = MSR_F15H_PERF_CTL; in amd_core_pmu_init()
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
df_v3_6.c 462 uint32_t eventsel, instance, unitmask; in df_v3_6_pmc_get_ctrl_settings() local
473 eventsel = DF_V3_6_GET_EVENT(config) & 0x3f; in df_v3_6_pmc_get_ctrl_settings()
481 *lo_val = (unitmask << 8) | (instance_10 << 6) | eventsel; in df_v3_6_pmc_get_ctrl_settings()
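In the amdgpu data-fabric counter setup, the event select occupies the low 6 bits of the control word, with instance bits above it and the unit mask starting at bit 8. A sketch of that packing; the two-bit width of instance_10 is inferred from the shift positions and the full register layout is not shown in these hits:

#include <stdint.h>

static uint32_t df_pmc_lo_val(uint32_t eventsel, uint32_t instance_10, uint32_t unitmask)
{
    /* unit mask at bit 8, instance bits at 6-7, 6-bit event select at 0-5 */
    return (unitmask << 8) | ((instance_10 & 0x3) << 6) | (eventsel & 0x3f);
}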
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
df_v3_6.c 412 uint32_t eventsel, instance, unitmask; in df_v3_6_pmc_get_ctrl_settings() local
424 eventsel = DF_V3_6_GET_EVENT(config) & 0x3f; in df_v3_6_pmc_get_ctrl_settings()
432 *lo_val = (unitmask << 8) | (instance_10 << 6) | eventsel; in df_v3_6_pmc_get_ctrl_settings()
/kernel/linux/linux-5.10/arch/x86/events/
perf_event.h 649 unsigned eventsel; member
651 int (*addr_offset)(int index, bool eventsel);
946 return x86_pmu.eventsel + (x86_pmu.addr_offset ? in x86_pmu_config_addr()
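Here x86_pmu.eventsel is the base MSR of the first control register (MSR_K7_EVNTSEL0, MSR_F15H_PERF_CTL, MSR_ARCH_PERFMON_EVENTSEL0, ...), and x86_pmu_config_addr() adds either a vendor-specific per-index offset or simply the index. A minimal sketch of that address computation, with a cut-down struct standing in for struct x86_pmu:

#include <stdbool.h>
#include <stdint.h>

struct x86_pmu_sketch {
    unsigned eventsel;                          /* base MSR of control register 0 */
    int (*addr_offset)(int index, bool eventsel);
};

static unsigned pmu_config_addr(const struct x86_pmu_sketch *pmu, int index)
{
    /* No addr_offset callback means the control MSRs are contiguous. */
    return pmu->eventsel + (pmu->addr_offset ? pmu->addr_offset(index, true) : index);
}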
/kernel/linux/linux-5.10/arch/x86/events/intel/
p6.c 210 .eventsel = MSR_P6_EVNTSEL0,
knc.c 299 .eventsel = MSR_KNC_EVNTSEL0,
p4.c 1309 .eventsel = MSR_P4_BPU_CCCR0,
/kernel/linux/linux-6.6/arch/x86/events/intel/
p6.c 210 .eventsel = MSR_P6_EVNTSEL0,
knc.c 299 .eventsel = MSR_KNC_EVNTSEL0,
p4.c 1345 .eventsel = MSR_P4_BPU_CCCR0,
/kernel/linux/linux-6.6/arch/x86/events/
perf_event.h 758 unsigned eventsel; member
760 int (*addr_offset)(int index, bool eventsel);
1098 return x86_pmu.eventsel + (x86_pmu.addr_offset ? in x86_pmu_config_addr()
/kernel/linux/linux-5.10/arch/x86/events/zhaoxin/
core.c 468 .eventsel = MSR_ARCH_PERFMON_EVENTSEL0,
/kernel/linux/linux-6.6/arch/x86/events/zhaoxin/
core.c 468 .eventsel = MSR_ARCH_PERFMON_EVENTSEL0,
/kernel/linux/linux-5.10/arch/x86/include/asm/
kvm_host.h 420 u64 eventsel; member
424 * eventsel value for general purpose counters,

Completed in 34 milliseconds
