
Searched refs: ARMV8_PMU_CYCLE_IDX (results 1 - 8 of 8), sorted by relevance

/kernel/linux/linux-5.10/arch/arm64/kvm/
pmu-emul.c
45 return (select_idx == ARMV8_PMU_CYCLE_IDX && in kvm_pmu_idx_is_64bit()
114 if (select_idx == ARMV8_PMU_CYCLE_IDX) in kvm_pmu_idx_has_chain_evtype()
142 reg = (pmc->idx == ARMV8_PMU_CYCLE_IDX) in kvm_pmu_get_pair_counter_value()
174 else if (select_idx != ARMV8_PMU_CYCLE_IDX) in kvm_pmu_get_counter_value()
190 reg = (select_idx == ARMV8_PMU_CYCLE_IDX) in kvm_pmu_set_counter_value()
228 if (pmc->idx == ARMV8_PMU_CYCLE_IDX) { in kvm_pmu_stop_counter()
296 return BIT(ARMV8_PMU_CYCLE_IDX); in kvm_pmu_valid_counter_mask()
298 return GENMASK(val - 1, 0) | BIT(ARMV8_PMU_CYCLE_IDX); in kvm_pmu_valid_counter_mask()
526 for (i = 0; i < ARMV8_PMU_CYCLE_IDX; i++) { in kvm_pmu_software_increment()
578 kvm_pmu_set_counter_value(vcpu, ARMV8_PMU_CYCLE_IDX, in kvm_pmu_handle_pmcr()
[all...]
pmu.c
103 * or PMCCFILTR_EL0 where idx is ARMV8_PMU_CYCLE_IDX (31).
109 case ARMV8_PMU_CYCLE_IDX: in kvm_vcpu_pmu_read_evtype_direct()
120 * or PMCCFILTR_EL0 where idx is ARMV8_PMU_CYCLE_IDX (31).
126 case ARMV8_PMU_CYCLE_IDX: in kvm_vcpu_pmu_write_evtype_direct()
sys_regs.c
795 if (idx >= val && idx != ARMV8_PMU_CYCLE_IDX) { in pmu_counter_idx_valid()
825 idx = ARMV8_PMU_CYCLE_IDX; in access_pmu_evcntr()
834 idx = ARMV8_PMU_CYCLE_IDX; in access_pmu_evcntr()
877 if (idx == ARMV8_PMU_CYCLE_IDX) in access_pmu_evtyper()
/kernel/linux/linux-6.6/arch/arm64/kvm/
pmu-emul.c
69 return (pmc->idx == ARMV8_PMU_CYCLE_IDX || in kvm_pmc_is_64bit()
77 return (pmc->idx < ARMV8_PMU_CYCLE_IDX && (val & ARMV8_PMU_PMCR_LP)) || in kvm_pmc_has_64bit_overflow()
78 (pmc->idx == ARMV8_PMU_CYCLE_IDX && (val & ARMV8_PMU_PMCR_LC)); in kvm_pmc_has_64bit_overflow()
83 return (!(pmc->idx & 1) && (pmc->idx + 1) < ARMV8_PMU_CYCLE_IDX && in kvm_pmu_counter_can_chain()
89 return (idx == ARMV8_PMU_CYCLE_IDX) ? PMCCNTR_EL0 : PMEVCNTR0_EL0 + idx; in counter_index_to_reg()
94 return (idx == ARMV8_PMU_CYCLE_IDX) ? PMCCFILTR_EL0 : PMEVTYPER0_EL0 + idx; in counter_index_to_evtreg()
141 if (vcpu_mode_is_32bit(vcpu) && pmc->idx != ARMV8_PMU_CYCLE_IDX && in kvm_pmu_set_pmc_value()
257 return BIT(ARMV8_PMU_CYCLE_IDX); in kvm_pmu_valid_counter_mask()
259 return GENMASK(val - 1, 0) | BIT(ARMV8_PMU_CYCLE_IDX); in kvm_pmu_valid_counter_mask()
435 for_each_set_bit(i, &mask, ARMV8_PMU_CYCLE_IDX) { in kvm_pmu_counter_increment()
[all...]
pmu.c
109 * or PMCCFILTR_EL0 where idx is ARMV8_PMU_CYCLE_IDX (31).
115 case ARMV8_PMU_CYCLE_IDX: in kvm_vcpu_pmu_read_evtype_direct()
126 * or PMCCFILTR_EL0 where idx is ARMV8_PMU_CYCLE_IDX (31).
132 case ARMV8_PMU_CYCLE_IDX: in kvm_vcpu_pmu_write_evtype_direct()
sys_regs.c
722 u64 n, mask = BIT(ARMV8_PMU_CYCLE_IDX); in reset_pmu_reg()
884 if (idx >= val && idx != ARMV8_PMU_CYCLE_IDX) { in pmu_counter_idx_valid()
899 idx = ARMV8_PMU_CYCLE_IDX; in get_pmu_evcntr()
927 idx = ARMV8_PMU_CYCLE_IDX; in access_pmu_evcntr()
934 idx = ARMV8_PMU_CYCLE_IDX; in access_pmu_evcntr()
975 if (idx == ARMV8_PMU_CYCLE_IDX) in access_pmu_evtyper()
/kernel/linux/linux-5.10/include/kvm/
arm_pmu.h
13 #define ARMV8_PMU_CYCLE_IDX (ARMV8_PMU_MAX_COUNTERS - 1) (macro definition)
/kernel/linux/linux-6.6/include/kvm/
arm_pmu.h
13 #define ARMV8_PMU_CYCLE_IDX (ARMV8_PMU_MAX_COUNTERS - 1) (macro definition)
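
Note: the two arm_pmu.h hits are the definition itself. ARMV8_PMU_CYCLE_IDX is the last of the ARMV8_PMU_MAX_COUNTERS slots, i.e. index 31, which is why the pmu.c comments above refer to PMCCNTR_EL0/PMCCFILTR_EL0 for that index. Below is a minimal standalone C sketch (not kernel code) of how that index combines with the number of implemented event counters into the mask built in kvm_pmu_valid_counter_mask(); BIT()/GENMASK() are re-defined locally for a hosted build, valid_counter_mask() is an illustrative helper name, and the assumption that n corresponds to PMCR_EL0.N is taken from the surrounding kernel code, not from these search results.

#include <stdio.h>
#include <stdint.h>

/* From include/kvm/arm_pmu.h: the cycle counter takes the last slot. */
#define ARMV8_PMU_MAX_COUNTERS 32
#define ARMV8_PMU_CYCLE_IDX    (ARMV8_PMU_MAX_COUNTERS - 1)

/* Local stand-ins for the kernel's BIT()/GENMASK() from linux/bits.h. */
#define BIT(n)        (1ULL << (n))
#define GENMASK(h, l) (((~0ULL) >> (63 - (h))) & ~((1ULL << (l)) - 1))

/*
 * Mirrors the kvm_pmu_valid_counter_mask() hits above: n event counters
 * occupy bits 0..n-1, and the cycle counter (bit 31) is always valid.
 */
static uint64_t valid_counter_mask(unsigned int n)
{
	if (!n)
		return BIT(ARMV8_PMU_CYCLE_IDX);
	return GENMASK(n - 1, 0) | BIT(ARMV8_PMU_CYCLE_IDX);
}

int main(void)
{
	/* e.g. 6 event counters -> bits 0..5 plus bit 31 = 0x8000003f */
	printf("cycle idx = %d\n", ARMV8_PMU_CYCLE_IDX);
	printf("mask(6)   = 0x%llx\n", (unsigned long long)valid_counter_mask(6));
	return 0;
}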
