Lines matching refs:reg (references to the local variable reg in the KVM arm64 PMU emulation code, listed with their source line numbers):
110 u64 eventsel, reg;
117 reg = PMEVTYPER0_EL0 + select_idx;
118 eventsel = __vcpu_sys_reg(vcpu, reg) & kvm_pmu_event_mask(vcpu->kvm);
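
The matches at lines 117-118 read the guest's PMEVTYPER<n>_EL0 shadow register for the selected counter and mask it down to the event number (the filter bits live in the upper part of the register). Below is a minimal standalone sketch of that masking step; the mask value and function name are illustrative, not the kernel's, which derives the real mask via kvm_pmu_event_mask().

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative event-number mask; the real width depends on the PMU version. */
    #define EXAMPLE_EVENT_MASK 0xffffULL

    /* Model of "eventsel = __vcpu_sys_reg(vcpu, reg) & mask": keep only the
     * event number, dropping the filter bits held in PMEVTYPER<n>_EL0. */
    static uint64_t event_from_typer(uint64_t pmevtyper)
    {
        return pmevtyper & EXAMPLE_EVENT_MASK;
    }

    int main(void)
    {
        uint64_t typer = 0x80000011ULL; /* a filter bit set + event 0x11 */

        printf("eventsel = 0x%llx\n", (unsigned long long)event_from_typer(typer));
        return 0;
    }
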
131 u64 counter, counter_high, reg, enabled, running;
135 reg = PMEVCNTR0_EL0 + pmc->idx;
137 counter = __vcpu_sys_reg(vcpu, reg);
138 counter_high = __vcpu_sys_reg(vcpu, reg + 1);
142 reg = (pmc->idx == ARMV8_PMU_CYCLE_IDX)
144 counter = __vcpu_sys_reg(vcpu, reg);
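
Lines 131-144 are from the counter read path: for a chained pair the two consecutive shadow registers reg and reg + 1 hold the low and high 32-bit halves, which the kernel then combines (the combining line itself does not mention reg, so it is not in this listing); otherwise reg is PMCCNTR_EL0 for the cycle counter or PMEVCNTR0_EL0 + idx. A standalone model of the combining step, with names of my own:

    #include <stdint.h>
    #include <stdio.h>

    /* Model of building one 64-bit value from the two shadow registers of a
     * chained pair: lower_32_bits(low) | (high << 32). */
    static uint64_t chained_value(uint64_t low_reg, uint64_t high_reg)
    {
        return (low_reg & 0xffffffffULL) | (high_reg << 32);
    }

    int main(void)
    {
        /* low counter recently wrapped, high counter carries the overflow */
        printf("0x%llx\n", (unsigned long long)chained_value(0x5, 0x1));
        return 0;
    }
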
188 u64 reg;
190 reg = (select_idx == ARMV8_PMU_CYCLE_IDX)
192 __vcpu_sys_reg(vcpu, reg) += (s64)val - kvm_pmu_get_counter_value(vcpu, select_idx);
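
Line 192 is the counter write path: instead of overwriting the shadow register, it is adjusted by the signed difference between the requested value and the value the guest currently reads, so the delta still pending in the backing perf event remains accounted for. A standalone sketch of that idiom; the stored-base/perf-delta split is my simplification of what kvm_pmu_get_counter_value() returns:

    #include <stdint.h>
    #include <stdio.h>

    /* Model of "__vcpu_sys_reg(vcpu, reg) += (s64)val - current":
     * correct the stored base so that base + perf_delta == val. */
    static void set_counter(uint64_t *stored_base, uint64_t perf_delta, uint64_t val)
    {
        uint64_t current = *stored_base + perf_delta; /* what the guest reads now */

        *stored_base += (int64_t)val - (int64_t)current;
    }

    int main(void)
    {
        uint64_t base = 100, delta = 40; /* guest currently reads 140 */

        set_counter(&base, delta, 1000); /* guest writes 1000 */
        printf("guest now reads %llu\n", (unsigned long long)(base + delta));
        return 0;
    }
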
220 u64 counter, reg, val;
229 reg = PMCCNTR_EL0;
232 reg = PMEVCNTR0_EL0 + pmc->idx;
236 __vcpu_sys_reg(vcpu, reg) = val;
239 __vcpu_sys_reg(vcpu, reg + 1) = upper_32_bits(counter);
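
Lines 220-239 come from the path that stops a counter and folds the final sampled value back into the guest-visible register: reg is PMCCNTR_EL0 for the cycle counter or PMEVCNTR0_EL0 + idx otherwise, the value is written to reg, and for a chained pair the upper 32 bits go to reg + 1. A standalone model, simplified to the event-counter (non-cycle) case and using names of my own:

    #include <stdint.h>
    #include <stdio.h>

    struct shadow_pair {
        uint64_t low;  /* stands in for __vcpu_sys_reg(vcpu, reg) */
        uint64_t high; /* stands in for __vcpu_sys_reg(vcpu, reg + 1) */
    };

    /* Model of saving a sampled 64-bit counter: low 32 bits into reg, and,
     * only for a chained pair, the upper 32 bits into reg + 1. */
    static void save_counter(struct shadow_pair *p, uint64_t counter, int chained)
    {
        p->low = counter & 0xffffffffULL; /* lower_32_bits(counter) */
        if (chained)
            p->high = counter >> 32;      /* upper_32_bits(counter) */
    }

    int main(void)
    {
        struct shadow_pair p = { 0, 0 };

        save_counter(&p, 0x1234567890ULL, 1);
        printf("low=0x%llx high=0x%llx\n",
               (unsigned long long)p.low, (unsigned long long)p.high);
        return 0;
    }
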
370 u64 reg = 0;
373 reg = __vcpu_sys_reg(vcpu, PMOVSSET_EL0);
374 reg &= __vcpu_sys_reg(vcpu, PMCNTENSET_EL0);
375 reg &= __vcpu_sys_reg(vcpu, PMINTENSET_EL1);
376 reg &= kvm_pmu_valid_counter_mask(vcpu);
379 return reg;
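
Lines 370-379 compute which counters have a deliverable overflow: starting from the overflow set (PMOVSSET_EL0), the result is successively ANDed with the enabled-counter set (PMCNTENSET_EL0), the interrupt-enable set (PMINTENSET_EL1) and the mask of counters this vPMU actually implements; the surrounding check that the PMU is globally enabled does not mention reg and so is not in the listing. A standalone model of the bitmask chain, with names of my own:

    #include <stdint.h>
    #include <stdio.h>

    /* Model of the overflow-status logic: a counter raises an interrupt only
     * if it overflowed, is enabled, has its interrupt enabled, and exists. */
    static uint64_t overflow_status(int pmu_enabled, uint64_t ovsset,
                                    uint64_t cntenset, uint64_t intenset,
                                    uint64_t valid_mask)
    {
        uint64_t reg = 0;

        if (pmu_enabled) {
            reg = ovsset;
            reg &= cntenset;
            reg &= intenset;
            reg &= valid_mask;
        }
        return reg;
    }

    int main(void)
    {
        /* counter 0 overflowed with everything enabled; counter 1 overflowed
         * but its interrupt is masked, so it drops out. */
        printf("0x%llx\n", (unsigned long long)
               overflow_status(1, 0x3, 0x3, 0x1, 0xffffffffULL));
        return 0;
    }
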
527 u64 type, reg;
539 reg = __vcpu_sys_reg(vcpu, PMEVCNTR0_EL0 + i) + 1;
540 reg = lower_32_bits(reg);
541 __vcpu_sys_reg(vcpu, PMEVCNTR0_EL0 + i) = reg;
543 if (reg) /* no overflow on the low part */
548 reg = __vcpu_sys_reg(vcpu, PMEVCNTR0_EL0 + i + 1) + 1;
549 reg = lower_32_bits(reg);
550 __vcpu_sys_reg(vcpu, PMEVCNTR0_EL0 + i + 1) = reg;
551 if (!reg) /* mark overflow on the high counter */
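
Lines 527-551 belong to the software-increment (PMSWINC) emulation: each selected event counter is incremented and truncated to 32 bits; if the low word wraps to zero and the counter is chained, the high counter at index i + 1 is incremented the same way, and only a wrap of the high counter sets an overflow bit (the unchained case, which marks overflow on the low counter, does not reference reg and is not shown here). A compact standalone model with illustrative names:

    #include <stdint.h>
    #include <stdio.h>

    /* Model of the chained PMSWINC path: bump the 32-bit low counter, carry
     * into the high counter on wrap, and flag overflow only when the high
     * counter itself wraps. */
    static void sw_increment(uint32_t *low, uint32_t *high,
                             uint64_t *ovsset, int idx)
    {
        *low += 1;          /* reg = lower_32_bits(reg + 1) */
        if (*low)           /* no overflow on the low part */
            return;

        *high += 1;         /* increment the high counter */
        if (!*high)         /* mark overflow on the high counter */
            *ovsset |= 1ULL << (idx + 1);
    }

    int main(void)
    {
        uint32_t low = 0xffffffffu, high = 0xffffffffu;
        uint64_t ovsset = 0;

        sw_increment(&low, &high, &ovsset, 0);
        printf("low=%u high=%u ovsset=0x%llx\n",
               low, high, (unsigned long long)ovsset);
        return 0;
    }
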
604 u64 eventsel, counter, reg, data;
613 reg = (pmc->idx == ARMV8_PMU_CYCLE_IDX)
615 data = __vcpu_sys_reg(vcpu, reg);
728 u64 reg, mask;
734 reg = (select_idx == ARMV8_PMU_CYCLE_IDX)
737 __vcpu_sys_reg(vcpu, reg) = data & mask;
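
Lines 728-737 are the write side of the event type register: reg is selected for the cycle counter's filter register or PMEVTYPER0_EL0 + select_idx otherwise, and the guest-written data is stored with only the defined event-number and filter bits kept. A standalone sketch of the masked write; the mask value here is illustrative, the kernel builds the real one from kvm_pmu_event_mask() and the filter-bit definitions:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative mask: low event-number bits plus high filter bits. */
    #define EXAMPLE_EVTYPE_MASK 0xfc00ffffULL

    /* Model of "__vcpu_sys_reg(vcpu, reg) = data & mask": only defined bits
     * of the guest's event-type write are retained in the shadow register. */
    static void write_evtyper(uint64_t *shadow_reg, uint64_t data)
    {
        *shadow_reg = data & EXAMPLE_EVTYPE_MASK;
    }

    int main(void)
    {
        uint64_t shadow = 0;

        write_evtyper(&shadow, 0xdeadbeefcafef00dULL);
        printf("stored = 0x%llx\n", (unsigned long long)shadow);
        return 0;
    }
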