Lines matching refs:arch (cross-reference listing; each hit is prefixed with its line number in the source file)
28 * kvm->arch.config_lock (mutex)
48 * raw_spin_lock(vcpuX->arch.vgic_cpu.ap_list_lock);
49 * raw_spin_lock(vcpuY->arch.vgic_cpu.ap_list_lock);
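The two fragments above (lines 48-49) come from the locking-order comment: when more than one AP-list lock has to be held, the convention is to take the lower-numbered vCPU's lock first (vcpuX before vcpuY). A minimal sketch of that rule, assuming kernel context; the helper name lock_both_ap_lists() is hypothetical, the callers' retry logic is omitted, and the _nested annotation mirrors the pattern at lines 698-699 so lockdep accepts a second lock of the same class:

#include <linux/kvm_host.h>     /* struct kvm_vcpu */
#include <kvm/arm_vgic.h>       /* ap_list_lock in struct vgic_cpu */

/* Hypothetical helper: honour the documented ap_list_lock ordering. */
static void lock_both_ap_lists(struct kvm_vcpu *vcpuX, struct kvm_vcpu *vcpuY)
{
        /* Lowest vcpu_id first, so concurrent callers cannot deadlock. */
        if (vcpuX->vcpu_id > vcpuY->vcpu_id)
                swap(vcpuX, vcpuY);

        raw_spin_lock(&vcpuX->arch.vgic_cpu.ap_list_lock);
        raw_spin_lock_nested(&vcpuY->arch.vgic_cpu.ap_list_lock,
                             SINGLE_DEPTH_NESTING);
}
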
62 struct vgic_dist *dist = &kvm->arch.vgic;
98 return &vcpu->arch.vgic_cpu.private_irqs[intid];
102 if (intid < (kvm->arch.vgic.nr_spis + VGIC_NR_PRIVATE_IRQS)) {
103 intid = array_index_nospec(intid, kvm->arch.vgic.nr_spis + VGIC_NR_PRIVATE_IRQS);
104 return &kvm->arch.vgic.spis[intid - VGIC_NR_PRIVATE_IRQS];
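Lines 98-104 are the INTID-to-struct-vgic_irq lookup: SGIs/PPIs come out of the per-vCPU private_irqs[] array, while SPIs are bounds-checked and then clamped with array_index_nospec() so a mispredicted bounds check cannot be used to speculatively index past spis[]. A hedged reconstruction of that pattern; the function name is a placeholder and the LPI path of the real helper is omitted:

#include <linux/nospec.h>       /* array_index_nospec() */
#include <linux/kvm_host.h>
#include <kvm/arm_vgic.h>       /* VGIC_NR_PRIVATE_IRQS, struct vgic_dist */

static struct vgic_irq *intid_to_irq(struct kvm *kvm, struct kvm_vcpu *vcpu,
                                     u32 intid)
{
        /* SGIs and PPIs (INTIDs 0..31) live in the per-vCPU array. */
        if (intid < VGIC_NR_PRIVATE_IRQS)
                return &vcpu->arch.vgic_cpu.private_irqs[intid];

        /* SPIs: bounds check, then clamp the index under speculation. */
        if (intid < (kvm->arch.vgic.nr_spis + VGIC_NR_PRIVATE_IRQS)) {
                intid = array_index_nospec(intid, kvm->arch.vgic.nr_spis +
                                           VGIC_NR_PRIVATE_IRQS);
                return &kvm->arch.vgic.spis[intid - VGIC_NR_PRIVATE_IRQS];
        }

        return NULL;            /* LPIs and bogus INTIDs not handled here */
}
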
128 struct vgic_dist *dist = &kvm->arch.vgic;
141 struct vgic_dist *dist = &kvm->arch.vgic;
154 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
232 !irq->target_vcpu->kvm->arch.vgic.enabled))
301 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
381 raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
398 raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock,
410 list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head);
414 raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
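Lines 381-414 bracket the queueing path: the target vCPU's ap_list_lock is taken with the irqsave variant, the interrupt is appended to that vCPU's AP list, and the lock is dropped again (the unlock at line 398 is the retry/bail-out path). A stripped-down sketch; queue_on_ap_list() is a hypothetical name, and the real code additionally holds irq->irq_lock, re-validates the target vCPU and takes a reference on the irq:

/* Hypothetical, condensed form of the queueing step; rechecks elided. */
static void queue_on_ap_list(struct kvm_vcpu *vcpu, struct vgic_irq *irq)
{
        unsigned long flags;

        raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
        list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head);
        irq->vcpu = vcpu;
        raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags);

        /* Kick the vCPU so it notices the newly queued interrupt. */
        kvm_vcpu_kick(vcpu);
}
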
637 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
698 raw_spin_lock(&vcpuA->arch.vgic_cpu.ap_list_lock);
699 raw_spin_lock_nested(&vcpuB->arch.vgic_cpu.ap_list_lock,
713 struct vgic_cpu *new_cpu = &target_vcpu->arch.vgic_cpu;
722 raw_spin_unlock(&vcpuB->arch.vgic_cpu.ap_list_lock);
723 raw_spin_unlock(&vcpuA->arch.vgic_cpu.ap_list_lock);
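Lines 698-699, 713 and 722-723 frame the section that migrates an interrupt from one vCPU's AP list to another's: both ap_list_locks are held (ordered per the rule sketched earlier, the second acquired with raw_spin_lock_nested()), the irq is moved across the lists, and the locks are released in reverse order. A condensed sketch; the function name is hypothetical and the real code also holds irq->irq_lock and re-checks the target before moving anything:

static void migrate_to_target_ap_list(struct kvm_vcpu *vcpuA,
                                      struct kvm_vcpu *vcpuB,
                                      struct kvm_vcpu *target_vcpu,
                                      struct vgic_irq *irq)
{
        raw_spin_lock(&vcpuA->arch.vgic_cpu.ap_list_lock);
        raw_spin_lock_nested(&vcpuB->arch.vgic_cpu.ap_list_lock,
                             SINGLE_DEPTH_NESTING);

        /* Move the interrupt onto the target vCPU's AP list. */
        list_del(&irq->ap_list);
        list_add_tail(&irq->ap_list, &target_vcpu->arch.vgic_cpu.ap_list_head);
        irq->vcpu = target_vcpu;

        raw_spin_unlock(&vcpuB->arch.vgic_cpu.ap_list_lock);
        raw_spin_unlock(&vcpuA->arch.vgic_cpu.ap_list_lock);
}
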
776 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
801 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
853 vcpu->arch.vgic_cpu.vgic_v2.used_lrs = count;
855 vcpu->arch.vgic_cpu.vgic_v3.used_lrs = count;
873 __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);
882 if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head))
889 used_lrs = vcpu->arch.vgic_cpu.vgic_v2.used_lrs;
891 used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;
903 __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);
921 if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head) &&
927 if (!list_empty(&vcpu->arch.vgic_cpu.ap_list_head)) {
928 raw_spin_lock(&vcpu->arch.vgic_cpu.ap_list_lock);
930 raw_spin_unlock(&vcpu->arch.vgic_cpu.ap_list_lock);
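Lines 921-930 show the guest-entry flush: a lockless list_empty() check lets the common case (nothing in flight) skip the work, and only when the AP list is non-empty is ap_list_lock taken around the code that programs the hardware list registers. Minimal sketch; populate_lrs() stands in for the real LR-flush helper and the extra condition joined by the && at line 921 is omitted:

/* Hypothetical stand-in for the code that fills the list registers. */
static void populate_lrs(struct kvm_vcpu *vcpu);

static void flush_ap_list_to_lrs(struct kvm_vcpu *vcpu)
{
        /* Lockless fast path: nothing queued, nothing to program. */
        if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head))
                return;

        raw_spin_lock(&vcpu->arch.vgic_cpu.ap_list_lock);
        populate_lrs(vcpu);
        raw_spin_unlock(&vcpu->arch.vgic_cpu.ap_list_lock);
}
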
975 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
981 if (!vcpu->kvm->arch.vgic.enabled)
984 if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last)
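Lines 975-984 are the "is anything pending for this vCPU?" query: a disabled distributor means nothing can be pending, a GICv4 doorbell (its_vpe.pending_last) counts as pending immediately, and otherwise the AP list is walked under ap_list_lock. A hedged sketch of that shape; irq_should_fire() is a placeholder for the real per-IRQ pending/enabled/priority test done under irq->irq_lock:

/* Placeholder predicate for the per-IRQ check. */
static bool irq_should_fire(struct vgic_irq *irq);

static bool vcpu_has_pending_irq(struct kvm_vcpu *vcpu)
{
        struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
        struct vgic_irq *irq;
        unsigned long flags;
        bool pending = false;

        /* A disabled distributor never presents interrupts. */
        if (!vcpu->kvm->arch.vgic.enabled)
                return false;

        /* GICv4: the doorbell already flagged a pending vLPI. */
        if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last)
                return true;

        raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
        list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
                if (irq_should_fire(irq)) {
                        pending = true;
                        break;
                }
        }
        raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);

        return pending;
}
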