Lines matching refs: vgic_cpu (each match is prefixed with its line number in the source file)
31 * vgic_cpu->ap_list_lock must be taken with IRQs disabled
48 * raw_spin_lock(vcpuX->arch.vgic_cpu.ap_list_lock);
49 * raw_spin_lock(vcpuY->arch.vgic_cpu.ap_list_lock);
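The three fragments above come from the locking-order comment at the top of the file: the ap_list_lock must always be taken with interrupts disabled, and when two vCPUs' ap_list_locks are needed the lower-numbered vCPU's lock goes first. A minimal sketch of that rule, using a hypothetical helper name; the in-tree code takes the second lock with raw_spin_lock_nested() (see the match at source line 699) so lockdep accepts two locks of the same class being held at once:

    /*
     * Hypothetical helper (not in the source file): take both vCPUs'
     * ap_list_locks in the documented order. The caller is assumed to
     * have interrupts disabled already (source line 31).
     */
    static void ap_list_lock_pair(struct kvm_vcpu *x, struct kvm_vcpu *y)
    {
            struct kvm_vcpu *first  = x->vcpu_id < y->vcpu_id ? x : y;
            struct kvm_vcpu *second = x->vcpu_id < y->vcpu_id ? y : x;

            raw_spin_lock(&first->arch.vgic_cpu.ap_list_lock);
            /* _nested tells lockdep the second same-class lock is intentional. */
            raw_spin_lock_nested(&second->arch.vgic_cpu.ap_list_lock,
                                 SINGLE_DEPTH_NESTING);
    }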
98 return &vcpu->arch.vgic_cpu.private_irqs[intid];
154 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
158 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
160 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
170 raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
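The matches at source lines 154-170 show the standard shape for walking a vCPU's AP list when entries may be dropped along the way: the lock is taken with raw_spin_lock_irqsave() because, per the rule at line 31, it must be held with interrupts disabled, and list_for_each_entry_safe() lets the current entry be unlinked without invalidating the iteration. A stripped-down sketch of that skeleton, with the real function's per-IRQ checks replaced by a hypothetical predicate:

    struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
    struct vgic_irq *irq, *tmp;
    unsigned long flags;

    raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);

    list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
            if (!should_drop(irq))          /* hypothetical predicate */
                    continue;
            /* Safe while iterating: 'tmp' already points at the next entry. */
            list_del(&irq->ap_list);
    }

    raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);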
301 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
303 lockdep_assert_held(&vgic_cpu->ap_list_lock);
305 list_sort(NULL, &vgic_cpu->ap_list_head, vgic_irq_cmp);
381 raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
398 raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock,
410 list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head);
414 raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
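Source lines 381-414 are the insertion side: an interrupt is queued by taking the target vCPU's ap_list_lock (again with the _irqsave variant, since this path can be reached with interrupts enabled) and appending the IRQ's ap_list node to ap_list_head. A reduced sketch of the successful path only; the real function revalidates the IRQ and its target between taking the lock and inserting, and may drop the lock and retry:

    raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);

    /* Success path only: the revalidation/retry logic is omitted here. */
    list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head);
    irq->vcpu = vcpu;

    raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags);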
637 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
643 raw_spin_lock(&vgic_cpu->ap_list_lock);
645 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
684 raw_spin_unlock(&vgic_cpu->ap_list_lock);
698 raw_spin_lock(&vcpuA->arch.vgic_cpu.ap_list_lock);
699 raw_spin_lock_nested(&vcpuB->arch.vgic_cpu.ap_list_lock,
713 struct vgic_cpu *new_cpu = &target_vcpu->arch.vgic_cpu;
722 raw_spin_unlock(&vcpuB->arch.vgic_cpu.ap_list_lock);
723 raw_spin_unlock(&vcpuA->arch.vgic_cpu.ap_list_lock);
733 raw_spin_unlock(&vgic_cpu->ap_list_lock);
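Source lines 637-733 are the prune/migrate path: the list is scanned under the owning vCPU's ap_list_lock, and when an entry has to move to a different vCPU both ap_list_locks are taken in the order shown at lines 698-699 before the entry is relinked. A hedged sketch of just the move, assuming both ap_list_locks (and irq->irq_lock) are already held:

    struct vgic_cpu *new_cpu = &target_vcpu->arch.vgic_cpu;

    /* Unlink from the old vCPU's AP list and append to the new one. */
    list_del(&irq->ap_list);
    irq->vcpu = target_vcpu;
    list_add_tail(&irq->ap_list, &new_cpu->ap_list_head);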
776 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
782 lockdep_assert_held(&vgic_cpu->ap_list_lock);
784 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
801 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
808 lockdep_assert_held(&vgic_cpu->ap_list_lock);
816 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
842 &vgic_cpu->ap_list_head))
853 vcpu->arch.vgic_cpu.vgic_v2.used_lrs = count;
855 vcpu->arch.vgic_cpu.vgic_v3.used_lrs = count;
873 __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);
882 if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head))
889 used_lrs = vcpu->arch.vgic_cpu.vgic_v2.used_lrs;
891 used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;
903 __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);
921 if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head) &&
927 if (!list_empty(&vcpu->arch.vgic_cpu.ap_list_head)) {
928 raw_spin_lock(&vcpu->arch.vgic_cpu.ap_list_lock);
930 raw_spin_unlock(&vcpu->arch.vgic_cpu.ap_list_lock);
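The matches at source lines 921-930 are the entry-to-guest path: list_empty() lets an idle vCPU skip the lock entirely, and when the lock is taken it is a plain raw_spin_lock() rather than _irqsave, which is consistent with the rule at line 31 because this path runs with interrupts already disabled. A compact sketch of that fast-path check (the additional condition in the real early-return test at line 921 is left out):

    if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head))
            return;         /* nothing queued for this vCPU */

    raw_spin_lock(&vcpu->arch.vgic_cpu.ap_list_lock);   /* IRQs already off */
    /* ...populate the hardware list registers from the AP list... */
    raw_spin_unlock(&vcpu->arch.vgic_cpu.ap_list_lock);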
975 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
984 if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last)
989 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
991 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
1002 raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
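The final group (source lines 975-1002) is the pending-interrupt query: the AP list is walked read-only under ap_list_lock (with _irqsave, since callers may have interrupts enabled) and each entry is checked to decide whether the vCPU has something deliverable. A hedged sketch of that walk, with the per-IRQ test reduced to a hypothetical placeholder; taking irq->irq_lock inside ap_list_lock matches the file's documented lock ordering:

    bool pending = false;

    raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);

    list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
            raw_spin_lock(&irq->irq_lock);
            /* Placeholder for the real pending/enabled test. */
            pending = irq_is_deliverable(irq);
            raw_spin_unlock(&irq->irq_lock);

            if (pending)
                    break;
    }

    raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);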