Lines Matching refs:vgic_cpu

29  *       vgic_cpu->ap_list_lock		must be taken with IRQs disabled
46 * raw_spin_lock(vcpuX->arch.vgic_cpu.ap_list_lock);
47 * raw_spin_lock(vcpuY->arch.vgic_cpu.ap_list_lock);
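
The two comment lines above (source lines 46-47) spell out the ordering rule for nesting AP-list locks: when two vCPUs' locks are needed at once, the lock of the vCPU with the lower vcpu_id is taken first, with IRQs already disabled as source line 29 requires. A minimal kernel-style sketch of that rule follows; lock_two_ap_lists() is a hypothetical helper, not a function from this file, and it assumes the two vCPUs are distinct.

/*
 * Hypothetical helper: take two distinct vCPUs' ap_list_locks in the
 * documented order, lower vcpu_id first.  The caller has IRQs disabled.
 */
static void lock_two_ap_lists(struct kvm_vcpu *x, struct kvm_vcpu *y)
{
	if (x->vcpu_id > y->vcpu_id)
		swap(x, y);			/* lower vcpu_id goes first */

	raw_spin_lock(&x->arch.vgic_cpu.ap_list_lock);
	raw_spin_lock_nested(&y->arch.vgic_cpu.ap_list_lock,
			     SINGLE_DEPTH_NESTING); /* lockdep subclass, as at source line 683 */
}
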
96 return &vcpu->arch.vgic_cpu.private_irqs[intid];
153 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
157 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
159 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
169 raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
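
Source lines 153-169 show the usual shape for walking a vCPU's AP list when entries may be unlinked: the lock is taken with the _irqsave variant and the _safe list iterator is used so the current node can be removed mid-walk. A rough sketch of that pattern; should_drop() is a hypothetical predicate, and the real code additionally takes each irq->irq_lock and manages the interrupt's reference count before unlinking it.

	struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
	struct vgic_irq *irq, *tmp;
	unsigned long flags;

	raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);

	list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
		if (should_drop(irq))		/* hypothetical predicate */
			list_del(&irq->ap_list);
	}

	raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
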
300 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
302 lockdep_assert_held(&vgic_cpu->ap_list_lock);
304 list_sort(NULL, &vgic_cpu->ap_list_head, vgic_irq_cmp);
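
Source lines 300-304 sort the AP list in place via list_sort() while the caller already holds ap_list_lock (hence the lockdep_assert_held). A hedged sketch of how a comparator plugs into list_sort(); the priority-only ordering below is a simplification of what vgic_irq_cmp() actually does, and the comparator prototype varies slightly between kernel versions.

/* Simplified comparator: lower priority value (higher urgency) sorts first.
 * The real vgic_irq_cmp() also folds in pending state. */
static int ap_list_cmp(void *priv, const struct list_head *a,
		       const struct list_head *b)
{
	struct vgic_irq *irqa = container_of(a, struct vgic_irq, ap_list);
	struct vgic_irq *irqb = container_of(b, struct vgic_irq, ap_list);

	return irqa->priority - irqb->priority;
}

	/* caller holds vgic_cpu->ap_list_lock */
	list_sort(NULL, &vgic_cpu->ap_list_head, ap_list_cmp);
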
380 raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
397 raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock,
409 list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head);
413 raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
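
Source lines 380-413 show the queueing side: the target vCPU's ap_list_lock is taken with IRQs disabled and the interrupt's ap_list node is appended to that vCPU's list. A compressed sketch of that shape, assuming irq, vcpu and flags come from the surrounding function; the real path also holds irq->irq_lock, retries if the interrupt's target vCPU changes under it, and kicks the vCPU afterwards.

	raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);

	list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head);
	irq->vcpu = vcpu;	/* remember which AP list the IRQ now sits on */

	raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
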
621 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
627 raw_spin_lock(&vgic_cpu->ap_list_lock);
629 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
668 raw_spin_unlock(&vgic_cpu->ap_list_lock);
682 raw_spin_lock(&vcpuA->arch.vgic_cpu.ap_list_lock);
683 raw_spin_lock_nested(&vcpuB->arch.vgic_cpu.ap_list_lock,
697 struct vgic_cpu *new_cpu = &target_vcpu->arch.vgic_cpu;
706 raw_spin_unlock(&vcpuB->arch.vgic_cpu.ap_list_lock);
707 raw_spin_unlock(&vcpuA->arch.vgic_cpu.ap_list_lock);
717 raw_spin_unlock(&vgic_cpu->ap_list_lock);
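
Source lines 682-717 are the prune/migrate path: both vCPUs' ap_list_locks are held (taken in vcpu_id order, the second via raw_spin_lock_nested) while an interrupt is moved from one AP list to the other. A hedged sketch of the move itself once both locks are held; the per-IRQ lock and error handling are omitted.

	struct vgic_cpu *new_cpu = &target_vcpu->arch.vgic_cpu;

	/* both ap_list_locks held at this point, lower vcpu_id taken first */
	list_del(&irq->ap_list);
	irq->vcpu = target_vcpu;
	list_add_tail(&irq->ap_list, &new_cpu->ap_list_head);
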
760 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
766 lockdep_assert_held(&vgic_cpu->ap_list_lock);
768 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
785 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
792 lockdep_assert_held(&vgic_cpu->ap_list_lock);
800 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
826 &vgic_cpu->ap_list_head))
837 vcpu->arch.vgic_cpu.vgic_v2.used_lrs = count;
839 vcpu->arch.vgic_cpu.vgic_v3.used_lrs = count;
857 __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);
866 if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head))
873 used_lrs = vcpu->arch.vgic_cpu.vgic_v2.used_lrs;
875 used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;
887 __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);
905 if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head) &&
911 if (!list_empty(&vcpu->arch.vgic_cpu.ap_list_head)) {
912 raw_spin_lock(&vcpu->arch.vgic_cpu.ap_list_lock);
914 raw_spin_unlock(&vcpu->arch.vgic_cpu.ap_list_lock);
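
Source lines 905-914 guard the entry-time LR population: if the AP list is empty there is nothing to present to the guest, otherwise ap_list_lock is taken with the plain (non-_irqsave) variant because interrupts are already disabled on this path. A small sketch of that guard; the real early-return check at source line 905 has an additional condition joined with && that is dropped here, and fill_lrs() is a stand-in name, not the helper actually called there.

	/* nothing on the AP list: fast path out */
	if (list_empty(&vcpu->arch.vgic_cpu.ap_list_head))
		return;

	/* IRQs are already off on guest entry, so no _irqsave needed */
	raw_spin_lock(&vcpu->arch.vgic_cpu.ap_list_lock);
	fill_lrs(vcpu);			/* stand-in for the LR-filling step */
	raw_spin_unlock(&vcpu->arch.vgic_cpu.ap_list_lock);
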
956 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
965 if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last)
970 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
972 list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
983 raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
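
Source lines 956-983 implement the pending-interrupt query: after the GICv4 pending_last shortcut at source line 965, the AP list is scanned under ap_list_lock with IRQs disabled, looking for an interrupt that could fire. A sketch of that scan; the pending test shown is a simplification of the real one, which also considers the active state, level-triggered line level and the priority mask.

	struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
	struct vgic_irq *irq;
	unsigned long flags;
	bool pending = false;

	raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);

	list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
		raw_spin_lock(&irq->irq_lock);
		pending = irq->pending_latch && irq->enabled; /* simplified test */
		raw_spin_unlock(&irq->irq_lock);

		if (pending)
			break;
	}

	raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
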