Lines matching refs: intid
47 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
53 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
72 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
77 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
81 if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
99 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
105 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
120 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
125 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
128 if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
169 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
174 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
177 if (irq->hw && vgic_irq_is_sgi(irq->intid) && irq->enabled)
191 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
196 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
212 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
217 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
233 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
239 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
253 if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
300 return (vgic_irq_is_sgi(irq->intid) &&
308 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
313 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
323 if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
350 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
355 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
401 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
406 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
416 if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
444 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
449 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
486 static void vgic_access_active_prepare(struct kvm_vcpu *vcpu, u32 intid)
490 intid >= VGIC_NR_PRIVATE_IRQS)
495 static void vgic_access_active_finish(struct kvm_vcpu *vcpu, u32 intid)
499 intid >= VGIC_NR_PRIVATE_IRQS)
506 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
512 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
530 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
534 vgic_access_active_prepare(vcpu, intid);
538 vgic_access_active_finish(vcpu, intid);
569 if (irq->hw && !vgic_irq_is_sgi(irq->intid)) {
571 } else if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
598 active && vgic_irq_is_sgi(irq->intid))
612 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
616 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
626 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
629 vgic_access_active_prepare(vcpu, intid);
633 vgic_access_active_finish(vcpu, intid);
649 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
653 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
663 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
666 vgic_access_active_prepare(vcpu, intid);
670 vgic_access_active_finish(vcpu, intid);
685 u32 intid = VGIC_ADDR_TO_INTID(addr, 8);
690 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
711 u32 intid = VGIC_ADDR_TO_INTID(addr, 8);
716 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
721 if (irq->hw && vgic_irq_is_sgi(irq->intid))
732 u32 intid = VGIC_ADDR_TO_INTID(addr, 2);
737 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
752 u32 intid = VGIC_ADDR_TO_INTID(addr, 2);
765 if (intid + i < VGIC_NR_PRIVATE_IRQS)
768 irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
781 u32 vgic_read_irq_line_level_info(struct kvm_vcpu *vcpu, u32 intid)
790 if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs)
793 irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
803 void vgic_write_irq_line_level_info(struct kvm_vcpu *vcpu, u32 intid,
814 if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs)
817 irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);