Lines matching references to intid (hedged sketches of the recurring access patterns follow the listing)

47 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
53 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
72 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
77 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
81 if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
99 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
105 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
120 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
125 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
128 if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
169 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
174 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
177 if (irq->hw && vgic_irq_is_sgi(irq->intid) && irq->enabled)
191 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
196 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
212 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
217 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
233 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
239 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
244 if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
281 return (vgic_irq_is_sgi(irq->intid) &&
289 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
294 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
304 if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
331 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
336 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
382 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
387 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
397 if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
425 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
430 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
466 static void vgic_access_active_prepare(struct kvm_vcpu *vcpu, u32 intid)
469 intid >= VGIC_NR_PRIVATE_IRQS)
474 static void vgic_access_active_finish(struct kvm_vcpu *vcpu, u32 intid)
477 intid >= VGIC_NR_PRIVATE_IRQS)
484 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
490 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
508 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
512 vgic_access_active_prepare(vcpu, intid);
516 vgic_access_active_finish(vcpu, intid);
547 if (irq->hw && !vgic_irq_is_sgi(irq->intid)) {
549 } else if (irq->hw && vgic_irq_is_sgi(irq->intid)) {
576 active && vgic_irq_is_sgi(irq->intid))
590 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
594 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
604 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
607 vgic_access_active_prepare(vcpu, intid);
611 vgic_access_active_finish(vcpu, intid);
627 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
631 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
641 u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
644 vgic_access_active_prepare(vcpu, intid);
648 vgic_access_active_finish(vcpu, intid);
663 u32 intid = VGIC_ADDR_TO_INTID(addr, 8);
668 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
689 u32 intid = VGIC_ADDR_TO_INTID(addr, 8);
694 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
699 if (irq->hw && vgic_irq_is_sgi(irq->intid))
710 u32 intid = VGIC_ADDR_TO_INTID(addr, 2);
715 struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
730 u32 intid = VGIC_ADDR_TO_INTID(addr, 2);
743 if (intid + i < VGIC_NR_PRIVATE_IRQS)
746 irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
759 u64 vgic_read_irq_line_level_info(struct kvm_vcpu *vcpu, u32 intid)
768 if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs)
771 irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
781 void vgic_write_irq_line_level_info(struct kvm_vcpu *vcpu, u32 intid,
792 if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs)
795 irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
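
The most common shape in the listing is a 1-bit-per-IRQ MMIO handler: VGIC_ADDR_TO_INTID(addr, 1) decodes the base INTID from the register offset, and the handler then walks one struct vgic_irq per bit of the access via vgic_get_irq(vcpu->kvm, vcpu, intid + i). The sketch below is a hedged reconstruction of that shape from the fragments above; the handler name, the irq->enabled test, and the vgic_put_irq() release are illustrative assumptions rather than lines taken from the listing.

/*
 * Minimal sketch of the 1-bit-per-IRQ handler shape, assuming the
 * in-kernel vgic context (<linux/kvm_host.h> plus the local vgic
 * headers). Names and the field read here are illustrative.
 */
static unsigned long example_read_bitmap(struct kvm_vcpu *vcpu,
					 gpa_t addr, unsigned int len)
{
	u32 intid = VGIC_ADDR_TO_INTID(addr, 1);	/* 1 bit per interrupt */
	unsigned long value = 0;
	int i;

	for (i = 0; i < len * 8; i++) {
		/* Look up the per-IRQ state for intid + i, as in the listing. */
		struct vgic_irq *irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);

		if (irq->enabled)
			value |= BIT(i);

		vgic_put_irq(vcpu->kvm, irq);		/* drop the reference */
	}

	return value;
}

The write-side fragments additionally guard on irq->hw && vgic_irq_is_sgi(irq->intid), i.e. SGIs backed by a hardware interrupt take a special path before the generic per-bit update; the body of that branch is not visible in the listing, so it is left out of the sketch.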
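
The active-state accessors wrap the same per-bit walk in a prepare/finish bracket: the outer handler decodes the INTID, calls vgic_access_active_prepare() before the access and vgic_access_active_finish() after it, and both helpers gate on the INTID range (the intid >= VGIC_NR_PRIVATE_IRQS test at 469/477). The sketch below reconstructs only that bracketing; the inner __example_read_active() helper is hypothetical, and any locking the real handler takes around the bracket is omitted.

/*
 * Sketch of the prepare/finish bracket around active-state reads,
 * reconstructed from the fragments at 508-516. The inner helper is a
 * stand-in for a per-bit loop like the first sketch.
 */
static unsigned long __example_read_active(struct kvm_vcpu *vcpu,
					   gpa_t addr, unsigned int len);

static unsigned long example_read_active(struct kvm_vcpu *vcpu,
					 gpa_t addr, unsigned int len)
{
	u32 intid = VGIC_ADDR_TO_INTID(addr, 1);
	unsigned long val;

	vgic_access_active_prepare(vcpu, intid);	/* gated on the INTID range */

	val = __example_read_active(vcpu, addr, len);

	vgic_access_active_finish(vcpu, intid);		/* undo whatever prepare set up */

	return val;
}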
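
The second argument to VGIC_ADDR_TO_INTID() is the number of register bits per interrupt, which is why the priority fragments decode with 8 (one byte per IRQ) and the configuration fragments with 2, while the bitmap registers above use 1. The two line-level accessors at the end take an INTID directly instead of an MMIO address and skip SGIs as well as out-of-range IDs. A hedged sketch of the read side follows; the nr_irqs computation and the line_level test are assumptions about code not shown in the listing.

/*
 * Sketch of the line-level read accessor. Only the guard on VGIC_NR_SGIS
 * and nr_irqs comes from the fragments above; the rest is assumed.
 */
u64 example_read_line_level_info(struct kvm_vcpu *vcpu, u32 intid)
{
	int nr_irqs = vcpu->kvm->arch.vgic.nr_spis + VGIC_NR_PRIVATE_IRQS;	/* assumed bound */
	u64 val = 0;
	int i;

	for (i = 0; i < 32; i++) {		/* one 32-bit register's worth of IRQs */
		struct vgic_irq *irq;

		/* SGIs and out-of-range IDs are skipped, as in the listing. */
		if ((intid + i) < VGIC_NR_SGIS || (intid + i) >= nr_irqs)
			continue;

		irq = vgic_get_irq(vcpu->kvm, vcpu, intid + i);
		if (irq->line_level)
			val |= BIT_ULL(i);
		vgic_put_irq(vcpu->kvm, irq);
	}

	return val;
}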