Lines matching references to arch:
98 raw_spin_lock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);
99 vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last = true;
100 raw_spin_unlock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);
117 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
189 struct vgic_dist *dist = &kvm->arch.vgic;
193 lockdep_assert_held(&kvm->arch.config_lock);
215 struct its_vpe *vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
241 struct vgic_dist *dist = &kvm->arch.vgic;
246 lockdep_assert_held(&kvm->arch.config_lock);
264 dist->its_vm.vpes[i] = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
317 struct its_vm *its_vm = &kvm->arch.vgic.its_vm;
320 lockdep_assert_held(&kvm->arch.config_lock);
341 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
351 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
370 err = its_make_vpe_resident(vpe, false, vcpu->kvm->arch.vgic.enabled);
387 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
446 .vm = &kvm->arch.vgic.its_vm,
447 .vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe,
513 atomic_dec(&irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vlpi_count);
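The recurring pattern in these hits is reaching the per-vCPU VPE through vcpu->arch.vgic_cpu.vgic_v3.its_vpe and serializing updates on its vpe_lock, as the hits at lines 98-100 show. Below is a minimal sketch of that pattern, assuming the usual KVM/arm64 types (struct kvm_vcpu, struct its_vpe) from the kernel headers; the helper name is made up for illustration and is not part of the listed code.

static void vpe_mark_pending(struct kvm_vcpu *vcpu)    /* hypothetical helper */
{
	/* Per-vCPU VPE embedded in the arm64 vGICv3 CPU interface state. */
	struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;

	/* vpe_lock serializes updates to the VPE's pending state. */
	raw_spin_lock(&vpe->vpe_lock);
	vpe->pending_last = true;
	raw_spin_unlock(&vpe->vpe_lock);
}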