
Searched refs:vgic_cpu (Results 1 - 25 of 33) sorted by relevance


/kernel/linux/linux-5.10/arch/arm64/kvm/vgic/
vgic.c
29 * vgic_cpu->ap_list_lock must be taken with IRQs disabled
46 * raw_spin_lock(vcpuX->arch.vgic_cpu.ap_list_lock);
47 * raw_spin_lock(vcpuY->arch.vgic_cpu.ap_list_lock);
96 return &vcpu->arch.vgic_cpu.private_irqs[intid]; in vgic_get_irq()
153 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_flush_pending_lpis() local
157 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags); in vgic_flush_pending_lpis()
159 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) { in vgic_flush_pending_lpis()
169 raw_spin_unlock_irqrestore(&vgic_cpu in vgic_flush_pending_lpis()
300 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_sort_ap_list() local
621 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_prune_ap_list() local
760 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; compute_ap_list_depth() local
785 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_flush_lr_state() local
956 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; kvm_vgic_vcpu_pending_irq() local
[all...]
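
The vgic.c hits cluster around the per-vCPU AP (active/pending) list: the header comment notes that vgic_cpu->ap_list_lock must be taken with IRQs disabled, and that when two vCPUs are involved their locks are taken in a fixed vcpuX-then-vcpuY order. A minimal sketch of the iteration pattern implied by the vgic_flush_pending_lpis() hits, with the loop body reduced to a placeholder comment:

    struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
    struct vgic_irq *irq, *tmp;
    unsigned long flags;

    raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);

    list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
            /* detach LPIs that should no longer be pending on this vCPU */
    }

    raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);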
vgic-init.c
187 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in kvm_vgic_vcpu_init() local
192 vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF; in kvm_vgic_vcpu_init()
194 INIT_LIST_HEAD(&vgic_cpu->ap_list_head); in kvm_vgic_vcpu_init()
195 raw_spin_lock_init(&vgic_cpu->ap_list_lock); in kvm_vgic_vcpu_init()
196 atomic_set(&vgic_cpu->vgic_v3.its_vpe.vlpi_count, 0); in kvm_vgic_vcpu_init()
203 struct vgic_irq *irq = &vgic_cpu->private_irqs[i]; in kvm_vgic_vcpu_init()
277 struct vgic_cpu *vgic_cpu in vgic_init() local
356 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; kvm_vgic_vcpu_destroy() local
[all...]
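
The kvm_vgic_vcpu_init() hits show the per-vCPU bring-up of this state: the redistributor I/O device address starts out undefined, the AP list and its lock are initialised, and the per-vCPU private interrupts (SGIs and PPIs) come from the private_irqs array. A condensed sketch of that sequence; the loop bound VGIC_NR_PRIVATE_IRQS and the per-IRQ defaults are assumptions not visible in the hits themselves:

    struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
    int i;

    vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF;

    INIT_LIST_HEAD(&vgic_cpu->ap_list_head);
    raw_spin_lock_init(&vgic_cpu->ap_list_lock);
    atomic_set(&vgic_cpu->vgic_v3.its_vpe.vlpi_count, 0);

    for (i = 0; i < VGIC_NR_PRIVATE_IRQS; i++) {        /* assumed bound */
            struct vgic_irq *irq = &vgic_cpu->private_irqs[i];

            /* per-IRQ defaults (SGI/PPI config, target vCPU, ...) set here */
    }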
vgic-mmio-v3.c
227 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_mmio_read_v3r_ctlr() local
229 return vgic_cpu->lpis_enabled ? GICR_CTLR_ENABLE_LPIS : 0; in vgic_mmio_read_v3r_ctlr()
237 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_mmio_write_v3r_ctlr() local
238 bool was_enabled = vgic_cpu->lpis_enabled; in vgic_mmio_write_v3r_ctlr()
243 vgic_cpu->lpis_enabled = val & GICR_CTLR_ENABLE_LPIS; in vgic_mmio_write_v3r_ctlr()
245 if (was_enabled && !vgic_cpu in vgic_mmio_write_v3r_ctlr()
258 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_mmio_read_v3r_typer() local
481 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_mmio_write_propbase() local
500 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_mmio_read_pendbase() local
512 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_mmio_write_pendbase() local
701 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_register_redist_iodev() local
[all...]
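
In linux-5.10 the redistributor GICR_CTLR handlers track LPI enablement with a plain lpis_enabled flag on vgic_cpu. A hedged sketch of the read/write shapes implied by the hits; the reaction to disabling LPIs is abbreviated here to the vgic_flush_pending_lpis() call seen in the vgic.c hits above, and the real handlers do more:

    /* read GICR_CTLR: only the ENABLE_LPIS bit is reported */
    return vgic_cpu->lpis_enabled ? GICR_CTLR_ENABLE_LPIS : 0;

    /* write GICR_CTLR: latch ENABLE_LPIS and react to a 1 -> 0 transition */
    bool was_enabled = vgic_cpu->lpis_enabled;

    vgic_cpu->lpis_enabled = val & GICR_CTLR_ENABLE_LPIS;

    if (was_enabled && !vgic_cpu->lpis_enabled)
            vgic_flush_pending_lpis(vcpu);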
vgic-v2.c
31 struct vgic_v2_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v2; in vgic_v2_set_underflow()
51 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_v2_fold_lr_state() local
52 struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2; in vgic_v2_fold_lr_state()
59 for (lr = 0; lr < vgic_cpu->vgic_v2.used_lrs; lr++) { in vgic_v2_fold_lr_state()
212 vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = val; in vgic_v2_populate_lr()
217 vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = 0; in vgic_v2_clear_lr()
222 struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2; in vgic_v2_set_vmcr()
249 struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu in vgic_v2_get_vmcr()
[all...]
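
The vgic-v2.c hits all go through the GICv2 CPU interface embedded in vgic_cpu (vgic_cpu.vgic_v2): populate/clear write the vgic_lr array directly, and folding the list registers back after a guest exit walks used_lrs. A trimmed sketch of the fold loop, with the per-LR processing elided:

    struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
    struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2;
    int lr;

    for (lr = 0; lr < vgic_cpu->vgic_v2.used_lrs; lr++) {
            u32 val = cpuif->vgic_lr[lr];

            /* translate hardware LR state back into the software vgic_irq */
    }

The vgic-v3.c hits below follow the same pattern against vgic_cpu.vgic_v3.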
vgic-v3.c
20 struct vgic_v3_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_set_underflow()
33 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_v3_fold_lr_state() local
34 struct vgic_v3_cpu_if *cpuif = &vgic_cpu->vgic_v3; in vgic_v3_fold_lr_state()
197 vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = val; in vgic_v3_populate_lr()
202 vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = 0; in vgic_v3_clear_lr()
207 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_set_vmcr()
237 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_get_vmcr()
273 struct vgic_v3_cpu_if *vgic_v3 = &vcpu->arch.vgic_cpu in vgic_v3_enable()
507 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_v3_map_resources() local
[all...]
vgic-v4.c
98 raw_spin_lock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock); in vgic_v4_doorbell_handler()
99 vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last = true; in vgic_v4_doorbell_handler()
100 raw_spin_unlock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock); in vgic_v4_doorbell_handler()
117 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_enable_vsgis()
237 dist->its_vm.vpes[i] = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_init()
315 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_put()
325 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_load()
405 .vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe, in kvm_vgic_v4_set_forwarding()
453 atomic_dec(&irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vlpi_count); in kvm_vgic_v4_unset_forwarding()
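
The GICv4 code reaches the per-vCPU virtual PE through vgic_cpu.vgic_v3.its_vpe; the doorbell handler, for example, marks the vPE as having pending work under its vpe_lock. This sketch is taken almost verbatim from the vgic_v4_doorbell_handler() hits:

    raw_spin_lock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);
    vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last = true;
    raw_spin_unlock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);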
vgic-mmio-v2.c
362 return vcpu->arch.vgic_cpu.vgic_v2.vgic_apr; in vgic_mmio_read_apr()
364 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_mmio_read_apr()
388 vcpu->arch.vgic_cpu.vgic_v2.vgic_apr = val; in vgic_mmio_write_apr()
390 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_mmio_write_apr()
vgic.h
260 struct vgic_cpu *cpu_if = &vcpu->arch.vgic_cpu; in vgic_v3_max_apr_idx()
vgic-its.c
372 map.vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in update_affinity()
430 gpa_t pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser); in its_sync_lpi_pending_table()
690 if (!vcpu->arch.vgic_cpu.lpis_enabled) in vgic_its_resolve_lpi()
1337 if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.its_vm) in vgic_its_cmd_handle_invall()
1338 its_invall_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe); in vgic_its_cmd_handle_invall()
1805 if (!(vcpu->arch.vgic_cpu.pendbaser & GICR_PENDBASER_PTZ)) in vgic_enable_lpis()
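
The ITS emulation consults the same per-vCPU state: resolving an LPI is refused while the target redistributor has LPIs disabled, and the pending-table base is derived from the guest-programmed GICR_PENDBASER value cached in vgic_cpu.pendbaser. A small sketch of those two checks; the exact error code is illustrative:

    if (!vcpu->arch.vgic_cpu.lpis_enabled)
            return -EBUSY;   /* illustrative errno, not taken from the hits */

    gpa_t pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser);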
/kernel/linux/linux-6.6/arch/arm64/kvm/vgic/
vgic.c
31 * vgic_cpu->ap_list_lock must be taken with IRQs disabled
48 * raw_spin_lock(vcpuX->arch.vgic_cpu.ap_list_lock);
49 * raw_spin_lock(vcpuY->arch.vgic_cpu.ap_list_lock);
98 return &vcpu->arch.vgic_cpu.private_irqs[intid]; in vgic_get_irq()
154 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_flush_pending_lpis() local
158 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags); in vgic_flush_pending_lpis()
160 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) { in vgic_flush_pending_lpis()
170 raw_spin_unlock_irqrestore(&vgic_cpu in vgic_flush_pending_lpis()
301 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_sort_ap_list() local
637 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_prune_ap_list() local
776 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; compute_ap_list_depth() local
801 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_flush_lr_state() local
975 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; kvm_vgic_vcpu_pending_irq() local
[all...]
vgic-mmio-v3.c
240 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_lpis_enabled() local
242 return atomic_read(&vgic_cpu->ctlr) == GICR_CTLR_ENABLE_LPIS; in vgic_lpis_enabled()
248 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_mmio_read_v3r_ctlr() local
251 val = atomic_read(&vgic_cpu->ctlr); in vgic_mmio_read_v3r_ctlr()
262 struct vgic_cpu *vgic_cpu in vgic_mmio_write_v3r_ctlr() local
295 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_mmio_vcpu_rdist_is_last() local
508 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_mmio_read_pendbase() local
520 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_mmio_write_pendbase() local
775 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_register_redist_iodev() local
[all...]
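
In linux-6.6 the same redistributor logic no longer keeps a lpis_enabled bool; the hits show the GICR_CTLR value held in an atomic ctlr field on vgic_cpu and queried through a small helper. A sketch reconstructed from the vgic_lpis_enabled() hit:

    static bool vgic_lpis_enabled(struct kvm_vcpu *vcpu)
    {
            struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;

            return atomic_read(&vgic_cpu->ctlr) == GICR_CTLR_ENABLE_LPIS;
    }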
vgic-v2.c
31 struct vgic_v2_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v2; in vgic_v2_set_underflow()
51 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_v2_fold_lr_state() local
52 struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2; in vgic_v2_fold_lr_state()
59 for (lr = 0; lr < vgic_cpu->vgic_v2.used_lrs; lr++) { in vgic_v2_fold_lr_state()
197 vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = val; in vgic_v2_populate_lr()
202 vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = 0; in vgic_v2_clear_lr()
207 struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2; in vgic_v2_set_vmcr()
234 struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu in vgic_v2_get_vmcr()
[all...]
vgic-v3.c
24 struct vgic_v3_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_set_underflow()
37 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_v3_fold_lr_state() local
38 struct vgic_v3_cpu_if *cpuif = &vgic_cpu->vgic_v3; in vgic_v3_fold_lr_state()
186 vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = val; in vgic_v3_populate_lr()
191 vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = 0; in vgic_v3_clear_lr()
196 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_set_vmcr()
226 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_get_vmcr()
262 struct vgic_v3_cpu_if *vgic_v3 = &vcpu->arch.vgic_cpu in vgic_v3_enable()
545 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; vgic_v3_map_resources() local
[all...]
vgic-init.c
196 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in kvm_vgic_vcpu_init() local
201 vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF; in kvm_vgic_vcpu_init()
203 INIT_LIST_HEAD(&vgic_cpu->ap_list_head); in kvm_vgic_vcpu_init()
204 raw_spin_lock_init(&vgic_cpu->ap_list_lock); in kvm_vgic_vcpu_init()
205 atomic_set(&vgic_cpu->vgic_v3.its_vpe.vlpi_count, 0); in kvm_vgic_vcpu_init()
212 struct vgic_irq *irq = &vgic_cpu->private_irqs[i]; in kvm_vgic_vcpu_init()
288 struct vgic_cpu *vgic_cpu in vgic_init() local
373 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; __kvm_vgic_vcpu_destroy() local
[all...]
vgic-v4.c
98 raw_spin_lock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock); in vgic_v4_doorbell_handler()
99 vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last = true; in vgic_v4_doorbell_handler()
100 raw_spin_unlock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock); in vgic_v4_doorbell_handler()
117 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_enable_vsgis()
215 struct its_vpe *vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_get_vlpi_state()
264 dist->its_vm.vpes[i] = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_init()
341 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_put()
351 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_load()
387 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_commit()
447 .vpe = &irq->target_vcpu->arch.vgic_cpu in kvm_vgic_v4_set_forwarding()
[all...]
vgic-mmio-v2.c
373 return vcpu->arch.vgic_cpu.vgic_v2.vgic_apr; in vgic_mmio_read_apr()
375 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_mmio_read_apr()
399 vcpu->arch.vgic_cpu.vgic_v2.vgic_apr = val; in vgic_mmio_write_apr()
401 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_mmio_write_apr()
vgic.h
277 struct vgic_cpu *cpu_if = &vcpu->arch.vgic_cpu; in vgic_v3_max_apr_idx()
/kernel/linux/linux-6.6/arch/arm64/kvm/
vgic-sys-reg-v3.c
17 struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu; in set_gic_ctlr()
62 struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu; in get_gic_ctlr()
210 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in set_apr_reg()
220 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in get_apr_reg()
293 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in get_gic_sre()
/kernel/linux/linux-5.10/arch/arm64/kvm/hyp/nvhe/
switch.c
113 __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3); in __hyp_vgic_save_state()
114 __vgic_v3_deactivate_traps(&vcpu->arch.vgic_cpu.vgic_v3); in __hyp_vgic_save_state()
122 __vgic_v3_activate_traps(&vcpu->arch.vgic_cpu.vgic_v3); in __hyp_vgic_restore_state()
123 __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3); in __hyp_vgic_restore_state()
/kernel/linux/linux-5.10/arch/arm64/kvm/
vgic-sys-reg-v3.c
17 struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu; in access_gic_ctlr()
186 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_access_apr_reg()
233 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in access_gic_sre()
/kernel/linux/linux-6.6/arch/arm64/kvm/hyp/nvhe/
switch.c
119 __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3); in __hyp_vgic_save_state()
120 __vgic_v3_deactivate_traps(&vcpu->arch.vgic_cpu.vgic_v3); in __hyp_vgic_save_state()
128 __vgic_v3_activate_traps(&vcpu->arch.vgic_cpu.vgic_v3); in __hyp_vgic_restore_state()
129 __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3); in __hyp_vgic_restore_state()
hyp-main.c
49 hyp_vcpu->vcpu.arch.vgic_cpu.vgic_v3 = host_vcpu->arch.vgic_cpu.vgic_v3; in flush_hyp_vcpu()
55 struct vgic_v3_cpu_if *hyp_cpu_if = &hyp_vcpu->vcpu.arch.vgic_cpu.vgic_v3; in sync_hyp_vcpu()
56 struct vgic_v3_cpu_if *host_cpu_if = &host_vcpu->arch.vgic_cpu.vgic_v3; in sync_hyp_vcpu()
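
At EL2 the nVHE hyp code touches only the hardware-facing half of vgic_cpu: switch.c saves/restores and (de)activates traps on vgic_cpu.vgic_v3 around the world switch, and the protected-mode hyp-main.c copies that CPU interface between the host vCPU and the shadow hyp vCPU on flush/sync. A compressed sketch of the copy direction; what exactly is synced back on exit is not visible in the hits and is left as a comment:

    /* flush: host -> hyp, before running the guest at EL2 */
    hyp_vcpu->vcpu.arch.vgic_cpu.vgic_v3 = host_vcpu->arch.vgic_cpu.vgic_v3;

    /* sync: hyp -> host, after the guest exits */
    struct vgic_v3_cpu_if *hyp_cpu_if = &hyp_vcpu->vcpu.arch.vgic_cpu.vgic_v3;
    struct vgic_v3_cpu_if *host_cpu_if = &host_vcpu->arch.vgic_cpu.vgic_v3;

    /* selected state (e.g. the list registers) is copied back to host_cpu_if */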
/kernel/linux/linux-5.10/arch/arm64/include/asm/
kvm_host.h
333 struct vgic_cpu vgic_cpu; member
/kernel/linux/linux-5.10/include/kvm/
arm_vgic.h
300 struct vgic_cpu { struct
/kernel/linux/linux-6.6/include/kvm/
arm_vgic.h
325 struct vgic_cpu { struct
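
Both trailing hits are the definition of struct vgic_cpu itself in include/kvm/arm_vgic.h. A partial reconstruction limited to the fields exercised by the hits above; it is not the full or exact definition, field order differs, and the 6.6 variant replaces lpis_enabled with an atomic_t ctlr:

    struct vgic_cpu {
            /* CPU interface state used for the world switch (GICv2 or GICv3) */
            union {
                    struct vgic_v2_cpu_if vgic_v2;
                    struct vgic_v3_cpu_if vgic_v3;
            };

            struct vgic_irq private_irqs[VGIC_NR_PRIVATE_IRQS];

            raw_spinlock_t ap_list_lock;    /* protects ap_list_head */
            struct list_head ap_list_head;  /* interrupts on this vCPU's AP list */

            struct vgic_io_device rd_iodev; /* redistributor MMIO frame */
            u64 pendbaser;                  /* guest-programmed GICR_PENDBASER */
            bool lpis_enabled;              /* 5.10: GICR_CTLR.EnableLPIs */
    };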

