Lines Matching defs:kvm
11 #include <linux/kvm.h>
68 extern void kvm_spapr_tce_release_iommu_group(struct kvm *kvm,
77 list_for_each_entry_rcu(stt, &kvm->arch.spapr_tce_tables, list) {
96 extern long kvm_spapr_tce_attach_iommu_group(struct kvm *kvm, int tablefd,
112 list_for_each_entry_rcu(stt, &kvm->arch.spapr_tce_tables, list) {
250 struct kvm *kvm = stt->kvm;
252 mutex_lock(&kvm->lock);
254 mutex_unlock(&kvm->lock);
264 account_locked_vm(kvm->mm,
267 kvm_put_kvm(stt->kvm);
279 long kvm_vm_ioctl_create_spapr_tce(struct kvm *kvm,
284 struct mm_struct *mm = kvm->mm;
307 stt->kvm = kvm;
311 mutex_lock(&kvm->lock);
315 list_for_each_entry(siter, &kvm->arch.spapr_tce_tables, list) {
322 kvm_get_kvm(kvm);
324 ret = anon_inode_getfd("kvm-spapr-tce", &kvm_spapr_tce_fops,
328 list_add_rcu(&stt->list, &kvm->arch.spapr_tce_tables);
330 kvm_put_kvm_no_destroy(kvm);
332 mutex_unlock(&kvm->lock);
343 static long kvmppc_tce_to_ua(struct kvm *kvm, unsigned long tce,
349 memslot = search_memslots(kvm_memslots(kvm), gfn);
374 if (kvmppc_tce_to_ua(stt->kvm, tce, &ua))
383 mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift);
439 static long kvmppc_tce_iommu_mapped_dec(struct kvm *kvm,
449 mem = mm_iommu_lookup(kvm->mm, be64_to_cpu(*pua), pgsize);
460 static long kvmppc_tce_iommu_do_unmap(struct kvm *kvm,
467 if (WARN_ON_ONCE(iommu_tce_xchg_no_kill(kvm->mm, tbl, entry, &hpa,
474 ret = kvmppc_tce_iommu_mapped_dec(kvm, tbl, entry);
476 iommu_tce_xchg_no_kill(kvm->mm, tbl, entry, &hpa, &dir);
481 static long kvmppc_tce_iommu_unmap(struct kvm *kvm,
490 ret = kvmppc_tce_iommu_do_unmap(kvm, tbl, io_entry + i);
500 static long kvmppc_tce_iommu_do_map(struct kvm *kvm, struct iommu_table *tbl,
513 mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift);
524 ret = iommu_tce_xchg_no_kill(kvm->mm, tbl, entry, &hpa, &dir);
531 kvmppc_tce_iommu_mapped_dec(kvm, tbl, entry);
538 static long kvmppc_tce_iommu_map(struct kvm *kvm,
550 ret = kvmppc_tce_iommu_do_map(kvm, tbl,
573 stt = kvmppc_find_table(vcpu->kvm, liobn);
581 idx = srcu_read_lock(&vcpu->kvm->srcu);
589 if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) {
598 ret = kvmppc_tce_iommu_unmap(vcpu->kvm, stt,
601 ret = kvmppc_tce_iommu_map(vcpu->kvm, stt, stit->tbl,
606 kvmppc_clear_tce(vcpu->kvm->mm, stt, stit->tbl, entry);
614 srcu_read_unlock(&vcpu->kvm->srcu, idx);
631 stt = kvmppc_find_table(vcpu->kvm, liobn);
650 idx = srcu_read_lock(&vcpu->kvm->srcu);
651 if (kvmppc_tce_to_ua(vcpu->kvm, tce_list, &ua)) {
686 if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) {
692 ret = kvmppc_tce_iommu_map(vcpu->kvm, stt,
697 kvmppc_clear_tce(vcpu->kvm->mm, stt, stit->tbl,
707 srcu_read_unlock(&vcpu->kvm->srcu, idx);
721 stt = kvmppc_find_table(vcpu->kvm, liobn);
737 ret = kvmppc_tce_iommu_unmap(vcpu->kvm, stt,
747 kvmppc_clear_tce(vcpu->kvm->mm, stt, stit->tbl, entry + i);
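
The matches around kvm_vm_ioctl_create_spapr_tce (279-332) outline the table registration pattern: take kvm->lock, refuse a duplicate entry already on kvm->arch.spapr_tce_tables, pin the VM with kvm_get_kvm(), expose the table through an anonymous inode, publish it with list_add_rcu(), and drop the reference with kvm_put_kvm_no_destroy() if fd creation fails. The following is a minimal sketch of that shape only; allocation, size validation and locked-memory accounting are omitted, the helper name is made up, and the liobn field comparison is assumed rather than taken from the listing.

/* Sketch of the registration flow suggested by lines 279-332.
 * struct kvmppc_spapr_tce_table and kvm_spapr_tce_fops are assumed
 * to come from the surrounding file; this is not the verbatim source. */
static long register_spapr_tce_table(struct kvm *kvm,
				     struct kvmppc_spapr_tce_table *stt)
{
	struct kvmppc_spapr_tce_table *siter;
	long ret = -EBUSY;

	stt->kvm = kvm;

	mutex_lock(&kvm->lock);

	/* Reject a second table for the same LIOBN (assumed field name). */
	list_for_each_entry(siter, &kvm->arch.spapr_tce_tables, list) {
		if (siter->liobn == stt->liobn)
			goto unlock;
	}

	kvm_get_kvm(kvm);	/* the table fd holds a VM reference */
	ret = anon_inode_getfd("kvm-spapr-tce", &kvm_spapr_tce_fops,
			       stt, O_RDWR | O_CLOEXEC);
	if (ret >= 0)
		list_add_rcu(&stt->list, &kvm->arch.spapr_tce_tables);
	else
		kvm_put_kvm_no_destroy(kvm);	/* fd creation failed */

unlock:
	mutex_unlock(&kvm->lock);
	return ret;
}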
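
Line 343 onwards shows kvmppc_tce_to_ua() resolving a guest TCE to a host userspace address via a memslot lookup (349), with callers holding the kvm->srcu read lock (581, 650). A sketch of that translation, assuming the low bits of the TCE carry the TCE_PCI_READ/TCE_PCI_WRITE permission flags; the helper name and exact masking are illustrative, not the verbatim source.

/* Sketch: translate a guest TCE to a host userspace address.
 * Caller is assumed to hold the kvm->srcu read lock, as the
 * srcu_read_lock() matches above suggest. */
static long tce_to_ua_sketch(struct kvm *kvm, unsigned long tce,
			     unsigned long *ua)
{
	unsigned long gfn = tce >> PAGE_SHIFT;
	struct kvm_memory_slot *memslot;

	memslot = search_memslots(kvm_memslots(kvm), gfn);
	if (!memslot)
		return -EINVAL;

	/* Host VA of the page plus the in-page offset carried by the TCE,
	 * with the permission bits masked out (assumed layout). */
	*ua = __gfn_to_hva_memslot(memslot, gfn) |
		(tce & ~(PAGE_MASK | TCE_PCI_READ | TCE_PCI_WRITE));

	return 0;
}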