Lines matching refs: va (DRM GPUVA manager)
441 * struct drm_gpuva *va;
445 * va = driver_gpuva_alloc();
446 * if (!va)
451 * drm_gpuva_map(mgr, va, &op->map);
452 * drm_gpuva_link(va);
458 * va = op->remap.unmap->va;
479 * drm_gpuva_unlink(va);
488 * va = op->unmap.va;
491 * drm_gpuva_unlink(va);
574 * drm_gpuva_unlink(op->remap.unmap->va);
575 * kfree(op->remap.unmap->va);
592 * drm_gpuva_unlink(op->unmap.va);
594 * kfree(op->unmap.va);
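Taken together, the fragments above (source lines 441-594) come from the kernel-doc usage examples: a driver obtains a list of map/remap/unmap steps and pairs each drm_gpuva_map()/drm_gpuva_remap()/drm_gpuva_unmap() with the matching drm_gpuva_link()/drm_gpuva_unlink(). A minimal sketch of such an ops walk, reassembled from those fragments; driver_gpuva_alloc() is the driver-side helper the doc example assumes, and error unwinding is elided:

#include <linux/slab.h>
#include <drm/drm_gpuva_mgr.h>

static int driver_apply_ops(struct drm_gpuva_manager *mgr,
                            struct drm_gpuva_ops *ops)
{
        struct drm_gpuva_op *op;

        drm_gpuva_for_each_op(op, ops) {
                struct drm_gpuva *va;

                switch (op->op) {
                case DRM_GPUVA_OP_MAP:
                        va = driver_gpuva_alloc();      /* driver helper */
                        if (!va)
                                return -ENOMEM; /* real code must unwind */

                        drm_gpuva_map(mgr, va, &op->map);
                        drm_gpuva_link(va);
                        break;
                case DRM_GPUVA_OP_REMAP: {
                        struct drm_gpuva *prev = NULL, *next = NULL;

                        va = op->remap.unmap->va;

                        if (op->remap.prev)
                                prev = driver_gpuva_alloc();
                        if (op->remap.next)
                                next = driver_gpuva_alloc();
                        /* allocation checks elided in this sketch */

                        drm_gpuva_remap(prev, next, &op->remap);

                        drm_gpuva_unlink(va);
                        if (prev)
                                drm_gpuva_link(prev);
                        if (next)
                                drm_gpuva_link(next);
                        kfree(va);
                        break;
                }
                case DRM_GPUVA_OP_UNMAP:
                        va = op->unmap.va;

                        /* driver page-table teardown elided */
                        drm_gpuva_unlink(va);
                        drm_gpuva_unmap(&op->unmap);
                        kfree(va);
                        break;
                default:
                        break;
                }
        }

        return 0;
}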
602 #define GPUVA_START(node) ((node)->va.addr)
603 #define GPUVA_LAST(node) ((node)->va.addr + (node)->va.range - 1)
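Both macros feed the generated interval tree; note that GPUVA_LAST() is inclusive. For example, a node with va.addr = 0x1000 and va.range = 0x2000 yields GPUVA_START = 0x1000 and GPUVA_LAST = 0x2fff, i.e. the last byte covered rather than the one-past-the-end address.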
613 struct drm_gpuva *va);
614 static void __drm_gpuva_remove(struct drm_gpuva *va);
639 u64 kstart = mgr->kernel_alloc_node.va.addr;
640 u64 krange = mgr->kernel_alloc_node.va.range;
690 mgr->kernel_alloc_node.va.addr = reserve_offset;
691 mgr->kernel_alloc_node.va.range = reserve_range;
712 if (mgr->kernel_alloc_node.va.range)
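The kernel_alloc_node accesses above implement the optional reserved region a driver can hand in at init time (source lines 690-691 store it, 639-640 and 712 consult it). A minimal init/teardown sketch, assuming the drm_gpuva_manager_init() signature implied here (name, managed offset/range, reserved offset/range, optional ops) and an illustrative driver_gpuva_ops table:

/* Manage a 48-bit VA space with the first 4 KiB reserved for the
 * kernel node; all values are illustrative. */
drm_gpuva_manager_init(mgr, "example-vm",
                       0, 1ULL << 48,   /* managed VA space */
                       0, 0x1000,       /* kernel reserved node */
                       &driver_gpuva_ops);

/* ... use the manager ... */

drm_gpuva_manager_destroy(mgr);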
722 struct drm_gpuva *va)
728 GPUVA_START(va),
729 GPUVA_LAST(va)))
732 va->mgr = mgr;
734 drm_gpuva_it_insert(va, &mgr->rb.tree);
736 node = rb_prev(&va->rb.node);
742 list_add(&va->rb.entry, head);
750 * @va: the &drm_gpuva to insert
763 struct drm_gpuva *va)
765 u64 addr = va->va.addr;
766 u64 range = va->va.range;
771 return __drm_gpuva_insert(mgr, va);
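drm_gpuva_insert() validates the request against the managed region and the kernel node, then defers to __drm_gpuva_insert(), which refuses overlapping intervals. A usage sketch, assuming addr/range/obj/offset are already-validated driver inputs:

struct drm_gpuva *va;
int ret;

va = kzalloc(sizeof(*va), GFP_KERNEL);
if (!va)
        return -ENOMEM;

va->va.addr = addr;
va->va.range = range;
va->gem.obj = obj;
va->gem.offset = offset;

/* -EINVAL for a range outside the managed space; an occupied
 * interval fails via the iter_first check shown above. */
ret = drm_gpuva_insert(mgr, va);
if (ret)
        kfree(va);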
776 __drm_gpuva_remove(struct drm_gpuva *va)
778 drm_gpuva_it_remove(va, &va->mgr->rb.tree);
779 list_del_init(&va->rb.entry);
784 * @va: the &drm_gpuva to remove
786 * This removes the given &va from the underlying tree.
793 drm_gpuva_remove(struct drm_gpuva *va)
795 struct drm_gpuva_manager *mgr = va->mgr;
797 if (unlikely(va == &mgr->kernel_alloc_node)) {
802 __drm_gpuva_remove(va);
808 * @va: the &drm_gpuva to link
810 * This adds the given &va to the GPU VA list of the &drm_gem_object it is associated with.
817 drm_gpuva_link(struct drm_gpuva *va)
819 struct drm_gem_object *obj = va->gem.obj;
826 list_add_tail(&va->gem.entry, &obj->gpuva.list);
832 * @va: the &drm_gpuva to unlink
834 * This removes the given &va from the GPU VA list of the &drm_gem_object it is associated with.
841 drm_gpuva_unlink(struct drm_gpuva *va)
843 struct drm_gem_object *obj = va->gem.obj;
850 list_del_init(&va->gem.entry);
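drm_gpuva_link()/drm_gpuva_unlink() keep obj->gpuva.list in sync and, as the early obj checks suggest, tolerate a NULL va->gem.obj (e.g. sparse mappings). A sketch of the usual pairing, assuming the caller holds the lock protecting the GEM's GPU VA list (the dma-resv lock in the common case):

/* map */
ret = drm_gpuva_insert(mgr, va);
if (ret)
        return ret;
drm_gpuva_link(va);

/* ... later, unmap ... */
drm_gpuva_unlink(va);
drm_gpuva_remove(va);
kfree(va);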
884 struct drm_gpuva *va;
886 va = drm_gpuva_find_first(mgr, addr, range);
887 if (!va)
890 if (va->va.addr != addr ||
891 va->va.range != range)
894 return va;
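drm_gpuva_find() (source lines 884-894) only succeeds on an exact addr/range match layered on drm_gpuva_find_first(), which returns the first mapping merely overlapping the interval. A lookup sketch; returning -ENOENT is an illustrative driver choice:

struct drm_gpuva *va;

va = drm_gpuva_find(mgr, addr, range);
if (!va)
        return -ENOENT; /* no mapping with exactly this addr/range */

drm_gpuva_unlink(va);
drm_gpuva_remove(va);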
965 * @va: the &drm_gpuva to insert
966 * @op: the &drm_gpuva_op_map to initialize @va with
968 * Initializes the @va from the @op and inserts it into the given @mgr.
972 struct drm_gpuva *va,
975 drm_gpuva_init_from_op(va, op);
976 drm_gpuva_insert(mgr, va);
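drm_gpuva_init_from_op() is expected to copy the op's fields into the fresh @va before the insert; spelled out, the effect is roughly:

va->va.addr = op->va.addr;
va->va.range = op->va.range;
va->gem.obj = op->gem.obj;
va->gem.offset = op->gem.offset;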
995 struct drm_gpuva *curr = op->unmap->va;
1022 drm_gpuva_remove(op->va);
1034 op.map.va.addr = addr;
1035 op.map.va.range = range;
1062 struct drm_gpuva *va, bool merge)
1067 op.unmap.va = va;
1079 struct drm_gpuva *va, *next;
1086 drm_gpuva_for_each_va_range_safe(va, next, mgr, req_addr, req_end) {
1087 struct drm_gem_object *obj = va->gem.obj;
1088 u64 offset = va->gem.offset;
1089 u64 addr = va->va.addr;
1090 u64 range = va->va.range;
1092 bool merge = !!va->gem.obj;
1099 ret = op_unmap_cb(ops, priv, va, merge);
1106 ret = op_unmap_cb(ops, priv, va, merge);
1114 .va.addr = req_end,
1115 .va.range = range - req_range,
1120 .va = va,
1132 .va.addr = addr,
1133 .va.range = ls_range,
1137 struct drm_gpuva_op_unmap u = { .va = va };
1159 .va.addr = req_end,
1160 .va.range = end - req_end,
1177 ret = op_unmap_cb(ops, priv, va, merge);
1184 ret = op_unmap_cb(ops, priv, va, merge);
1192 .va.addr = req_end,
1193 .va.range = end - req_end,
1198 .va = va,
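The remap arithmetic above is easiest to check with concrete numbers. Take an existing mapping with addr = 0x0000 and range = 0x4000 (so end = 0x4000) and a request with req_addr = 0x1000 and req_range = 0x1000 (req_end = 0x2000) that must not be merged: the prev part keeps .va.addr = addr = 0x0000 with ls_range = req_addr - addr = 0x1000, the next part keeps .va.addr = req_end = 0x2000 with .va.range = end - req_end = 0x2000, and the map for [0x1000, 0x2000) is emitted last. The ops-list variant of the same walk, as a sketch:

struct drm_gpuva_ops *ops;

ops = drm_gpuva_sm_map_ops_create(mgr, 0x1000, 0x1000, obj, offset);
if (IS_ERR(ops))
        return PTR_ERR(ops);

/* With the layout above: REMAP (prev [0x0000, 0x1000),
 * next [0x2000, 0x4000)) followed by MAP [0x1000, 0x2000). */
drm_gpuva_ops_free(mgr, ops);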
1220 struct drm_gpuva *va, *next;
1227 drm_gpuva_for_each_va_range_safe(va, next, mgr, req_addr, req_end) {
1230 struct drm_gem_object *obj = va->gem.obj;
1231 u64 offset = va->gem.offset;
1232 u64 addr = va->va.addr;
1233 u64 range = va->va.range;
1237 prev.va.addr = addr;
1238 prev.va.range = req_addr - addr;
1246 next.va.addr = req_end;
1247 next.va.range = end - req_end;
1255 struct drm_gpuva_op_unmap unmap = { .va = va };
1264 ret = op_unmap_cb(ops, priv, va, false);
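drm_gpuva_sm_unmap() mirrors the map path: a request that punches a hole in the middle of one mapping produces a single remap op with both a prev and a next part (built at source lines 1237-1247), while mappings fully contained in the request become plain unmaps (line 1264). A sketch, assuming driver_ctx is the hypothetical priv pointer later handed to the sm_step callbacks:

ret = drm_gpuva_sm_unmap(mgr, driver_ctx, req_addr, req_range);
if (ret)
        return ret;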
1609 struct drm_gpuva *va;
1619 drm_gpuva_for_each_va_range(va, mgr, addr, end) {
1627 op->prefetch.va = va;
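The prefetch builder wraps every mapping overlapping the range in a DRM_GPUVA_OP_PREFETCH op. A usage sketch, with driver_vm_prefetch() as a hypothetical driver hook:

struct drm_gpuva_ops *ops;
struct drm_gpuva_op *op;

ops = drm_gpuva_prefetch_ops_create(mgr, addr, range);
if (IS_ERR(ops))
        return PTR_ERR(ops);

drm_gpuva_for_each_op(op, ops)
        driver_vm_prefetch(op->prefetch.va);    /* hypothetical */

drm_gpuva_ops_free(mgr, ops);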
1664 struct drm_gpuva *va;
1675 drm_gem_for_each_gpuva(va, obj) {
1683 op->unmap.va = va;
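Finally, the per-GEM builder walks obj->gpuva.list via drm_gem_for_each_gpuva() and emits one unmap op per mapping; the lock protecting that list must be held across the call. A teardown sketch:

struct drm_gpuva_ops *ops;
struct drm_gpuva_op *op;

ops = drm_gpuva_gem_unmap_ops_create(mgr, obj);
if (IS_ERR(ops))
        return PTR_ERR(ops);

drm_gpuva_for_each_op(op, ops) {
        struct drm_gpuva *va = op->unmap.va;

        drm_gpuva_unlink(va);
        drm_gpuva_unmap(&op->unmap);
        kfree(va);
}

drm_gpuva_ops_free(mgr, ops);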