Lines matching defs:mgr in the DRM GPUVA manager (drm_gpuva_mgr.c), each prefixed with its source line number
427 * int driver_mapping_create(struct drm_gpuva_manager *mgr,
435 * ops = drm_gpuva_sm_map_ops_create(mgr, addr, range,
451 * drm_gpuva_map(mgr, va, &op->map);
507 * struct drm_gpuva_manager *mgr;
523 * int driver_mapping_create(struct drm_gpuva_manager *mgr,
532 * ctx.mgr = mgr;
543 * ret = drm_gpuva_sm_map(mgr, &ctx, addr, range, obj, offset);
557 * drm_gpuva_map(ctx->mgr, ctx->new_va, &op->map);
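The two excerpts above come from the file's kernel-doc examples for split and merge: the first builds a list of operations up front via drm_gpuva_sm_map_ops_create(), the second drives the driver's callbacks directly through drm_gpuva_sm_map() with a context struct passed as priv. A condensed sketch of the first flow, assuming hypothetical driver_* helpers:

    /* Sketch of the ops-list flow; driver_* names are placeholders. */
    static int driver_mapping_create(struct drm_gpuva_manager *mgr,
                                     u64 addr, u64 range,
                                     struct drm_gem_object *obj, u64 offset)
    {
            struct drm_gpuva_ops *ops;
            struct drm_gpuva_op *op;
            int ret = 0;

            ops = drm_gpuva_sm_map_ops_create(mgr, addr, range, obj, offset);
            if (IS_ERR(ops))
                    return PTR_ERR(ops);

            drm_gpuva_for_each_op(op, ops) {
                    switch (op->op) {
                    case DRM_GPUVA_OP_MAP: {
                            struct drm_gpuva *va = driver_gpuva_alloc();

                            if (!va) {
                                    ret = -ENOMEM; /* real code must unwind */
                                    break;
                            }
                            /* Program the page tables, then commit the new
                             * mapping into the interval tree. */
                            drm_gpuva_map(mgr, va, &op->map);
                            break;
                    }
                    case DRM_GPUVA_OP_REMAP:
                    case DRM_GPUVA_OP_UNMAP:
                            /* Shrink or tear down overlapping mappings. */
                            break;
                    default:
                            ret = -EINVAL;
                            break;
                    }
            }

            drm_gpuva_ops_free(mgr, ops);
            return ret;
    }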
612 static int __drm_gpuva_insert(struct drm_gpuva_manager *mgr,
626 drm_gpuva_in_mm_range(struct drm_gpuva_manager *mgr, u64 addr, u64 range)
629 u64 mm_start = mgr->mm_start;
630 u64 mm_end = mm_start + mgr->mm_range;
636 drm_gpuva_in_kernel_node(struct drm_gpuva_manager *mgr, u64 addr, u64 range)
639 u64 kstart = mgr->kernel_alloc_node.va.addr;
640 u64 krange = mgr->kernel_alloc_node.va.range;
647 drm_gpuva_range_valid(struct drm_gpuva_manager *mgr,
651 drm_gpuva_in_mm_range(mgr, addr, range) &&
652 !drm_gpuva_in_kernel_node(mgr, addr, range);
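Read together, the three helpers above define what a valid range is: it must not wrap, must lie inside [mm_start, mm_start + mm_range), and must not overlap the reserved kernel_alloc_node. A standalone restatement of that predicate, with the overlap test spelled out (the fragments only show the composition):

    /* Restates drm_gpuva_range_valid() from the fragments above. */
    static bool range_valid(u64 mm_start, u64 mm_range,
                            u64 kstart, u64 krange,
                            u64 addr, u64 range)
    {
            u64 end = addr + range;

            /* addr < end rejects both zero-sized and wrapping ranges. */
            if (addr >= end)
                    return false;
            /* Must fall inside the managed VA space... */
            if (addr < mm_start || end > mm_start + mm_range)
                    return false;
            /* ...and must not intersect the reserved kernel node. */
            if (krange && addr < kstart + krange && end > kstart)
                    return false;

            return true;
    }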
657 * @mgr: pointer to the &drm_gpuva_manager to initialize
667 * Note that @mgr must be cleared to 0 before calling this function. The given
671 drm_gpuva_manager_init(struct drm_gpuva_manager *mgr,
677 mgr->rb.tree = RB_ROOT_CACHED;
678 INIT_LIST_HEAD(&mgr->rb.list);
681 mgr->mm_start = start_offset;
682 mgr->mm_range = range;
684 mgr->name = name ? name : "unknown";
685 mgr->ops = ops;
687 memset(&mgr->kernel_alloc_node, 0, sizeof(struct drm_gpuva));
690 mgr->kernel_alloc_node.va.addr = reserve_offset;
691 mgr->kernel_alloc_node.va.range = reserve_range;
695 __drm_gpuva_insert(mgr, &mgr->kernel_alloc_node);
702 * @mgr: pointer to the &drm_gpuva_manager to clean up
708 drm_gpuva_manager_destroy(struct drm_gpuva_manager *mgr)
710 mgr->name = NULL;
712 if (mgr->kernel_alloc_node.va.range)
713 __drm_gpuva_remove(&mgr->kernel_alloc_node);
715 WARN(!RB_EMPTY_ROOT(&mgr->rb.tree.rb_root),
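A minimal init/teardown pairing, assuming the full parameter list of drm_gpuva_manager_init() (name, VA-space start/range, reserved kernel node, fn_ops); the name, sizes, and driver_gpuva_ops are illustrative:

    static const struct drm_gpuva_fn_ops driver_gpuva_ops;

    static void driver_vm_setup(struct drm_gpuva_manager *mgr)
    {
            /* The kernel-doc above (line 667) requires mgr to be zeroed
             * before initialization. Here we manage a 48-bit VA space and
             * reserve the first 4 KiB as the kernel_alloc_node. */
            drm_gpuva_manager_init(mgr, "example-vm",
                                   0, 1ULL << 48, /* start_offset, range */
                                   0, 0x1000,     /* reserve_offset, reserve_range */
                                   &driver_gpuva_ops);
    }

    static void driver_vm_teardown(struct drm_gpuva_manager *mgr)
    {
            /* Removes the kernel_alloc_node itself and warns if any other
             * mappings are still present (see lines 712-715). */
            drm_gpuva_manager_destroy(mgr);
    }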
721 __drm_gpuva_insert(struct drm_gpuva_manager *mgr,
727 if (drm_gpuva_it_iter_first(&mgr->rb.tree,
732 va->mgr = mgr;
734 drm_gpuva_it_insert(va, &mgr->rb.tree);
740 head = &mgr->rb.list;
749 * @mgr: the &drm_gpuva_manager to insert the &drm_gpuva in
762 drm_gpuva_insert(struct drm_gpuva_manager *mgr,
768 if (unlikely(!drm_gpuva_range_valid(mgr, addr, range)))
771 return __drm_gpuva_insert(mgr, va);
778 drm_gpuva_it_remove(va, &va->mgr->rb.tree);
795 struct drm_gpuva_manager *mgr = va->mgr;
797 if (unlikely(va == &mgr->kernel_alloc_node)) {
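drm_gpuva_insert() takes addr/range from the va itself and validates them (returning -EINVAL for invalid ranges and, per the interval-tree check at line 727, -EEXIST on overlap); drm_gpuva_remove() is the mirror operation and, per line 797, refuses the kernel_alloc_node. A sketch of manual insertion, assuming the va.addr/va.range and gem.obj/gem.offset members of struct drm_gpuva:

    static int driver_insert_mapping(struct drm_gpuva_manager *mgr,
                                     u64 addr, u64 range,
                                     struct drm_gem_object *obj, u64 offset)
    {
            struct drm_gpuva *va;
            int ret;

            va = kzalloc(sizeof(*va), GFP_KERNEL);
            if (!va)
                    return -ENOMEM;

            va->va.addr = addr;
            va->va.range = range;
            va->gem.obj = obj;
            va->gem.offset = offset;

            ret = drm_gpuva_insert(mgr, va);
            if (ret)
                    kfree(va);

            return ret;
    }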
856 * @mgr: the &drm_gpuva_manager to search in
863 drm_gpuva_find_first(struct drm_gpuva_manager *mgr,
868 return drm_gpuva_it_iter_first(&mgr->rb.tree, addr, last);
874 * @mgr: the &drm_gpuva_manager to search in
881 drm_gpuva_find(struct drm_gpuva_manager *mgr,
886 va = drm_gpuva_find_first(mgr, addr, range);
903 * @mgr: the &drm_gpuva_manager to search in
914 drm_gpuva_find_prev(struct drm_gpuva_manager *mgr, u64 start)
916 if (!drm_gpuva_range_valid(mgr, start - 1, 1))
919 return drm_gpuva_it_iter_first(&mgr->rb.tree, start - 1, start);
925 * @mgr: the &drm_gpuva_manager to search in
936 drm_gpuva_find_next(struct drm_gpuva_manager *mgr, u64 end)
938 if (!drm_gpuva_range_valid(mgr, end, 1))
941 return drm_gpuva_it_iter_first(&mgr->rb.tree, end, end + 1);
948 * @mgr: the &drm_gpuva_manager to check the range for
955 drm_gpuva_interval_empty(struct drm_gpuva_manager *mgr, u64 addr, u64 range)
957 return !drm_gpuva_find_first(mgr, addr, range);
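The lookup helpers cover the common queries: drm_gpuva_find_first() returns the first mapping overlapping the interval, drm_gpuva_find() additionally insists on an exact addr/range match, drm_gpuva_find_prev()/drm_gpuva_find_next() look just below/above a boundary (see the interval queries at lines 919 and 941), and drm_gpuva_interval_empty() tests for any overlap at all. For example:

    static void driver_inspect_range(struct drm_gpuva_manager *mgr,
                                     u64 addr, u64 range)
    {
            struct drm_gpuva *va;

            /* Exact match on both addr and range, or NULL. */
            va = drm_gpuva_find(mgr, addr, range);
            if (va)
                    pr_info("exact mapping at 0x%llx\n", va->va.addr);

            /* Mapping covering the address just below addr, if any. */
            va = drm_gpuva_find_prev(mgr, addr);
            if (va)
                    pr_info("neighbour below ends near 0x%llx\n", addr);

            /* Mapping covering addr + range, if any. */
            va = drm_gpuva_find_next(mgr, addr + range);
            if (va)
                    pr_info("neighbour above starts near 0x%llx\n", addr + range);

            if (drm_gpuva_interval_empty(mgr, addr, range))
                    pr_info("range is entirely unmapped\n");
    }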
964 * @mgr: the &drm_gpuva_manager
968 * Initializes the @va from the @op and inserts it into the given @mgr.
971 drm_gpuva_map(struct drm_gpuva_manager *mgr,
976 drm_gpuva_insert(mgr, va);
996 struct drm_gpuva_manager *mgr = curr->mgr;
1002 drm_gpuva_insert(mgr, prev);
1007 drm_gpuva_insert(mgr, next);
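drm_gpuva_map() and drm_gpuva_remap() (the fragment at lines 996-1007 belongs to the latter, inserting the surviving prev/next pieces) are the commit helpers meant to be called from the sm_step callbacks. A sketch of a remap step, assuming preallocated vas carried in a hypothetical driver context, mirroring the ctx->new_va pattern visible at line 557:

    struct driver_ctx {
            struct drm_gpuva *prev_va;
            struct drm_gpuva *next_va;
    };

    static int driver_sm_step_remap(struct drm_gpuva_op *op, void *priv)
    {
            struct driver_ctx *ctx = priv;

            /* Update the page tables for the shrunk mapping first; the
             * helper then removes op->remap.unmap->va from the tree and
             * inserts whichever of prev/next is non-NULL. */
            drm_gpuva_remap(ctx->prev_va, ctx->next_va, &op->remap);
            return 0;
    }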
1074 __drm_gpuva_sm_map(struct drm_gpuva_manager *mgr,
1083 if (unlikely(!drm_gpuva_range_valid(mgr, req_addr, req_range)))
1086 drm_gpuva_for_each_va_range_safe(va, next, mgr, req_addr, req_end) {
1216 __drm_gpuva_sm_unmap(struct drm_gpuva_manager *mgr,
1224 if (unlikely(!drm_gpuva_range_valid(mgr, req_addr, req_range)))
1227 drm_gpuva_for_each_va_range_safe(va, next, mgr, req_addr, req_end) {
1275 * @mgr: the &drm_gpuva_manager representing the GPU VA space
1306 drm_gpuva_sm_map(struct drm_gpuva_manager *mgr, void *priv,
1310 const struct drm_gpuva_fn_ops *ops = mgr->ops;
1317 return __drm_gpuva_sm_map(mgr, ops, priv,
1325 * @mgr: the &drm_gpuva_manager representing the GPU VA space
1351 drm_gpuva_sm_unmap(struct drm_gpuva_manager *mgr, void *priv,
1354 const struct drm_gpuva_fn_ops *ops = mgr->ops;
1360 return __drm_gpuva_sm_unmap(mgr, ops, priv,
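Both entry points bail out with -EINVAL unless mgr->ops provides the required callbacks: drm_gpuva_sm_map() needs sm_step_map, sm_step_remap and sm_step_unmap, while drm_gpuva_sm_unmap() needs the latter two. The wiring, as a sketch around the hypothetical driver_sm_step_* callbacks (only the remap step was sketched above):

    static const struct drm_gpuva_fn_ops driver_gpuva_fn_ops = {
            .sm_step_map   = driver_sm_step_map,
            .sm_step_remap = driver_sm_step_remap,
            .sm_step_unmap = driver_sm_step_unmap,
    };

    static int driver_unmap_range(struct drm_gpuva_manager *mgr,
                                  struct driver_ctx *ctx,
                                  u64 addr, u64 range)
    {
            /* Walks the mappings in [addr, addr + range) and invokes
             * sm_step_remap()/sm_step_unmap() with ctx as priv. */
            return drm_gpuva_sm_unmap(mgr, ctx, addr, range);
    }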
1366 gpuva_op_alloc(struct drm_gpuva_manager *mgr)
1368 const struct drm_gpuva_fn_ops *fn = mgr->ops;
1383 gpuva_op_free(struct drm_gpuva_manager *mgr,
1386 const struct drm_gpuva_fn_ops *fn = mgr->ops;
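gpuva_op_alloc() and gpuva_op_free() defer to the driver's op_alloc/op_free callbacks when set, falling back to kzalloc()/kfree() otherwise, so a driver can embed each op in a larger structure. A sketch, assuming the op_alloc(void)/op_free(op) callback shapes:

    struct driver_op {
            struct drm_gpuva_op base;
            struct list_head link; /* hypothetical driver-side bookkeeping */
    };

    static struct drm_gpuva_op *driver_op_alloc(void)
    {
            struct driver_op *dop = kzalloc(sizeof(*dop), GFP_KERNEL);

            return dop ? &dop->base : NULL;
    }

    static void driver_op_free(struct drm_gpuva_op *op)
    {
            kfree(container_of(op, struct driver_op, base));
    }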
1399 struct drm_gpuva_manager *mgr;
1402 struct drm_gpuva_manager *mgr = args->mgr;
1406 op = gpuva_op_alloc(mgr);
1445 gpuva_op_free(mgr, op);
1458 * @mgr: the &drm_gpuva_manager representing the GPU VA space
1489 drm_gpuva_sm_map_ops_create(struct drm_gpuva_manager *mgr,
1495 struct drm_gpuva_manager *mgr;
1506 args.mgr = mgr;
1509 ret = __drm_gpuva_sm_map(mgr, &gpuva_list_ops, &args,
1518 drm_gpuva_ops_free(mgr, ops);
1526 * @mgr: the &drm_gpuva_manager representing the GPU VA space
1553 drm_gpuva_sm_unmap_ops_create(struct drm_gpuva_manager *mgr,
1558 struct drm_gpuva_manager *mgr;
1569 args.mgr = mgr;
1572 ret = __drm_gpuva_sm_unmap(mgr, &gpuva_list_ops, &args,
1580 drm_gpuva_ops_free(mgr, ops);
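drm_gpuva_sm_unmap_ops_create() builds the same kind of list for tearing a range down; no GEM object is involved and only remap and unmap operations can appear in it. A sketch of draining such a list:

    static int driver_unmap_range_ops(struct drm_gpuva_manager *mgr,
                                      u64 addr, u64 range)
    {
            struct drm_gpuva_ops *ops;
            struct drm_gpuva_op *op;

            ops = drm_gpuva_sm_unmap_ops_create(mgr, addr, range);
            if (IS_ERR(ops))
                    return PTR_ERR(ops);

            drm_gpuva_for_each_op(op, ops) {
                    /* Handle DRM_GPUVA_OP_REMAP / DRM_GPUVA_OP_UNMAP;
                     * an unmap request never produces map operations. */
            }

            drm_gpuva_ops_free(mgr, ops);
            return 0;
    }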
1587 * @mgr: the &drm_gpuva_manager representing the GPU VA space
1604 drm_gpuva_prefetch_ops_create(struct drm_gpuva_manager *mgr,
1619 drm_gpuva_for_each_va_range(va, mgr, addr, end) {
1620 op = gpuva_op_alloc(mgr);
1634 drm_gpuva_ops_free(mgr, ops);
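drm_gpuva_prefetch_ops_create() simply walks the existing mappings in the range (the drm_gpuva_for_each_va_range() loop at line 1619) and emits one prefetch op per mapping. Usage, as a sketch:

    static int driver_prefetch_range(struct drm_gpuva_manager *mgr,
                                     u64 addr, u64 range)
    {
            struct drm_gpuva_ops *ops;
            struct drm_gpuva_op *op;

            ops = drm_gpuva_prefetch_ops_create(mgr, addr, range);
            if (IS_ERR(ops))
                    return PTR_ERR(ops);

            drm_gpuva_for_each_op(op, ops) {
                    /* One DRM_GPUVA_OP_PREFETCH per existing mapping;
                     * op->prefetch.va points at that mapping. */
            }

            drm_gpuva_ops_free(mgr, ops);
            return 0;
    }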
1641 * @mgr: the &drm_gpuva_manager representing the GPU VA space
1659 drm_gpuva_gem_unmap_ops_create(struct drm_gpuva_manager *mgr,
1676 op = gpuva_op_alloc(mgr);
1690 drm_gpuva_ops_free(mgr, ops);
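drm_gpuva_gem_unmap_ops_create() generates one unmap op for every mapping backed by the given GEM object by walking the object's GPUVA list; the caller is responsible for protecting that list against concurrent access (the object's dma-resv lock). A sketch, e.g. for eviction:

    static int driver_evict_obj(struct drm_gpuva_manager *mgr,
                                struct drm_gem_object *obj)
    {
            struct drm_gpuva_ops *ops;
            struct drm_gpuva_op *op;

            ops = drm_gpuva_gem_unmap_ops_create(mgr, obj);
            if (IS_ERR(ops))
                    return PTR_ERR(ops);

            drm_gpuva_for_each_op(op, ops) {
                    /* One DRM_GPUVA_OP_UNMAP per mapping of obj;
                     * op->unmap.va points at that mapping. */
            }

            /* Every list must eventually go back through
             * drm_gpuva_ops_free() (line 1704). */
            drm_gpuva_ops_free(mgr, ops);
            return 0;
    }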
1697 * @mgr: the &drm_gpuva_manager the ops were created for
1704 drm_gpuva_ops_free(struct drm_gpuva_manager *mgr,
1718 gpuva_op_free(mgr, op);