Lines matching refs:ttm in drivers/gpu/drm/radeon/radeon_ttm.c

44 #include <drm/ttm/ttm_bo.h>
45 #include <drm/ttm/ttm_placement.h>
46 #include <drm/ttm/ttm_range_manager.h>
47 #include <drm/ttm/ttm_tt.h>
55 static int radeon_ttm_tt_bind(struct ttm_device *bdev, struct ttm_tt *ttm,
57 static void radeon_ttm_tt_unbind(struct ttm_device *bdev, struct ttm_tt *ttm);
204 r = radeon_ttm_tt_bind(bo->bdev, bo->ttm, new_mem);
216 bo->ttm == NULL)) {
228 radeon_ttm_tt_unbind(bo->bdev, bo->ttm);
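
The three matches above (source lines 204, 216 and 228) sit in radeon_bo_move(). A minimal sketch of the branches they belong to; the control flow between the matched lines is inferred from TTM conventions rather than copied from the file, and the VRAM copy paths are elided:

static int radeon_bo_move(struct ttm_buffer_object *bo, bool evict,
                          struct ttm_operation_ctx *ctx,
                          struct ttm_resource *new_mem,
                          struct ttm_place *hop)
{
        struct ttm_resource *old_mem = bo->resource;
        int r;

        /* Moving into GTT: bind the pages behind bo->ttm first (line 204). */
        if (new_mem->mem_type == TTM_PL_TT) {
                r = radeon_ttm_tt_bind(bo->bdev, bo->ttm, new_mem);
                if (r)
                        return r;
        }

        /* Nothing populated yet, so a null move suffices (line 216). */
        if (!old_mem || (old_mem->mem_type == TTM_PL_SYSTEM &&
                         bo->ttm == NULL)) {
                ttm_bo_move_null(bo, new_mem);
                return 0;
        }

        /* GTT -> system: drop the GART mapping again (line 228). */
        if (old_mem->mem_type == TTM_PL_TT &&
            new_mem->mem_type == TTM_PL_SYSTEM) {
                radeon_ttm_tt_unbind(bo->bdev, bo->ttm);
                ttm_resource_free(bo, &bo->resource);
                ttm_bo_assign_mem(bo, new_mem);
                return 0;
        }

        /* ... VRAM <-> GTT copies elided ... */
        return 0;
}
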
321 struct ttm_tt ttm;
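
Line 321 opens the driver-private wrapper around struct ttm_tt. A sketch of the structure, with the field list inferred from the gtt-> usages in the matches below (offset, userptr, usermm, userflags, bound), not quoted verbatim:

struct radeon_ttm_tt {
        struct ttm_tt           ttm;       /* must stay first: the (void *)ttm casts below rely on it */
        u64                     offset;    /* GART offset while bound */
        uint64_t                userptr;   /* start of the userptr range, if any */
        struct mm_struct        *usermm;   /* mm the userptr belongs to */
        uint32_t                userflags; /* RADEON_GEM_USERPTR_* flags */
        bool                    bound;     /* pages currently mapped in the GART */
};
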
331 static int radeon_ttm_tt_pin_userptr(struct ttm_device *bdev, struct ttm_tt *ttm)
334 struct radeon_ttm_tt *gtt = (void *)ttm;
348 unsigned long end = gtt->userptr + (u64)ttm->num_pages * PAGE_SIZE;
356 unsigned num_pages = ttm->num_pages - pinned;
358 struct page **pages = ttm->pages + pinned;
367 } while (pinned < ttm->num_pages);
369 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0,
370 (u64)ttm->num_pages << PAGE_SHIFT,
375 r = dma_map_sgtable(rdev->dev, ttm->sg, direction, 0);
379 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,
380 ttm->num_pages);
385 kfree(ttm->sg);
388 release_pages(ttm->pages, pinned);
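
A sketch of radeon_ttm_tt_pin_userptr() (lines 331-388) reconstructed around the matched fragments: pin the user pages, build an sg table over them, and DMA-map it. The error-path labels and the exact get_user_pages() signature vary across kernel versions and are assumptions here:

static int radeon_ttm_tt_pin_userptr(struct ttm_device *bdev, struct ttm_tt *ttm)
{
        struct radeon_device *rdev = radeon_get_rdev(bdev);
        struct radeon_ttm_tt *gtt = (void *)ttm;
        unsigned pinned = 0;
        int r;

        int write = !(gtt->userflags & RADEON_GEM_USERPTR_READONLY);
        enum dma_data_direction direction = write ?
                DMA_BIDIRECTIONAL : DMA_TO_DEVICE;

        /* Only the mm that registered the userptr may pin it. */
        if (current->mm != gtt->usermm)
                return -EPERM;

        if (gtt->userflags & RADEON_GEM_USERPTR_ANONONLY) {
                /* Only anonymous memory may be pinned; line 348 computes
                 * the end of the range for this VMA check. */
                unsigned long end = gtt->userptr + (u64)ttm->num_pages * PAGE_SIZE;
                struct vm_area_struct *vma;

                vma = find_vma(gtt->usermm, gtt->userptr);
                if (!vma || vma->vm_file || vma->vm_end < end)
                        return -EPERM;
        }

        do {
                /* Pin in chunks until every page is covered (lines 356-367). */
                unsigned num_pages = ttm->num_pages - pinned;
                uint64_t userptr = gtt->userptr + pinned * PAGE_SIZE;
                struct page **pages = ttm->pages + pinned;

                r = get_user_pages(userptr, num_pages,
                                   write ? FOLL_WRITE : 0, pages);
                if (r < 0)
                        goto release_pages;

                pinned += r;
        } while (pinned < ttm->num_pages);

        /* Wrap the pinned pages in an sg table and map it for the device
         * (lines 369-380). */
        r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0,
                                      (u64)ttm->num_pages << PAGE_SHIFT,
                                      GFP_KERNEL);
        if (r)
                goto release_sg;

        r = dma_map_sgtable(rdev->dev, ttm->sg, direction, 0);
        if (r)
                goto release_sg;

        drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,
                                       ttm->num_pages);
        return 0;

release_sg:
        kfree(ttm->sg);
release_pages:
        release_pages(ttm->pages, pinned);
        return r;
}
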
392 static void radeon_ttm_tt_unpin_userptr(struct ttm_device *bdev, struct ttm_tt *ttm)
395 struct radeon_ttm_tt *gtt = (void *)ttm;
403 if (!ttm->sg || !ttm->sg->sgl)
407 dma_unmap_sgtable(rdev->dev, ttm->sg, direction, 0);
409 for_each_sgtable_page(ttm->sg, &sg_iter, 0) {
418 sg_free_table(ttm->sg);
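
The mirror of the above, sketched from the matches at lines 392-418: unmap the sg table, dirty and release every page, then free the table. The per-page loop body between the matches is inferred:

static void radeon_ttm_tt_unpin_userptr(struct ttm_device *bdev, struct ttm_tt *ttm)
{
        struct radeon_device *rdev = radeon_get_rdev(bdev);
        struct radeon_ttm_tt *gtt = (void *)ttm;
        struct sg_page_iter sg_iter;

        int write = !(gtt->userflags & RADEON_GEM_USERPTR_READONLY);
        enum dma_data_direction direction = write ?
                DMA_BIDIRECTIONAL : DMA_TO_DEVICE;

        /* Double check that the table was not freed already (line 403). */
        if (!ttm->sg || !ttm->sg->sgl)
                return;

        dma_unmap_sgtable(rdev->dev, ttm->sg, direction, 0);    /* line 407 */

        for_each_sgtable_page(ttm->sg, &sg_iter, 0) {           /* line 409 */
                struct page *page = sg_page_iter_page(&sg_iter);

                if (write)
                        set_page_dirty(page);
                mark_page_accessed(page);
                put_page(page);
        }

        sg_free_table(ttm->sg);                                 /* line 418 */
}
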
421 static bool radeon_ttm_backend_is_bound(struct ttm_tt *ttm)
423 struct radeon_ttm_tt *gtt = (void *)ttm;
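
A sketch completing radeon_ttm_backend_is_bound() (lines 421-423); the return of gtt->bound is inferred from the bound field's role:

static bool radeon_ttm_backend_is_bound(struct ttm_tt *ttm)
{
        struct radeon_ttm_tt *gtt = (void *)ttm;

        return gtt->bound;
}
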
429 struct ttm_tt *ttm,
432 struct radeon_ttm_tt *gtt = (void *)ttm;
442 radeon_ttm_tt_pin_userptr(bdev, ttm);
447 if (!ttm->num_pages) {
449 ttm->num_pages, bo_mem, ttm);
451 if (ttm->caching == ttm_cached)
453 r = radeon_gart_bind(rdev, gtt->offset, ttm->num_pages,
454 ttm->pages, gtt->ttm.dma_address, flags);
457 ttm->num_pages, (unsigned)gtt->offset);
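
A sketch of radeon_ttm_backend_bind() (lines 429-457): pin any userptr pages, then map everything into the GART. The flag handling around the matched lines is inferred; RADEON_GART_PAGE_* are the driver's GART flags:

static int radeon_ttm_backend_bind(struct ttm_device *bdev,
                                   struct ttm_tt *ttm,
                                   struct ttm_resource *bo_mem)
{
        struct radeon_ttm_tt *gtt = (void *)ttm;
        struct radeon_device *rdev = radeon_get_rdev(bdev);
        uint32_t flags = RADEON_GART_PAGE_VALID | RADEON_GART_PAGE_READ |
                         RADEON_GART_PAGE_WRITE;
        int r;

        if (gtt->bound)
                return 0;

        if (gtt->userptr) {
                radeon_ttm_tt_pin_userptr(bdev, ttm);           /* line 442 */
                flags &= ~RADEON_GART_PAGE_WRITE;
        }

        gtt->offset = (u64)bo_mem->start << PAGE_SHIFT;
        if (!ttm->num_pages) {                                  /* line 447 */
                WARN(1, "nothing to bind %u pages for mreg %p back %p!\n",
                     ttm->num_pages, bo_mem, ttm);              /* line 449 */
        }
        if (ttm->caching == ttm_cached)                         /* line 451 */
                flags |= RADEON_GART_PAGE_SNOOP;
        r = radeon_gart_bind(rdev, gtt->offset, ttm->num_pages,
                             ttm->pages, gtt->ttm.dma_address, flags);
        if (r) {
                DRM_ERROR("failed to bind %u pages at 0x%08X\n",
                          ttm->num_pages, (unsigned)gtt->offset);
                return r;
        }
        gtt->bound = true;
        return 0;
}
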
464 static void radeon_ttm_backend_unbind(struct ttm_device *bdev, struct ttm_tt *ttm)
466 struct radeon_ttm_tt *gtt = (void *)ttm;
470 radeon_ttm_tt_unpin_userptr(bdev, ttm);
475 radeon_gart_unbind(rdev, gtt->offset, ttm->num_pages);
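
The matching teardown, sketched from lines 464-475: unpin any userptr pages, then clear the GART entries. The bound-state check is an assumption:

static void radeon_ttm_backend_unbind(struct ttm_device *bdev, struct ttm_tt *ttm)
{
        struct radeon_ttm_tt *gtt = (void *)ttm;
        struct radeon_device *rdev = radeon_get_rdev(bdev);

        if (gtt->userptr)
                radeon_ttm_tt_unpin_userptr(bdev, ttm);         /* line 470 */

        if (!gtt->bound)
                return;

        radeon_gart_unbind(rdev, gtt->offset, ttm->num_pages);  /* line 475 */
        gtt->bound = false;
}
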
480 static void radeon_ttm_backend_destroy(struct ttm_device *bdev, struct ttm_tt *ttm)
482 struct radeon_ttm_tt *gtt = (void *)ttm;
484 ttm_tt_fini(&gtt->ttm);
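
A sketch completing radeon_ttm_backend_destroy() (lines 480-484): finish the embedded ttm_tt and free the wrapper; the kfree() is inferred as the counterpart of the allocation in tt_create:

static void radeon_ttm_backend_destroy(struct ttm_device *bdev, struct ttm_tt *ttm)
{
        struct radeon_ttm_tt *gtt = (void *)ttm;

        ttm_tt_fini(&gtt->ttm);
        kfree(gtt);
}
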
515 if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) {
519 return &gtt->ttm;
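
A sketch of radeon_ttm_tt_create() around lines 515-519: allocate the wrapper and initialise the sg-backed TT. The caching choice is simplified here; the real code derives it from the BO's RADEON_GEM_GTT_* flags:

static struct ttm_tt *radeon_ttm_tt_create(struct ttm_buffer_object *bo,
                                           uint32_t page_flags)
{
        struct radeon_ttm_tt *gtt;
        enum ttm_caching caching = ttm_cached; /* assumption: simplified */

        gtt = kzalloc(sizeof(struct radeon_ttm_tt), GFP_KERNEL);
        if (gtt == NULL)
                return NULL;

        if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags, caching)) {
                kfree(gtt);
                return NULL;
        }
        return &gtt->ttm;
}
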
523 struct ttm_tt *ttm)
530 if (!ttm)
532 return container_of(ttm, struct radeon_ttm_tt, ttm);
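
A sketch of the lookup helper (lines 523-532): NULL for AGP-managed TTs (the AGP guard is inferred from the dispatch wrappers further down), otherwise container_of() back to the wrapper:

static struct radeon_ttm_tt *radeon_ttm_tt_to_gtt(struct radeon_device *rdev,
                                                  struct ttm_tt *ttm)
{
#if IS_ENABLED(CONFIG_AGP)
        if (rdev->flags & RADEON_IS_AGP)
                return NULL;
#endif
        if (!ttm)
                return NULL;
        return container_of(ttm, struct radeon_ttm_tt, ttm);
}
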
536 struct ttm_tt *ttm,
540 struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
541 bool slave = !!(ttm->page_flags & TTM_TT_FLAG_EXTERNAL);
544 ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL);
545 if (!ttm->sg)
548 ttm->page_flags |= TTM_TT_FLAG_EXTERNAL;
552 if (slave && ttm->sg) {
553 drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,
554 ttm->num_pages);
558 return ttm_pool_alloc(&rdev->mman.bdev.pool, ttm, ctx);
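
A sketch of radeon_ttm_tt_populate() (lines 536-558): userptr TTs get a bare sg table and are marked external, dma-buf imports ("slave" TTs) reuse their imported sg table, and everything else is allocated from the TTM page pool. The branch boundaries between the matched lines are inferred:

static int radeon_ttm_tt_populate(struct ttm_device *bdev,
                                  struct ttm_tt *ttm,
                                  struct ttm_operation_ctx *ctx)
{
        struct radeon_device *rdev = radeon_get_rdev(bdev);
        struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
        bool slave = !!(ttm->page_flags & TTM_TT_FLAG_EXTERNAL);

        if (gtt && gtt->userptr) {
                ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL);
                if (!ttm->sg)
                        return -ENOMEM;

                ttm->page_flags |= TTM_TT_FLAG_EXTERNAL;        /* line 548 */
                return 0;
        }

        if (slave && ttm->sg) {
                drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,
                                               ttm->num_pages);
                return 0;
        }

        return ttm_pool_alloc(&rdev->mman.bdev.pool, ttm, ctx); /* line 558 */
}
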
561 static void radeon_ttm_tt_unpopulate(struct ttm_device *bdev, struct ttm_tt *ttm)
564 struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
565 bool slave = !!(ttm->page_flags & TTM_TT_FLAG_EXTERNAL);
567 radeon_ttm_tt_unbind(bdev, ttm);
570 kfree(ttm->sg);
571 ttm->page_flags &= ~TTM_TT_FLAG_EXTERNAL;
578 return ttm_pool_free(&rdev->mman.bdev.pool, ttm);
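
The mirror image, sketched from lines 561-578: unbind first (which also unpins userptr pages), free the userptr sg table or skip slave TTs, and return pool pages otherwise:

static void radeon_ttm_tt_unpopulate(struct ttm_device *bdev, struct ttm_tt *ttm)
{
        struct radeon_device *rdev = radeon_get_rdev(bdev);
        struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
        bool slave = !!(ttm->page_flags & TTM_TT_FLAG_EXTERNAL);

        radeon_ttm_tt_unbind(bdev, ttm);                        /* line 567 */

        if (gtt && gtt->userptr) {
                kfree(ttm->sg);                                 /* line 570 */
                ttm->page_flags &= ~TTM_TT_FLAG_EXTERNAL;       /* line 571 */
                return;
        }

        if (slave)
                return;

        return ttm_pool_free(&rdev->mman.bdev.pool, ttm);       /* line 578 */
}
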
582 struct ttm_tt *ttm, uint64_t addr,
585 struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
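
A sketch of radeon_ttm_tt_set_userptr() (lines 582-585): record the user address, the owning mm and the flags on the wrapper. The field assignments are inferred from the struct sketched above:

int radeon_ttm_tt_set_userptr(struct radeon_device *rdev,
                              struct ttm_tt *ttm, uint64_t addr,
                              uint32_t flags)
{
        struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);

        if (gtt == NULL)
                return -EINVAL;

        gtt->userptr = addr;
        gtt->usermm = current->mm;
        gtt->userflags = flags;
        return 0;
}
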
597 struct ttm_tt *ttm)
602 return ttm_agp_is_bound(ttm);
604 return radeon_ttm_backend_is_bound(ttm);
608 struct ttm_tt *ttm,
619 return ttm_agp_bind(ttm, bo_mem);
622 return radeon_ttm_backend_bind(bdev, ttm, bo_mem);
626 struct ttm_tt *ttm)
632 ttm_agp_unbind(ttm);
636 radeon_ttm_backend_unbind(bdev, ttm);
640 struct ttm_tt *ttm)
646 ttm_agp_destroy(ttm);
650 radeon_ttm_backend_destroy(bdev, ttm);
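
The matches at lines 597-650 belong to four thin wrappers (is_bound, bind, unbind, destroy) that all dispatch the same way: to the TTM AGP helpers on AGP hardware, to the GART backend otherwise. A sketch of one representative; the other three follow the same shape:

static int radeon_ttm_tt_bind(struct ttm_device *bdev,
                              struct ttm_tt *ttm,
                              struct ttm_resource *bo_mem)
{
#if IS_ENABLED(CONFIG_AGP)
        struct radeon_device *rdev = radeon_get_rdev(bdev);

        if (rdev->flags & RADEON_IS_AGP)
                return ttm_agp_bind(ttm, bo_mem);               /* line 619 */
#endif
        return radeon_ttm_backend_bind(bdev, ttm, bo_mem);      /* line 622 */
}
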
654 struct ttm_tt *ttm)
656 struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
665 struct ttm_tt *ttm)
667 struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
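
A sketch of the two userptr accessors (lines 654-667); both return false for non-radeon TTs, and the exact return expressions are inferred from the wrapper fields:

bool radeon_ttm_tt_has_userptr(struct radeon_device *rdev, struct ttm_tt *ttm)
{
        struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);

        return gtt ? !!gtt->userptr : false;
}

bool radeon_ttm_tt_is_readonly(struct radeon_device *rdev, struct ttm_tt *ttm)
{
        struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);

        return gtt ? !!(gtt->userflags & RADEON_GEM_USERPTR_READONLY) : false;
}
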
761 DRM_INFO("radeon: ttm finalized\n");