Lines matching defs:rdev in drivers/gpu/drm/radeon/radeon_ttm.c (each entry keeps its original source line number; annotations between entries are editorial sketches, not source).

56 static int radeon_ttm_debugfs_init(struct radeon_device *rdev);
57 static void radeon_ttm_debugfs_fini(struct radeon_device *rdev);
66 struct radeon_device *rdev;
69 rdev = container_of(mman, struct radeon_device, mman);
70 return rdev;
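
Lines 66-70 are the body of radeon_get_rdev(), the helper used throughout this file to recover the device from a TTM handle. A sketch of the full function, assuming the usual two-step container_of() walk (bdev is embedded in radeon_mman, which is embedded in radeon_device):

    static struct radeon_device *radeon_get_rdev(struct ttm_bo_device *bdev)
    {
            struct radeon_mman *mman;
            struct radeon_device *rdev;

            /* walk back out of the two embedded structures */
            mman = container_of(bdev, struct radeon_mman, bdev);
            rdev = container_of(mman, struct radeon_device, mman);
            return rdev;
    }
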
73 static int radeon_ttm_init_vram(struct radeon_device *rdev)
75 return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_VRAM,
76 false, rdev->mc.real_vram_size >> PAGE_SHIFT);
79 static int radeon_ttm_init_gtt(struct radeon_device *rdev)
81 return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_TT,
82 true, rdev->mc.gtt_size >> PAGE_SHIFT);
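
Both init helpers hand their domain to TTM's range-allocator-backed manager. The arguments, per the ~5.10 ttm_range_man_init() signature these calls match, annotated as a comment:

    /*
     * ttm_range_man_init(bdev, mem_type, use_tt, p_size):
     *   TTM_PL_VRAM: use_tt = false -- VRAM is reached through the MMIO
     *                aperture, so no ttm_tt page array backs it
     *   TTM_PL_TT:   use_tt = true  -- GTT is backed by system pages that
     *                must be bound into the GART
     * p_size is the pool size in pages, hence the >> PAGE_SHIFT above.
     */
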
107 if (rbo->rdev->ring[radeon_copy_ring_index(rbo->rdev)].ready == false)
109 else if (rbo->rdev->mc.visible_vram_size < rbo->rdev->mc.real_vram_size &&
110 bo->mem.start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) {
111 unsigned fpfn = rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
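
Lines 107-111 are from the VRAM case of the eviction-placement callback. Roughly: with no usable copy ring, evict through the CPU domain; otherwise, if the BO sits in the CPU-visible slice of VRAM, raise the placement's first page-frame number (fpfn) past the visible boundary so the buffer is retried in invisible VRAM before spilling to GTT. A compressed sketch, with the placement bookkeeping elided and in_visible_vram standing in for the line-110 test:

    if (!rbo->rdev->ring[radeon_copy_ring_index(rbo->rdev)].ready)
            /* no copy engine: evict via CPU copies to system memory */
            radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_CPU);
    else if (in_visible_vram) {
            /* prefer the CPU-invisible tail of VRAM: restrict the VRAM
             * placement to pages above the visible aperture */
            unsigned fpfn = rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT;
            /* ... raise each VRAM placement's fpfn to at least fpfn ... */
    } else
            radeon_ttm_placement_from_domain(rbo, RADEON_GEM_DOMAIN_GTT);
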
145 struct radeon_device *rdev = radeon_get_rdev(bo->bdev);
147 if (radeon_ttm_tt_has_userptr(rdev, bo->ttm))
158 struct radeon_device *rdev;
164 rdev = radeon_get_rdev(bo->bdev);
165 ridx = radeon_copy_ring_index(rdev);
171 old_start += rdev->mc.vram_start;
174 old_start += rdev->mc.gtt_start;
182 new_start += rdev->mc.vram_start;
185 new_start += rdev->mc.gtt_start;
191 if (!rdev->ring[ridx].ready) {
199 fence = radeon_copy(rdev, old_start, new_start, num_pages, bo->base.resv);
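
Lines 158-199 belong to the blit-move path: the page offset assigned by the range manager (mem->start) is rebased onto the aperture base of the source and destination domains, then radeon_copy() queues the transfer on ring ridx and hands back a fence. A sketch of the rebasing done for both ends, with gpu_addr as an illustrative local:

    u64 gpu_addr = (u64)mem->start << PAGE_SHIFT;
    switch (mem->mem_type) {
    case TTM_PL_VRAM:
            gpu_addr += rdev->mc.vram_start;   /* VRAM aperture base */
            break;
    case TTM_PL_TT:
            gpu_addr += rdev->mc.gtt_start;    /* GART aperture base */
            break;
    }
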
302 struct radeon_device *rdev;
316 rdev = radeon_get_rdev(bo->bdev);
329 if (!rdev->ring[radeon_copy_ring_index(rdev)].ready ||
330 rdev->asic->copy.copy == NULL) {
357 atomic64_add((u64)bo->num_pages << PAGE_SHIFT, &rdev->num_bytes_moved);
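
Lines 302-357 are from radeon_bo_move(): when the copy ring is down or the ASIC lacks a copy hook (lines 329-330), the driver falls back to TTM's CPU memcpy move; either way the moved byte count lands in num_bytes_moved, which the command-submission path reads to throttle migrations. A sketch of that decision, assuming a ttm_bo_move_memcpy()-style fallback as in the ~5.10 driver (the real function routes this through a goto label):

    if (!rdev->ring[radeon_copy_ring_index(rdev)].ready ||
        rdev->asic->copy.copy == NULL) {
            /* no hardware blit available: plain CPU copy through TTM */
            r = ttm_bo_move_memcpy(bo, ctx, new_mem);
    } else {
            r = radeon_move_blit(bo, evict, new_mem, old_mem);
    }
    /* accounting consumed by the CS throttling code */
    atomic64_add((u64)bo->num_pages << PAGE_SHIFT, &rdev->num_bytes_moved);
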
363 struct radeon_device *rdev = radeon_get_rdev(bdev);
372 if (rdev->flags & RADEON_IS_AGP) {
375 rdev->mc.agp_base;
376 mem->bus.is_iomem = !rdev->ddev->agp->cant_use_aperture;
383 if ((mem->bus.offset + bus_size) > rdev->mc.visible_vram_size)
385 mem->bus.offset += rdev->mc.aper_base;
408 rdev->ddev->hose->dense_mem_base;
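
Lines 363-408 are from io_mem_reserve, which translates a placement into a CPU-mappable bus address: AGP buffers are rebased on agp_base (with is_iomem depending on whether the chipset aperture is CPU-usable), VRAM mappings are refused past visible_vram_size and rebased onto the PCI aperture aper_base, and line 408 is the Alpha-only dense_mem_base quirk. A sketch of the VRAM case:

    case TTM_PL_VRAM:
            mem->bus.offset = mem->start << PAGE_SHIFT;
            /* refuse CPU mappings beyond the visible aperture */
            if ((mem->bus.offset + bus_size) > rdev->mc.visible_vram_size)
                    return -EINVAL;
            mem->bus.offset += rdev->mc.aper_base;   /* PCI BAR base */
            mem->bus.is_iomem = true;
            break;
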
433 struct radeon_device *rdev = radeon_get_rdev(bdev);
475 r = dma_map_sgtable(rdev->dev, ttm->sg, direction, 0);
494 struct radeon_device *rdev = radeon_get_rdev(bdev);
507 dma_unmap_sgtable(rdev->dev, ttm->sg, direction, 0);
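
Lines 433-507 come from the userptr pin/unpin pair: pinned user pages are collected into ttm->sg and mapped for device access with dma_map_sgtable(), then unmapped on the way out. The DMA direction follows the mapping's writability; a sketch, assuming the RADEON_GEM_USERPTR_READONLY check this driver uses:

    bool write = !(gtt->userflags & RADEON_GEM_USERPTR_READONLY);
    enum dma_data_direction direction =
            write ? DMA_BIDIRECTIONAL : DMA_TO_DEVICE;

    /* map the pinned user pages for device access */
    r = dma_map_sgtable(rdev->dev, ttm->sg, direction, 0);
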
533 struct radeon_device *rdev = radeon_get_rdev(bdev);
553 r = radeon_gart_bind(rdev, gtt->offset, ttm->num_pages,
567 struct radeon_device *rdev = radeon_get_rdev(bdev);
575 radeon_gart_unbind(rdev, gtt->offset, ttm->num_pages);
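
Lines 533-575 are the backend bind/unbind callbacks: binding writes one GART entry per system page starting at the byte offset the TT range manager assigned (gtt->offset), and unbinding clears the same span. A sketch of the bind call with the argument list this driver usually passes (flags being the RADEON_GART_PAGE_* bits computed from the placement):

    /* one GART page-table entry per system page, starting at the byte
     * offset assigned to this buffer in the GART aperture */
    r = radeon_gart_bind(rdev, gtt->offset, ttm->num_pages,
                         ttm->pages, gtt->ttm.dma_address, flags);
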
594 struct radeon_device *rdev;
597 rdev = radeon_get_rdev(bo->bdev);
599 if (rdev->flags & RADEON_IS_AGP) {
600 return ttm_agp_tt_create(bo, rdev->ddev->agp->bridge,
616 static struct radeon_ttm_tt *radeon_ttm_tt_to_gtt(struct radeon_device *rdev,
620 if (rdev->flags & RADEON_IS_AGP)
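
Lines 616-620 open radeon_ttm_tt_to_gtt(), the downcast used by everything below. On AGP the ttm was created by ttm_agp_tt_create() (line 600) and is not a radeon_ttm_tt at all, so the helper returns NULL and every caller must treat NULL as "not ours". A sketch of the probable full body:

    static struct radeon_ttm_tt *radeon_ttm_tt_to_gtt(struct radeon_device *rdev,
                                                      struct ttm_tt *ttm)
    {
            /* AGP ttms come from ttm_agp_tt_create(): not a radeon_ttm_tt */
            if (rdev->flags & RADEON_IS_AGP)
                    return NULL;
            if (ttm == NULL)
                    return NULL;
            return container_of(ttm, struct radeon_ttm_tt, ttm.ttm);
    }
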
633 struct radeon_device *rdev = radeon_get_rdev(bdev);
634 struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
655 if (rdev->flags & RADEON_IS_AGP) {
661 if (rdev->need_swiotlb && swiotlb_nr_tbl()) {
662 return ttm_dma_populate(&gtt->ttm, rdev->dev, ctx);
666 return ttm_populate_and_map_pages(rdev->dev, &gtt->ttm, ctx);
671 struct radeon_device *rdev = radeon_get_rdev(bdev);
672 struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
685 if (rdev->flags & RADEON_IS_AGP) {
692 if (rdev->need_swiotlb && swiotlb_nr_tbl()) {
693 ttm_dma_unpopulate(&gtt->ttm, rdev->dev);
698 ttm_unmap_and_unpopulate_pages(rdev->dev, &gtt->ttm);
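
Lines 633-698 are the populate/unpopulate pair. Per the ~5.10 driver, populate dispatches in order: userptr ttms just get an sg_table allocated (the pages arrive through the pin path above), AGP is left to the ttm_agp helpers, swiotlb-constrained devices take the coherent DMA pool, and everything else goes through the regular page pool with streaming mappings. A sketch of the dispatch tail, matching the listed calls:

    if (rdev->need_swiotlb && swiotlb_nr_tbl()) {
            /* streaming DMA would need bounce buffers here: allocate
             * from the coherent pool instead */
            return ttm_dma_populate(&gtt->ttm, rdev->dev, ctx);
    }
    /* common case: page-pool pages plus streaming DMA mappings */
    return ttm_populate_and_map_pages(rdev->dev, &gtt->ttm, ctx);
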
701 int radeon_ttm_tt_set_userptr(struct radeon_device *rdev,
705 struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
720 struct radeon_device *rdev = radeon_get_rdev(bdev);
721 if (rdev->flags & RADEON_IS_AGP)
732 struct radeon_device *rdev = radeon_get_rdev(bdev);
738 if (rdev->flags & RADEON_IS_AGP)
749 struct radeon_device *rdev = radeon_get_rdev(bdev);
751 if (rdev->flags & RADEON_IS_AGP) {
763 struct radeon_device *rdev = radeon_get_rdev(bdev);
765 if (rdev->flags & RADEON_IS_AGP) {
775 bool radeon_ttm_tt_has_userptr(struct radeon_device *rdev,
778 struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
786 bool radeon_ttm_tt_is_readonly(struct radeon_device *rdev,
789 struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);
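
The two query helpers at lines 775-789 lean on the NULL convention above: an AGP ttm can never be a userptr mapping, so a NULL gtt short-circuits to false. A sketch of the has_userptr variant:

    bool radeon_ttm_tt_has_userptr(struct radeon_device *rdev,
                                   struct ttm_tt *ttm)
    {
            struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(rdev, ttm);

            if (gtt == NULL)
                    return false;   /* AGP ttm: never a userptr */
            return !!gtt->userptr;
    }
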
813 int radeon_ttm_init(struct radeon_device *rdev)
818 r = ttm_bo_device_init(&rdev->mman.bdev,
820 rdev->ddev->anon_inode->i_mapping,
821 rdev->ddev->vma_offset_manager,
822 dma_addressing_limited(&rdev->pdev->dev));
827 rdev->mman.initialized = true;
829 r = radeon_ttm_init_vram(rdev);
835 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
837 r = radeon_bo_create(rdev, 256 * 1024, PAGE_SIZE, true,
839 NULL, &rdev->stolen_vga_memory);
843 r = radeon_bo_reserve(rdev->stolen_vga_memory, false);
846 r = radeon_bo_pin(rdev->stolen_vga_memory, RADEON_GEM_DOMAIN_VRAM, NULL);
847 radeon_bo_unreserve(rdev->stolen_vga_memory);
849 radeon_bo_unref(&rdev->stolen_vga_memory);
853 (unsigned) (rdev->mc.real_vram_size / (1024 * 1024)));
855 r = radeon_ttm_init_gtt(rdev);
861 (unsigned)(rdev->mc.gtt_size / (1024 * 1024)));
863 r = radeon_ttm_debugfs_init(rdev);
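
Lines 813-863 give the bring-up order of radeon_ttm_init(); a comment summary:

    /*
     * radeon_ttm_init() bring-up order, per the lines above:
     *   1. ttm_bo_device_init()    -- register with the TTM core, using the
     *                                 DRM device's inode mapping and VMA
     *                                 offset manager
     *   2. radeon_ttm_init_vram()  -- VRAM range manager, then clamp the
     *                                 active VRAM size to the visible part
     *   3. pin a 256 KiB VRAM BO   -- rdev->stolen_vga_memory, reserving
     *                                 the start of VRAM
     *   4. radeon_ttm_init_gtt()   -- GTT range manager
     *   5. radeon_ttm_debugfs_init()
     */
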
871 void radeon_ttm_fini(struct radeon_device *rdev)
875 if (!rdev->mman.initialized)
877 radeon_ttm_debugfs_fini(rdev);
878 if (rdev->stolen_vga_memory) {
879 r = radeon_bo_reserve(rdev->stolen_vga_memory, false);
881 radeon_bo_unpin(rdev->stolen_vga_memory);
882 radeon_bo_unreserve(rdev->stolen_vga_memory);
884 radeon_bo_unref(&rdev->stolen_vga_memory);
886 ttm_range_man_fini(&rdev->mman.bdev, TTM_PL_VRAM);
887 ttm_range_man_fini(&rdev->mman.bdev, TTM_PL_TT);
888 ttm_bo_device_release(&rdev->mman.bdev);
889 radeon_gart_fini(rdev);
890 rdev->mman.initialized = false;
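
Teardown at lines 871-890 mirrors that order in reverse. A sketch of the elided control flow around lines 879-884: the stolen BO must be re-reserved before it can be unpinned, hence the reserve/unpin/unreserve dance before the final unref:

    r = radeon_bo_reserve(rdev->stolen_vga_memory, false);
    if (r == 0) {
            radeon_bo_unpin(rdev->stolen_vga_memory);
            radeon_bo_unreserve(rdev->stolen_vga_memory);
    }
    radeon_bo_unref(&rdev->stolen_vga_memory);
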
896 void radeon_ttm_set_active_vram_size(struct radeon_device *rdev, u64 size)
900 if (!rdev->mman.initialized)
903 man = ttm_manager_type(&rdev->mman.bdev, TTM_PL_VRAM);
911 struct radeon_device *rdev;
918 rdev = radeon_get_rdev(bo->bdev);
919 down_read(&rdev->pm.mclk_lock);
921 up_read(&rdev->pm.mclk_lock);
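
Lines 911-921 are from the fault handler: pm.mclk_lock is held for read across the fault so that a memory reclock cannot happen while the CPU mapping is being populated and VRAM is being touched. In outline:

    down_read(&rdev->pm.mclk_lock);
    /* ... reserve the BO and service the fault through the
     *     ttm_bo_vm helpers ... */
    up_read(&rdev->pm.mclk_lock);
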
936 struct radeon_device *rdev = file_priv->minor->dev->dev_private;
938 if (rdev == NULL)
941 r = ttm_bo_mmap(filp, vma, &rdev->mman.bdev);
956 struct radeon_device *rdev = dev->dev_private;
957 struct ttm_resource_manager *man = ttm_manager_type(&rdev->mman.bdev, ttm_pl);
979 struct radeon_device *rdev = inode->i_private;
980 i_size_write(inode, rdev->mc.mc_vram_size);
988 struct radeon_device *rdev = f->private_data;
999 if (*pos >= rdev->mc.mc_vram_size)
1002 spin_lock_irqsave(&rdev->mmio_idx_lock, flags);
1004 if (rdev->family >= CHIP_CEDAR)
1007 spin_unlock_irqrestore(&rdev->mmio_idx_lock, flags);
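
Lines 979-1007 implement the radeon_vram debugfs file: open() sizes the file to mc_vram_size, and reads go word by word through the MM_INDEX/MM_DATA register window under mmio_idx_lock; line 1004's family check covers chips whose VRAM exceeds the 31-bit window and therefore need the high-index register too. A sketch of one word, assuming the MM_INDEX/MM_DATA access pattern this driver uses elsewhere:

    spin_lock_irqsave(&rdev->mmio_idx_lock, flags);
    WREG32(RADEON_MM_INDEX, ((uint32_t)*pos) | 0x80000000);
    if (rdev->family >= CHIP_CEDAR)
            WREG32(RADEON_MM_INDEX_HI, *pos >> 31);
    value = RREG32(RADEON_MM_DATA);
    spin_unlock_irqrestore(&rdev->mmio_idx_lock, flags);
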
1031 struct radeon_device *rdev = inode->i_private;
1032 i_size_write(inode, rdev->mc.gtt_size);
1040 struct radeon_device *rdev = f->private_data;
1051 if (p >= rdev->gart.num_cpu_pages)
1054 page = rdev->gart.pages[p];
1060 kunmap(rdev->gart.pages[p]);
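
The radeon_gtt counterpart at lines 1031-1060 walks the GART page array instead: each read kmaps the system page backing the current GART slot, copies it out, and kunmaps it; slots past gart.num_cpu_pages end the read. A sketch of one page's worth, assuming unbound slots read back as zeroes:

    page = rdev->gart.pages[p];
    if (page) {
            ptr = kmap(page) + off;
            r = copy_to_user(buf, ptr, cur_size);
            kunmap(rdev->gart.pages[p]);
    } else {
            /* unbound GART slot: nothing mapped, return zeroes */
            r = clear_user(buf, cur_size);
    }
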
1085 static int radeon_ttm_debugfs_init(struct radeon_device *rdev)
1090 struct drm_minor *minor = rdev->ddev->primary;
1093 rdev->mman.vram = debugfs_create_file("radeon_vram", S_IFREG | S_IRUGO,
1094 root, rdev,
1097 rdev->mman.gtt = debugfs_create_file("radeon_gtt", S_IFREG | S_IRUGO,
1098 root, rdev, &radeon_ttm_gtt_fops);
1103 if (!(rdev->need_swiotlb && swiotlb_nr_tbl()))
1107 return radeon_debugfs_add_files(rdev, radeon_ttm_debugfs_list, count);
1114 static void radeon_ttm_debugfs_fini(struct radeon_device *rdev)
1118 debugfs_remove(rdev->mman.vram);
1119 rdev->mman.vram = NULL;
1121 debugfs_remove(rdev->mman.gtt);
1122 rdev->mman.gtt = NULL;