/kernel/linux/linux-6.6/drivers/gpu/drm/loongson/
lsdc_ttm.c
     53: if (lbo->tbo.base.size <= PAGE_SIZE)  // in lsdc_bo_set_placement()
     95: lsdc_ttm_tt_create(struct ttm_buffer_object *tbo, uint32_t page_flags)  // in lsdc_ttm_tt_create(), argument
    104: ret = ttm_sg_tt_init(tt, tbo, page_flags, ttm_cached);  // in lsdc_ttm_tt_create()
    141: static void lsdc_bo_evict_flags(struct ttm_buffer_object *tbo,  // in lsdc_bo_evict_flags(), argument
    144: struct ttm_resource *resource = tbo->resource;  // in lsdc_bo_evict_flags()
    145: struct lsdc_bo *lbo = to_lsdc_bo(tbo);  // in lsdc_bo_evict_flags()
    160: static int lsdc_bo_move(struct ttm_buffer_object *tbo,  // in lsdc_bo_move(), argument
    166: struct drm_device *ddev = tbo->base.dev;  // in lsdc_bo_move()
    167: struct ttm_resource *old_mem = tbo->resource;  // in lsdc_bo_move()
    168: struct lsdc_bo *lbo = to_lsdc_bo(tbo);  // in lsdc_bo_move()
    256: struct ttm_buffer_object *tbo = &lbo->tbo;  // in lsdc_bo_gpu_offset(), local
    273: struct ttm_buffer_object *tbo = &lbo->tbo;  // in lsdc_bo_size(), local
    291: struct ttm_buffer_object *tbo = &lbo->tbo;  // in lsdc_bo_pin(), local
    326: struct ttm_buffer_object *tbo = &lbo->tbo;  // in lsdc_bo_unpin(), local
    346: struct ttm_buffer_object *tbo = &lbo->tbo;  // in lsdc_bo_ref(), local
    353: struct ttm_buffer_object *tbo = &lbo->tbo;  // in lsdc_bo_unref(), local
    360: struct ttm_buffer_object *tbo = &lbo->tbo;  // in lsdc_bo_kmap(), local
    421: lsdc_bo_destroy(struct ttm_buffer_object *tbo)  // in lsdc_bo_destroy(), argument
    444: struct ttm_buffer_object *tbo;  // in lsdc_bo_create(), local
    [all...]
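Taken together, the lsdc_bo_pin()/lsdc_bo_unpin() locals above trace the standard TTM pin sequence: reserve the embedded BO, validate it into the wanted placement if it is not already pinned, then bump the pin count. A minimal sketch of that sequence, using only the TTM calls visible in this listing; the helper name and the error handling are illustrative, not the driver's actual code:

#include <drm/ttm/ttm_bo.h>		/* linux-6.6 header path */
#include <drm/ttm/ttm_placement.h>

/* Hypothetical helper; mirrors the flow implied by the lsdc_bo_pin() hits. */
static int example_bo_pin(struct lsdc_bo *lbo, struct ttm_placement *placement)
{
	struct ttm_buffer_object *tbo = &lbo->tbo;	/* the local at line 291 */
	struct ttm_operation_ctx ctx = { false, false };
	int ret;

	ret = ttm_bo_reserve(tbo, true, false, NULL);	/* interruptible */
	if (ret)
		return ret;

	if (tbo->pin_count == 0) {
		/* migrate into the requested placement before pinning */
		ret = ttm_bo_validate(tbo, placement, &ctx);
		if (ret)
			goto out;
	}

	ttm_bo_pin(tbo);	/* bumps pin_count; requires the BO reserved */
out:
	ttm_bo_unreserve(tbo);
	return ret;
}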
lsdc_gem.c
     53: struct ttm_buffer_object *tbo = to_ttm_bo(obj);  // in lsdc_gem_prime_get_sg_table(), local
     54: struct ttm_tt *tt = tbo->ttm;  // in lsdc_gem_prime_get_sg_table()
     66: struct ttm_buffer_object *tbo = to_ttm_bo(obj);  // in lsdc_gem_object_free(), local
     68: if (tbo)  // in lsdc_gem_object_free()
     69: ttm_bo_put(tbo);  // in lsdc_gem_object_free()
     74: struct ttm_buffer_object *tbo = to_ttm_bo(obj);  // in lsdc_gem_object_vmap(), local
     75: struct lsdc_bo *lbo = to_lsdc_bo(tbo);  // in lsdc_gem_object_vmap()
     89: ret = ttm_bo_vmap(tbo, &lbo->map);  // in lsdc_gem_object_vmap()
    106: struct ttm_buffer_object *tbo = to_ttm_bo(obj);  // in lsdc_gem_object_vunmap(), local
    107: struct lsdc_bo *lbo = to_lsdc_bo(tbo);  // in lsdc_gem_object_vunmap()
    124: struct ttm_buffer_object *tbo = to_ttm_bo(obj);  // in lsdc_gem_object_mmap(), local
    295: struct ttm_buffer_object *tbo = &lbo->tbo;  // in lsdc_show_buffer_object(), local
    [all...]
lsdc_ttm.h
     24: struct ttm_buffer_object tbo;  // member
     52: static inline struct lsdc_bo *to_lsdc_bo(struct ttm_buffer_object *tbo)  // in to_lsdc_bo(), argument
     54: return container_of(tbo, struct lsdc_bo, tbo);  // in to_lsdc_bo()
     59: return container_of(gem, struct lsdc_bo, tbo.base);  // in gem_to_lsdc_bo()
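These four lsdc_ttm.h hits are the whole trick that every driver in this listing repeats: embed struct ttm_buffer_object in the driver BO and recover the wrapper with container_of(), either from the TTM object itself or from the GEM object nested inside it (tbo.base). A self-contained sketch assembled from the fragments above (header paths as of linux-6.6; the driver-private fields are elided):

#include <linux/container_of.h>
#include <drm/drm_gem.h>
#include <drm/ttm/ttm_bo.h>		/* linux-6.6 header path */

struct lsdc_bo {
	struct ttm_buffer_object tbo;	/* member, line 24 */
	/* driver-private fields follow in the real struct */
};

/* TTM hands callbacks a ttm_buffer_object; step back out to the wrapper. */
static inline struct lsdc_bo *to_lsdc_bo(struct ttm_buffer_object *tbo)
{
	return container_of(tbo, struct lsdc_bo, tbo);
}

/* GEM hands callbacks a drm_gem_object, which TTM embeds as tbo.base. */
static inline struct lsdc_bo *gem_to_lsdc_bo(struct drm_gem_object *gem)
{
	return container_of(gem, struct lsdc_bo, tbo.base);
}

Because container_of() is pure pointer arithmetic, both converters cost nothing at runtime; the same pattern shows up below as ttm_to_amdgpu_bo(), to_qxl_bo() and to_vmw_bo().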
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
     56: static void amdgpu_bo_destroy(struct ttm_buffer_object *tbo)  // in amdgpu_bo_destroy(), argument
     58: struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);  // in amdgpu_bo_destroy()
     62: if (bo->tbo.base.import_attach)  // in amdgpu_bo_destroy()
     63: drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  // in amdgpu_bo_destroy()
     64: drm_gem_object_release(&bo->tbo.base);  // in amdgpu_bo_destroy()
     69: static void amdgpu_bo_user_destroy(struct ttm_buffer_object *tbo)  // in amdgpu_bo_user_destroy(), argument
     71: struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);  // in amdgpu_bo_user_destroy()
     76: amdgpu_bo_destroy(tbo);  // in amdgpu_bo_user_destroy()
     79: static void amdgpu_bo_vm_destroy(struct ttm_buffer_object *tbo)  // in amdgpu_bo_vm_destroy(), argument
    867: struct ttm_buffer_object *tbo;  // in amdgpu_bo_unref(), local
    [all...]
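The lone local at line 867 belongs to a double-pointer unref idiom that amdgpu, radeon (radeon_object.c line 267 below) and lsdc (lsdc_ttm.c line 353 above) all share: take the last reference through a local TTM pointer and NULL the caller's pointer, so a stale driver pointer cannot outlive the object. A sketch of the shape, not a verbatim copy of the file:

/* Hypothetical helper mirroring the amdgpu_bo_unref() pattern. */
void example_bo_unref(struct amdgpu_bo **bo)
{
	struct ttm_buffer_object *tbo;

	if ((*bo) == NULL)
		return;

	tbo = &((*bo)->tbo);
	*bo = NULL;		/* clear the caller's pointer first */
	ttm_bo_put(tbo);	/* may free the BO via amdgpu_bo_destroy() */
}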
amdgpu_dma_buf.c
     59: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  // in amdgpu_dma_buf_attach()
     89: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  // in amdgpu_dma_buf_detach()
    145: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  // in amdgpu_dma_buf_map()
    149: if (!bo->tbo.pin_count) {  // in amdgpu_dma_buf_map()
    160: r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  // in amdgpu_dma_buf_map()
    164: } else if (!(amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type) &  // in amdgpu_dma_buf_map()
    169: switch (bo->tbo.resource->mem_type) {  // in amdgpu_dma_buf_map()
    172: bo->tbo.ttm->pages,  // in amdgpu_dma_buf_map()
    173: bo->tbo.ttm->num_pages);  // in amdgpu_dma_buf_map()
    183: r = amdgpu_vram_mgr_alloc_sgt(adev, bo->tbo ...  // in amdgpu_dma_buf_map(), snippet truncated
    [all...]
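The amdgpu_dma_buf_map() hits sketch the exporter's map path: if the BO is not pinned it is first validated into GTT so the importer can reach it, and the sg_table is then built from whichever backing store the BO ended up in. A condensed, hedged reconstruction; attach checks and error unwinding are elided, amdgpu helper names are taken from the hits, everything else is illustrative:

/* Hypothetical condensation of the map path shown above. */
static struct sg_table *example_dma_buf_map(struct dma_buf_attachment *attach,
					    enum dma_data_direction dir)
{
	struct drm_gem_object *obj = attach->dmabuf->priv;
	struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj);
	struct ttm_operation_ctx ctx = { false, false };
	struct sg_table *sgt;
	int r;

	if (!bo->tbo.pin_count) {
		/* move the BO into a domain the importer can map (line 149) */
		amdgpu_bo_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT);
		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
		if (r)
			return ERR_PTR(r);
	}

	switch (bo->tbo.resource->mem_type) {
	case TTM_PL_TT:
		/* system pages: wrap the ttm_tt page array (lines 172-173) */
		sgt = drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages,
					    bo->tbo.ttm->num_pages);
		break;
	case TTM_PL_VRAM:
		/* VRAM has no struct page; the real code builds the table
		 * via amdgpu_vram_mgr_alloc_sgt() (line 183, truncated) */
		sgt = ERR_PTR(-EOPNOTSUPP);	/* elided in this sketch */
		break;
	default:
		sgt = ERR_PTR(-EINVAL);
		break;
	}
	return sgt;
}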
amdgpu_object.h
     96: /* Protected by tbo.reserved */
    101: struct ttm_buffer_object tbo;  // member
    159: static inline struct amdgpu_bo *ttm_to_amdgpu_bo(struct ttm_buffer_object *tbo)  // in ttm_to_amdgpu_bo(), argument
    161: return container_of(tbo, struct amdgpu_bo, tbo);  // in ttm_to_amdgpu_bo()
    204: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  // in amdgpu_bo_reserve()
    207: r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  // in amdgpu_bo_reserve()
    218: ttm_bo_unreserve(&bo->tbo);  // in amdgpu_bo_unreserve()
    223: return bo->tbo.base.size;  // in amdgpu_bo_size()
    228: return bo->tbo ...  // in amdgpu_bo_ngpu_pages(), snippet truncated
    [all...]
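The amdgpu_object.h hits are thin inline wrappers that forward driver-level calls to TTM through the embedded tbo. Reconstructed from the fragments above with _sketch suffixes to mark them as non-verbatim; the real amdgpu_bo_reserve() also logs failures, which is omitted here:

static inline int amdgpu_bo_reserve_sketch(struct amdgpu_bo *bo, bool no_intr)
{
	/* !no_intr: reserve interruptibly unless the caller forbids it */
	return ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);
}

static inline void amdgpu_bo_unreserve_sketch(struct amdgpu_bo *bo)
{
	ttm_bo_unreserve(&bo->tbo);
}

static inline unsigned long amdgpu_bo_size_sketch(struct amdgpu_bo *bo)
{
	return bo->tbo.base.size;	/* size lives on the GEM object in 6.6 */
}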
amdgpu_amdkfd_gpuvm.c
    286: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  // in amdgpu_amdkfd_release_notify()
    321: ret = amdgpu_gem_object_create(adev, mem->bo->tbo.base.size, 1,  // in create_dmamap_sg_bo()
    323: ttm_bo_type_sg, mem->bo->tbo.base.resv, &gem_obj, 0);  // in create_dmamap_sg_bo()
    344: * NOTE: Must be called with BO reserved i.e. bo->tbo.resv->lock held.
    358: dma_resv_replace_fences(bo->tbo.base.resv, ef->base.context,  // in amdgpu_amdkfd_remove_eviction_fence()
    392: BUG_ON(!dma_resv_trylock(bo->tbo.base.resv));  // in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
    394: dma_resv_unlock(bo->tbo.base.resv);  // in amdgpu_amdkfd_remove_fence_on_pt_pd_bos()
    406: if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm),  // in amdgpu_amdkfd_bo_validate()
    412: ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  // in amdgpu_amdkfd_bo_validate()
    437: struct amdgpu_device *adev = amdgpu_ttm_adev(pd->tbo ...  // in vm_validate_pt_pd_bos(), snippet truncated
    [all...]
amdgpu_vm.c
    184: if (bo->tbo.type == ttm_bo_type_kernel)  // in amdgpu_vm_bo_evicted()
    289: if (!bo || bo->tbo.type != ttm_bo_type_kernel)  // in amdgpu_vm_bo_reset_state_machine()
    320: if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv)  // in amdgpu_vm_bo_base_init()
    323: dma_resv_assert_held(vm->root.bo->tbo.base.resv);  // in amdgpu_vm_bo_base_init()
    325: ttm_bo_set_bulk_move(&bo->tbo, &vm->lru_bulk_move);  // in amdgpu_vm_bo_base_init()
    326: if (bo->tbo.type == ttm_bo_type_kernel && bo->parent)  // in amdgpu_vm_bo_base_init()
    332: amdgpu_mem_type_to_domain(bo->tbo.resource->mem_type))  // in amdgpu_vm_bo_base_init()
    356: return drm_exec_prepare_obj(exec, &vm->root.bo->tbo.base,  // in amdgpu_vm_lock_pd()
    479: if (bo->tbo ...  // in amdgpu_vm_validate_pt_bos(), snippet truncated
    [all...]
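Line 320 is worth pausing on: amdgpu marks a BO as private to a VM by making it share the reservation object of the VM's root page-directory BO, so membership is a plain pointer compare. As a sketch, with the helper name invented for illustration:

/* Hypothetical helper illustrating the line-320 test. */
static bool example_bo_is_per_vm(struct amdgpu_bo *bo, struct amdgpu_vm *vm)
{
	/* shared dma_resv pointer == BO is private to (always valid in) vm */
	return bo->tbo.base.resv == vm->root.bo->tbo.base.resv;
}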
/kernel/linux/linux-6.6/drivers/gpu/drm/radeon/
radeon_object.c
     52: static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)  // in radeon_ttm_bo_destroy(), argument
     56: bo = container_of(tbo, struct radeon_bo, tbo);  // in radeon_ttm_bo_destroy()
     63: if (bo->tbo.base.import_attach)  // in radeon_ttm_bo_destroy()
     64: drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  // in radeon_ttm_bo_destroy()
     65: drm_gem_object_release(&bo->tbo.base);  // in radeon_ttm_bo_destroy()
    155: drm_gem_private_object_init(rdev->ddev, &bo->tbo.base, size);  // in radeon_bo_create()
    205: r = ttm_bo_init_validate(&rdev->mman.bdev, &bo->tbo, type,  // in radeon_bo_create()
    224: r = dma_resv_wait_timeout(bo->tbo ...  // in radeon_bo_kmap(), snippet truncated
    267: struct ttm_buffer_object *tbo;  // in radeon_bo_unref(), local
    [all...]
radeon_object.h
     68: r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  // in radeon_bo_reserve()
     79: ttm_bo_unreserve(&bo->tbo);  // in radeon_bo_unreserve()
     96: rdev = radeon_get_rdev(bo->tbo.bdev);  // in radeon_bo_gpu_offset()
     98: switch (bo->tbo.resource->mem_type) {  // in radeon_bo_gpu_offset()
    107: return (bo->tbo.resource->start << PAGE_SHIFT) + start;  // in radeon_bo_gpu_offset()
    112: return bo->tbo.base.size;  // in radeon_bo_size()
    117: return bo->tbo.base.size / RADEON_GPU_PAGE_SIZE;  // in radeon_bo_ngpu_pages()
    122: return (bo->tbo.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;  // in radeon_bo_gpu_page_alignment()
    133: return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);  // in radeon_bo_mmap_offset()
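Lines 96-107 compute the GPU address of a BO: resource->start is a page index, so it is shifted by PAGE_SHIFT and offset by the aperture base of whichever pool the BO currently sits in. A sketch of that computation; the radeon_mc field names are assumptions, everything else follows the hits:

/* Hypothetical helper; mc.vram_start/mc.gtt_start are assumed field names. */
static u64 example_bo_gpu_offset(struct radeon_bo *bo)
{
	struct radeon_device *rdev = radeon_get_rdev(bo->tbo.bdev);
	u64 start = 0;

	switch (bo->tbo.resource->mem_type) {
	case TTM_PL_VRAM:
		start = rdev->mc.vram_start;
		break;
	case TTM_PL_TT:
		start = rdev->mc.gtt_start;
		break;
	}

	/* resource->start counts pages, not bytes (line 107) */
	return ((u64)bo->tbo.resource->start << PAGE_SHIFT) + start;
}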
radeon_mn.c
     57: if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm))  // in radeon_mn_invalidate()
     69: r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP,  // in radeon_mn_invalidate()
     75: r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  // in radeon_mn_invalidate()
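These three hits are the whole userptr invalidation story: when the MMU notifier fires, wait for every fence on the BO's reservation object, then validate the BO into the CPU domain so TTM unbinds the user pages. A condensed sketch; BO locking is elided, and the timeout (truncated in the hit at line 69) is an assumption:

/* Hypothetical helper; the real code runs with the BO reserved. */
static bool example_mn_invalidate(struct radeon_bo *bo)
{
	struct ttm_operation_ctx ctx = { false, false };
	long r;

	/* nothing to do if the user pages were never bound to the GPU */
	if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm))
		return true;

	/* wait for all GPU work, including bookkeeping fences (line 69);
	 * MAX_SCHEDULE_TIMEOUT is assumed, the hit truncates here */
	r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_BOOKKEEP,
				  false, MAX_SCHEDULE_TIMEOUT);
	if (r <= 0)
		return false;

	/* force the BO to the CPU domain so TTM unbinds the user pages */
	radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU);
	return ttm_bo_validate(&bo->tbo, &bo->placement, &ctx) == 0;
}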
/kernel/linux/linux-6.6/drivers/gpu/drm/qxl/
qxl_object.c
     35: static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo)  // in qxl_ttm_bo_destroy(), argument
     40: bo = to_qxl_bo(tbo);  // in qxl_ttm_bo_destroy()
     41: qdev = to_qxl(bo->tbo.base.dev);  // in qxl_ttm_bo_destroy()
     48: drm_gem_object_release(&bo->tbo.base);  // in qxl_ttm_bo_destroy()
     65: if (qbo->tbo.base.size <= PAGE_SIZE)  // in qxl_ttm_placement_from_domain()
    128: r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size);  // in qxl_bo_create()
    133: bo->tbo.base.funcs = &qxl_object_funcs;  // in qxl_bo_create()
    143: bo->tbo.priority = priority;  // in qxl_bo_create()
    144: r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type,  // in qxl_bo_create()
    155: ttm_bo_pin(&bo->tbo);  // in qxl_bo_create()
    [all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/qxl/
qxl_object.h
     34: r = ttm_bo_reserve(&bo->tbo, true, false, NULL);  // in qxl_bo_reserve()
     37: struct drm_device *ddev = bo->tbo.base.dev;  // in qxl_bo_reserve()
     48: ttm_bo_unreserve(&bo->tbo);  // in qxl_bo_unreserve()
     53: return bo->tbo.num_pages << PAGE_SHIFT;  // in qxl_bo_size()
     58: return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);  // in qxl_bo_mmap_offset()
     66: r = ttm_bo_reserve(&bo->tbo, true, no_wait, NULL);  // in qxl_bo_wait()
     69: struct drm_device *ddev = bo->tbo.base.dev;  // in qxl_bo_wait()
     77: *mem_type = bo->tbo.mem.mem_type;  // in qxl_bo_wait()
     79: r = ttm_bo_wait(&bo->tbo, true, no_wait);  // in qxl_bo_wait()
     80: ttm_bo_unreserve(&bo->tbo);  // in qxl_bo_wait()
    [all...]
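Comparing line 53 here with the linux-6.6 entries above (qxl_object.c line 65, amdgpu_object.h line 223) shows one of the API shifts between the two trees: in 5.10 the TTM BO still carries its own num_pages, while in 6.6 the size is read from the embedded GEM object. Side by side; both helpers are sketches and the version suffixes are invented:

/* linux-5.10 form (qxl_object.h:53): page count lives on the TTM BO */
static inline unsigned long qxl_bo_size_v510(struct qxl_bo *bo)
{
	return bo->tbo.num_pages << PAGE_SHIFT;
}

/* linux-6.6 form (qxl_object.c:65 above): size lives on the GEM object */
static inline size_t qxl_bo_size_v66(struct qxl_bo *bo)
{
	return bo->tbo.base.size;
}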
qxl_object.c
     30: static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo)  // in qxl_ttm_bo_destroy(), argument
     35: bo = to_qxl_bo(tbo);  // in qxl_ttm_bo_destroy()
     36: qdev = to_qxl(bo->tbo.base.dev);  // in qxl_ttm_bo_destroy()
     43: drm_gem_object_release(&bo->tbo.base);  // in qxl_ttm_bo_destroy()
     62: if (qbo->tbo.base.size <= PAGE_SIZE)  // in qxl_ttm_placement_from_domain()
    124: r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size);  // in qxl_bo_create()
    129: bo->tbo.base.funcs = &qxl_object_funcs;  // in qxl_bo_create()
    140: bo->tbo.priority = priority;  // in qxl_bo_create()
    141: r = ttm_bo_init(&qdev->mman.bdev, &bo->tbo, size, type,  // in qxl_bo_create()
    166: r = ttm_bo_kmap(&bo->tbo, ...  // in qxl_bo_kmap(), snippet truncated
    [all...]
qxl_gem.c
     35: struct ttm_buffer_object *tbo;  // in qxl_gem_object_free(), local
     41: tbo = &qobj->tbo;  // in qxl_gem_object_free()
     42: ttm_bo_put(tbo);  // in qxl_gem_object_free()
     66: *obj = &qbo->tbo.base;  // in qxl_gem_object_create()
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
     65: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  // in amdgpu_bo_subtract_pin_size()
     67: if (bo->tbo.mem.mem_type == TTM_PL_VRAM) {  // in amdgpu_bo_subtract_pin_size()
     71: } else if (bo->tbo.mem.mem_type == TTM_PL_TT) {  // in amdgpu_bo_subtract_pin_size()
     76: static void amdgpu_bo_destroy(struct ttm_buffer_object *tbo)  // in amdgpu_bo_destroy(), argument
     78: struct amdgpu_device *adev = amdgpu_ttm_adev(tbo->bdev);  // in amdgpu_bo_destroy()
     79: struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo);  // in amdgpu_bo_destroy()
     86: if (bo->tbo.base.import_attach)  // in amdgpu_bo_destroy()
     87: drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  // in amdgpu_bo_destroy()
     88: drm_gem_object_release(&bo->tbo ...  // in amdgpu_bo_destroy(), snippet truncated
    863: struct ttm_buffer_object *tbo;  // in amdgpu_bo_unref(), local
    [all...]
amdgpu_dma_buf.c
     58: ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,  // in amdgpu_gem_prime_vmap()
     95: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  // in amdgpu_gem_prime_mmap()
    109: if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||  // in amdgpu_gem_prime_mmap()
    181: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  // in amdgpu_dma_buf_attach()
    202: r = __dma_resv_make_exclusive(bo->tbo.base.resv);  // in amdgpu_dma_buf_attach()
    224: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  // in amdgpu_dma_buf_detach()
    248: if (bo->tbo.moving) {  // in amdgpu_dma_buf_pin()
    249: r = dma_fence_wait(bo->tbo.moving, true);  // in amdgpu_dma_buf_pin()
    292: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo ...  // in amdgpu_dma_buf_map(), snippet truncated
    [all...]
amdgpu_object.h
     84: /* Protected by tbo.reserved */
     89: struct ttm_buffer_object tbo;  // member
    117: static inline struct amdgpu_bo *ttm_to_amdgpu_bo(struct ttm_buffer_object *tbo)  // in ttm_to_amdgpu_bo(), argument
    119: return container_of(tbo, struct amdgpu_bo, tbo);  // in ttm_to_amdgpu_bo()
    160: struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev);  // in amdgpu_bo_reserve()
    163: r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  // in amdgpu_bo_reserve()
    174: ttm_bo_unreserve(&bo->tbo);  // in amdgpu_bo_unreserve()
    179: return bo->tbo.num_pages << PAGE_SHIFT;  // in amdgpu_bo_size()
    184: return (bo->tbo ...  // in amdgpu_bo_ngpu_pages(), snippet truncated
    [all...]
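The other shift visible between the trees: linux-5.10 reads the current placement from an embedded struct (tbo.mem.mem_type, lines 67/71 of amdgpu_object.c above), while linux-6.6 follows a pointer (tbo.resource->mem_type). An out-of-tree driver spanning both kernels would guard on the version, roughly like this; the exact cutover release used below is an assumption, not verified here:

#include <linux/version.h>

/* Hypothetical compatibility helper; 5.14 as the cutover is a guess. */
static bool example_bo_in_vram(struct amdgpu_bo *bo)
{
#if LINUX_VERSION_CODE < KERNEL_VERSION(5, 14, 0)
	return bo->tbo.mem.mem_type == TTM_PL_VRAM;	  /* linux-5.10 form */
#else
	return bo->tbo.resource->mem_type == TTM_PL_VRAM; /* linux-6.6 form */
#endif
}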
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/
radeon_object.c
     57: u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT;  // in radeon_update_memory_usage()
     75: static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)  // in radeon_ttm_bo_destroy(), argument
     79: bo = container_of(tbo, struct radeon_bo, tbo);  // in radeon_ttm_bo_destroy()
     81: radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1);  // in radeon_ttm_bo_destroy()
     88: if (bo->tbo.base.import_attach)  // in radeon_ttm_bo_destroy()
     89: drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);  // in radeon_ttm_bo_destroy()
     90: drm_gem_object_release(&bo->tbo.base);  // in radeon_ttm_bo_destroy()
    212: drm_gem_private_object_init(rdev->ddev, &bo->tbo ...  // in radeon_bo_create(), snippet truncated
    319: struct ttm_buffer_object *tbo;  // in radeon_bo_unref(), local
    [all...]
radeon_prime.c
     37: int npages = bo->tbo.num_pages;  // in radeon_gem_prime_get_sg_table()
     39: return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages, npages);  // in radeon_gem_prime_get_sg_table()
     47: ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,  // in radeon_gem_prime_vmap()
     83: return &bo->tbo.base;  // in radeon_gem_prime_import_sg_table()
    100: if (bo->tbo.moving) {  // in radeon_gem_prime_pin()
    101: ret = dma_fence_wait(bo->tbo.moving, false);  // in radeon_gem_prime_pin()
    134: if (radeon_ttm_tt_has_userptr(bo->rdev, bo->tbo.ttm))  // in radeon_gem_prime_export()
radeon_object.h
     68: r = ttm_bo_reserve(&bo->tbo, !no_intr, false, NULL);  // in radeon_bo_reserve()
     79: ttm_bo_unreserve(&bo->tbo);  // in radeon_bo_unreserve()
     96: rdev = radeon_get_rdev(bo->tbo.bdev);  // in radeon_bo_gpu_offset()
     98: switch (bo->tbo.mem.mem_type) {  // in radeon_bo_gpu_offset()
    107: return (bo->tbo.mem.start << PAGE_SHIFT) + start;  // in radeon_bo_gpu_offset()
    112: return bo->tbo.num_pages << PAGE_SHIFT;  // in radeon_bo_size()
    117: return (bo->tbo.num_pages << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;  // in radeon_bo_ngpu_pages()
    122: return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;  // in radeon_bo_gpu_page_alignment()
    133: return drm_vma_node_offset_addr(&bo->tbo.base.vma_node);  // in radeon_bo_mmap_offset()
radeon_mn.c
     56: if (!bo->tbo.ttm || !radeon_ttm_tt_is_bound(bo->tbo.bdev, bo->tbo.ttm))  // in radeon_mn_invalidate()
     68: r = dma_resv_wait_timeout_rcu(bo->tbo.base.resv, true, false,  // in radeon_mn_invalidate()
     74: r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);  // in radeon_mn_invalidate()
/kernel/linux/linux-6.6/drivers/gpu/drm/vmwgfx/
vmwgfx_bo.c
     37: WARN_ON(vbo->tbo.base.funcs &&  // in vmw_bo_release()
     38: kref_read(&vbo->tbo.base.refcount) != 0);  // in vmw_bo_release()
     40: drm_gem_object_release(&vbo->tbo.base);  // in vmw_bo_release()
     74: struct ttm_buffer_object *bo = &buf->tbo;  // in vmw_bo_pin_in_placement()
    110: struct ttm_buffer_object *bo = &buf->tbo;  // in vmw_bo_pin_in_vram_or_gmr()
    179: struct ttm_buffer_object *bo = &buf->tbo;  // in vmw_bo_pin_in_start_of_vram()
    195: buf->tbo.pin_count == 0) {  // in vmw_bo_pin_in_start_of_vram()
    236: struct ttm_buffer_object *bo = &buf->tbo;  // in vmw_bo_unpin()
    283: struct ttm_buffer_object *bo = &vbo->tbo;  // in vmw_bo_pin_reserved()
    328: struct ttm_buffer_object *bo = &vbo->tbo;  // in vmw_bo_map_and_cache()
    [all...]
vmwgfx_bo.h
     62: * @tbo: The TTM buffer object
     75: struct ttm_buffer_object tbo;  // member
    145: vbo->tbo.priority = i;  // in vmw_bo_prio_adjust()
    150: vbo->tbo.priority = 3;  // in vmw_bo_prio_adjust()
    189: ttm_bo_put(&tmp_buf->tbo);  // in vmw_bo_unreference()
    194: ttm_bo_get(&buf->tbo);  // in vmw_bo_reference()
    200: drm_gem_object_get(&vbo->tbo.base);  // in vmw_user_bo_ref()
    210: drm_gem_object_put(&tmp_buf->tbo.base);  // in vmw_user_bo_unref()
    215: return container_of((gobj), struct vmw_bo, tbo.base);  // in to_vmw_bo()
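vmwgfx_bo.h keeps two reference-count layers on the same object: kernel-internal references pin the TTM object (ttm_bo_get/ttm_bo_put), while userspace-visible references go through the embedded GEM object (drm_gem_object_get/put). Reconstructed from the fragments above with _sketch suffixes; the shapes are inferred, not verbatim:

static inline void vmw_bo_unreference_sketch(struct vmw_bo **buf)
{
	struct vmw_bo *tmp_buf = *buf;

	*buf = NULL;				/* clear the caller's pointer first */
	if (tmp_buf)
		ttm_bo_put(&tmp_buf->tbo);	/* kernel-side ref, line 189 */
}

static inline struct vmw_bo *vmw_bo_reference_sketch(struct vmw_bo *buf)
{
	ttm_bo_get(&buf->tbo);			/* kernel-side ref, line 194 */
	return buf;
}

static inline struct vmw_bo *vmw_user_bo_ref_sketch(struct vmw_bo *vbo)
{
	drm_gem_object_get(&vbo->tbo.base);	/* userspace-side ref, line 200 */
	return vbo;
}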
vmwgfx_gem.c
    123: (*p_vbo)->tbo.base.funcs = &vmw_gem_object_funcs;  // in vmw_gem_object_create()
    147: ret = drm_gem_handle_create(filp, &(*p_vbo)->tbo.base, handle);  // in vmw_gem_object_create_with_handle()
    171: rep->map_handle = drm_vma_node_offset_addr(&vbo->tbo.base.vma_node);  // in vmw_gem_object_create_ioctl()
    175: drm_gem_object_put(&vbo->tbo.base);  // in vmw_gem_object_create_ioctl()
    187: switch (bo->tbo.resource->mem_type) {  // in vmw_bo_print_info()
    208: switch (bo->tbo.type) {  // in vmw_bo_print_info()
    224: id, bo->tbo.base.size, placement, type);  // in vmw_bo_print_info()
    226: bo->tbo.priority,  // in vmw_bo_print_info()
    227: bo->tbo.pin_count,  // in vmw_bo_print_info()
    228: kref_read(&bo->tbo ...  // in vmw_bo_print_info(), snippet truncated
    [all...]
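Line 171 answers how userspace learns where to mmap() a BO, and radeon_object.h line 133 and qxl_object.h line 58 above do it identically: the fake offset comes from the vma_node of the GEM object embedded in tbo. As a one-line sketch, with the helper name invented:

/* Hypothetical helper name; the expression is exactly what the hits show. */
static inline u64 example_bo_mmap_offset(struct vmw_bo *vbo)
{
	return drm_vma_node_offset_addr(&vbo->tbo.base.vma_node);
}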