
Searched refs:bo (Results 1 - 25 of 551) sorted by relevance


/kernel/linux/linux-6.6/drivers/staging/media/atomisp/pci/hmm/
hmm_bo.c
47 static int __bo_init(struct hmm_bo_device *bdev, struct hmm_buffer_object *bo, in __bo_init() argument
59 memset(bo, 0, sizeof(*bo)); in __bo_init()
60 mutex_init(&bo->mutex); in __bo_init()
62 /* init the bo->list HEAD as an element of entire_bo_list */ in __bo_init()
63 INIT_LIST_HEAD(&bo->list); in __bo_init()
65 bo->bdev = bdev; in __bo_init()
66 bo->vmap_addr = NULL; in __bo_init()
67 bo->status = HMM_BO_FREE; in __bo_init()
68 bo in __bo_init()
132 struct hmm_buffer_object *bo; __bo_search_by_addr() local
157 struct hmm_buffer_object *bo; __bo_search_by_addr_in_range() local
178 __bo_insert_to_free_rbtree(struct rb_root *root, struct hmm_buffer_object *bo) __bo_insert_to_free_rbtree() argument
211 __bo_insert_to_alloc_rbtree(struct rb_root *root, struct hmm_buffer_object *bo) __bo_insert_to_alloc_rbtree() argument
236 __bo_break_up(struct hmm_bo_device *bdev, struct hmm_buffer_object *bo, unsigned int pgnr) __bo_break_up() argument
268 __bo_take_off_handling(struct hmm_buffer_object *bo) __bo_take_off_handling() argument
309 __bo_merge(struct hmm_buffer_object *bo, struct hmm_buffer_object *next_bo) __bo_merge() argument
336 struct hmm_buffer_object *bo; hmm_bo_device_init() local
396 struct hmm_buffer_object *bo, *new_bo; hmm_bo_alloc() local
439 hmm_bo_release(struct hmm_buffer_object *bo) hmm_bo_release() argument
503 struct hmm_buffer_object *bo; hmm_bo_device_exit() local
546 hmm_bo_allocated(struct hmm_buffer_object *bo) hmm_bo_allocated() argument
556 struct hmm_buffer_object *bo; hmm_bo_device_search_start() local
576 struct hmm_buffer_object *bo; hmm_bo_device_search_in_range() local
597 struct hmm_buffer_object *bo; hmm_bo_device_search_vmap_start() local
626 free_private_bo_pages(struct hmm_buffer_object *bo) free_private_bo_pages() argument
633 alloc_private_pages(struct hmm_buffer_object *bo) alloc_private_pages() argument
655 alloc_vmalloc_pages(struct hmm_buffer_object *bo, void *vmalloc_addr) alloc_vmalloc_pages() argument
680 hmm_bo_alloc_pages(struct hmm_buffer_object *bo, enum hmm_bo_type type, void *vmalloc_addr) hmm_bo_alloc_pages() argument
731 hmm_bo_free_pages(struct hmm_buffer_object *bo) hmm_bo_free_pages() argument
760 hmm_bo_page_allocated(struct hmm_buffer_object *bo) hmm_bo_page_allocated() argument
770 hmm_bo_bind(struct hmm_buffer_object *bo) hmm_bo_bind() argument
848 hmm_bo_unbind(struct hmm_buffer_object *bo) hmm_bo_unbind() argument
891 hmm_bo_binded(struct hmm_buffer_object *bo) hmm_bo_binded() argument
906 hmm_bo_vmap(struct hmm_buffer_object *bo, bool cached) hmm_bo_vmap() argument
937 hmm_bo_flush_vmap(struct hmm_buffer_object *bo) hmm_bo_flush_vmap() argument
951 hmm_bo_vunmap(struct hmm_buffer_object *bo) hmm_bo_vunmap() argument
966 hmm_bo_ref(struct hmm_buffer_object *bo) hmm_bo_ref() argument
981 hmm_bo_unref(struct hmm_buffer_object *bo) hmm_bo_unref() argument
990 struct hmm_buffer_object *bo = hmm_bo_vm_open() local
1008 struct hmm_buffer_object *bo = hmm_bo_vm_close() local
1035 hmm_bo_mmap(struct vm_area_struct *vma, struct hmm_buffer_object *bo) hmm_bo_mmap() argument
[all...]
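
The hmm_bo.c hits above sketch the atomisp buffer-object lifecycle: __bo_init() prepares the object, hmm_bo_alloc() carves a range out of the device's free rbtree, hmm_bo_alloc_pages() backs it with pages, hmm_bo_bind() maps it through the ISP MMU, and hmm_bo_vmap()/hmm_bo_vunmap() provide a kernel CPU mapping. A minimal sketch of that sequence follows; hmm_bo_alloc()'s exact signature, the HMM_BO_PRIVATE type value and the unwind order are assumptions inferred from the function list, not copied from the source.

```c
/* Sketch only: signatures beyond those listed above (notably hmm_bo_alloc()
 * and the hmm_bo_type value) are assumptions. Error handling is abbreviated. */
#include "hmm/hmm_bo.h"

static void *alloc_and_map_isp_buffer(struct hmm_bo_device *bdev,
				       unsigned int pgnr)
{
	struct hmm_buffer_object *bo;
	void *vaddr;

	bo = hmm_bo_alloc(bdev, pgnr);		/* take pgnr pages from the free rbtree */
	if (!bo)
		return NULL;

	if (hmm_bo_alloc_pages(bo, HMM_BO_PRIVATE, NULL))	/* back with system pages */
		goto out_unref;

	if (hmm_bo_bind(bo))			/* map into the ISP MMU */
		goto out_free_pages;

	vaddr = hmm_bo_vmap(bo, true);		/* cached kernel mapping */
	if (!vaddr)
		goto out_unbind;

	return vaddr;

out_unbind:
	hmm_bo_unbind(bo);
out_free_pages:
	hmm_bo_free_pages(bo);
out_unref:
	hmm_bo_unref(bo);
	return NULL;
}
```
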
hmm.c
52 struct hmm_buffer_object *bo; in bo_show() local
67 list_for_each_entry(bo, bo_list, list) { in bo_show()
68 if ((active && (bo->status & HMM_BO_ALLOCED)) || in bo_show()
69 (!active && !(bo->status & HMM_BO_ALLOCED))) { in bo_show()
72 hmm_bo_type_string[bo->type], bo->pgnr); in bo_show()
74 total[bo->type] += bo->pgnr; in bo_show()
75 count[bo->type]++; in bo_show()
173 struct hmm_buffer_object *bo; in __hmm_alloc() local
232 struct hmm_buffer_object *bo; hmm_free() local
253 hmm_check_bo(struct hmm_buffer_object *bo, unsigned int ptr) hmm_check_bo() argument
281 struct hmm_buffer_object *bo; load_and_flush_by_kmap() local
324 struct hmm_buffer_object *bo; load_and_flush() local
381 struct hmm_buffer_object *bo; hmm_store() local
464 struct hmm_buffer_object *bo; hmm_set() local
525 struct hmm_buffer_object *bo; hmm_virt_to_phys() local
543 struct hmm_buffer_object *bo; hmm_mmap() local
559 struct hmm_buffer_object *bo; hmm_vmap() local
580 struct hmm_buffer_object *bo; hmm_flush_vmap() local
595 struct hmm_buffer_object *bo; hmm_vunmap() local
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/ttm/
ttm_bo.c
49 static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo, in ttm_bo_mem_space_debug() argument
60 man = ttm_manager_type(bo->bdev, mem_type); in ttm_bo_mem_space_debug()
68 * @bo: The buffer object.
74 void ttm_bo_move_to_lru_tail(struct ttm_buffer_object *bo) in ttm_bo_move_to_lru_tail() argument
76 dma_resv_assert_held(bo->base.resv); in ttm_bo_move_to_lru_tail()
78 if (bo->resource) in ttm_bo_move_to_lru_tail()
79 ttm_resource_move_to_lru_tail(bo->resource); in ttm_bo_move_to_lru_tail()
86 * @bo: The buffer object.
97 void ttm_bo_set_bulk_move(struct ttm_buffer_object *bo, in ttm_bo_set_bulk_move() argument
100 dma_resv_assert_held(bo in ttm_bo_set_bulk_move()
115 ttm_bo_handle_move_mem(struct ttm_buffer_object *bo, struct ttm_resource *mem, bool evict, struct ttm_operation_ctx *ctx, struct ttm_place *hop) ttm_bo_handle_move_mem() argument
177 ttm_bo_cleanup_memtype_use(struct ttm_buffer_object *bo) ttm_bo_cleanup_memtype_use() argument
186 ttm_bo_individualize_resv(struct ttm_buffer_object *bo) ttm_bo_individualize_resv() argument
213 ttm_bo_flush_all_fences(struct ttm_buffer_object *bo) ttm_bo_flush_all_fences() argument
241 ttm_bo_cleanup_refs(struct ttm_buffer_object *bo, bool interruptible, bool no_wait_gpu, bool unlock_resv) ttm_bo_cleanup_refs() argument
307 struct ttm_buffer_object *bo; ttm_bo_delayed_delete() local
321 struct ttm_buffer_object *bo = ttm_bo_release() local
392 ttm_bo_put(struct ttm_buffer_object *bo) ttm_bo_put() argument
398 ttm_bo_bounce_temp_buffer(struct ttm_buffer_object *bo, struct ttm_resource **mem, struct ttm_operation_ctx *ctx, struct ttm_place *hop) ttm_bo_bounce_temp_buffer() argument
423 ttm_bo_evict(struct ttm_buffer_object *bo, struct ttm_operation_ctx *ctx) ttm_bo_evict() argument
487 ttm_bo_eviction_valuable(struct ttm_buffer_object *bo, const struct ttm_place *place) ttm_bo_eviction_valuable() argument
514 ttm_bo_evict_swapout_allowable(struct ttm_buffer_object *bo, struct ttm_operation_ctx *ctx, const struct ttm_place *place, bool *locked, bool *busy) ttm_bo_evict_swapout_allowable() argument
595 struct ttm_buffer_object *bo = NULL, *busy_bo = NULL; ttm_mem_evict_first() local
657 ttm_bo_pin(struct ttm_buffer_object *bo) ttm_bo_pin() argument
675 ttm_bo_unpin(struct ttm_buffer_object *bo) ttm_bo_unpin() argument
694 ttm_bo_add_move_fence(struct ttm_buffer_object *bo, struct ttm_resource_manager *man, struct ttm_resource *mem, bool no_wait_gpu) ttm_bo_add_move_fence() argument
726 ttm_bo_mem_force_space(struct ttm_buffer_object *bo, const struct ttm_place *place, struct ttm_resource **mem, struct ttm_operation_ctx *ctx) ttm_bo_mem_force_space() argument
771 ttm_bo_mem_space(struct ttm_buffer_object *bo, struct ttm_placement *placement, struct ttm_resource **mem, struct ttm_operation_ctx *ctx) ttm_bo_mem_space() argument
838 ttm_bo_move_buffer(struct ttm_buffer_object *bo, struct ttm_placement *placement, struct ttm_operation_ctx *ctx) ttm_bo_move_buffer() argument
890 ttm_bo_validate(struct ttm_buffer_object *bo, struct ttm_placement *placement, struct ttm_operation_ctx *ctx) ttm_bo_validate() argument
961 ttm_bo_init_reserved(struct ttm_device *bdev, struct ttm_buffer_object *bo, enum ttm_bo_type type, struct ttm_placement *placement, uint32_t alignment, struct ttm_operation_ctx *ctx, struct sg_table *sg, struct dma_resv *resv, void (*destroy) (struct ttm_buffer_object *)) ttm_bo_init_reserved() argument
1054 ttm_bo_init_validate(struct ttm_device *bdev, struct ttm_buffer_object *bo, enum ttm_bo_type type, struct ttm_placement *placement, uint32_t alignment, bool interruptible, struct sg_table *sg, struct dma_resv *resv, void (*destroy) (struct ttm_buffer_object *)) ttm_bo_init_validate() argument
1084 ttm_bo_unmap_virtual(struct ttm_buffer_object *bo) ttm_bo_unmap_virtual() argument
1103 ttm_bo_wait_ctx(struct ttm_buffer_object *bo, struct ttm_operation_ctx *ctx) ttm_bo_wait_ctx() argument
1125 ttm_bo_swapout(struct ttm_buffer_object *bo, struct ttm_operation_ctx *ctx, gfp_t gfp_flags) ttm_bo_swapout() argument
1212 ttm_bo_tt_destroy(struct ttm_buffer_object *bo) ttm_bo_tt_destroy() argument
[all...]
ttm_bo_vm.c
41 static vm_fault_t ttm_bo_vm_fault_idle(struct ttm_buffer_object *bo, in ttm_bo_vm_fault_idle() argument
49 if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_KERNEL)) in ttm_bo_vm_fault_idle()
61 ttm_bo_get(bo); in ttm_bo_vm_fault_idle()
63 (void)dma_resv_wait_timeout(bo->base.resv, in ttm_bo_vm_fault_idle()
66 dma_resv_unlock(bo->base.resv); in ttm_bo_vm_fault_idle()
67 ttm_bo_put(bo); in ttm_bo_vm_fault_idle()
74 err = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_KERNEL, true, in ttm_bo_vm_fault_idle()
84 static unsigned long ttm_bo_io_mem_pfn(struct ttm_buffer_object *bo, in ttm_bo_io_mem_pfn() argument
87 struct ttm_device *bdev = bo->bdev; in ttm_bo_io_mem_pfn()
90 return bdev->funcs->io_mem_pfn(bo, page_offse in ttm_bo_io_mem_pfn()
116 ttm_bo_vm_reserve(struct ttm_buffer_object *bo, struct vm_fault *vmf) ttm_bo_vm_reserve() argument
186 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_fault_reserved() local
294 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_dummy_page() local
325 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_fault() local
352 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_open() local
362 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_close() local
369 ttm_bo_vm_access_kmap(struct ttm_buffer_object *bo, unsigned long offset, uint8_t *buf, int len, int write) ttm_bo_vm_access_kmap() argument
411 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_access() local
459 ttm_bo_mmap_obj(struct vm_area_struct *vma, struct ttm_buffer_object *bo) ttm_bo_mmap_obj() argument
[all...]
ttm_bo_util.c
42 struct ttm_buffer_object *bo; member
129 * @bo: A pointer to a struct ttm_buffer_object.
136 * and update the (@bo)->mem placement flags. If unsuccessful, the old
142 int ttm_bo_move_memcpy(struct ttm_buffer_object *bo, in ttm_bo_move_memcpy() argument
146 struct ttm_device *bdev = bo->bdev; in ttm_bo_move_memcpy()
148 ttm_manager_type(bo->bdev, dst_mem->mem_type); in ttm_bo_move_memcpy()
149 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_memcpy()
150 struct ttm_resource *src_mem = bo->resource; in ttm_bo_move_memcpy()
173 dst_iter = ttm_kmap_iter_tt_init(&_dst_iter.tt, bo->ttm); in ttm_bo_move_memcpy()
179 src_iter = ttm_kmap_iter_tt_init(&_src_iter.tt, bo in ttm_bo_move_memcpy()
201 ttm_transfered_destroy(struct ttm_buffer_object *bo) ttm_transfered_destroy() argument
226 ttm_buffer_object_transfer(struct ttm_buffer_object *bo, struct ttm_buffer_object **new_obj) ttm_buffer_object_transfer() argument
290 ttm_io_prot(struct ttm_buffer_object *bo, struct ttm_resource *res, pgprot_t tmp) ttm_io_prot() argument
303 ttm_bo_ioremap(struct ttm_buffer_object *bo, unsigned long offset, unsigned long size, struct ttm_bo_kmap_obj *map) ttm_bo_ioremap() argument
329 ttm_bo_kmap_ttm(struct ttm_buffer_object *bo, unsigned long start_page, unsigned long num_pages, struct ttm_bo_kmap_obj *map) ttm_bo_kmap_ttm() argument
387 ttm_bo_kmap(struct ttm_buffer_object *bo, unsigned long start_page, unsigned long num_pages, struct ttm_bo_kmap_obj *map) ttm_bo_kmap() argument
460 ttm_bo_vmap(struct ttm_buffer_object *bo, struct iosys_map *map) ttm_bo_vmap() argument
529 ttm_bo_vunmap(struct ttm_buffer_object *bo, struct iosys_map *map) ttm_bo_vunmap() argument
548 ttm_bo_wait_free_node(struct ttm_buffer_object *bo, bool dst_use_tt) ttm_bo_wait_free_node() argument
566 ttm_bo_move_to_ghost(struct ttm_buffer_object *bo, struct dma_fence *fence, bool dst_use_tt) ttm_bo_move_to_ghost() argument
604 ttm_bo_move_pipeline_evict(struct ttm_buffer_object *bo, struct dma_fence *fence) ttm_bo_move_pipeline_evict() argument
642 ttm_bo_move_accel_cleanup(struct ttm_buffer_object *bo, struct dma_fence *fence, bool evict, bool pipeline, struct ttm_resource *new_mem) ttm_bo_move_accel_cleanup() argument
679 ttm_bo_move_sync_cleanup(struct ttm_buffer_object *bo, struct ttm_resource *new_mem) ttm_bo_move_sync_cleanup() argument
705 ttm_bo_pipeline_gutting(struct ttm_buffer_object *bo) ttm_bo_pipeline_gutting() argument
[all...]
/kernel/linux/linux-6.6/drivers/accel/ivpu/
ivpu_gem.c
29 static int __must_check prime_alloc_pages_locked(struct ivpu_bo *bo) in prime_alloc_pages_locked() argument
35 static void prime_free_pages_locked(struct ivpu_bo *bo) in prime_free_pages_locked() argument
40 static int prime_map_pages_locked(struct ivpu_bo *bo) in prime_map_pages_locked() argument
42 struct ivpu_device *vdev = ivpu_bo_to_vdev(bo); in prime_map_pages_locked()
45 sgt = dma_buf_map_attachment_unlocked(bo->base.import_attach, DMA_BIDIRECTIONAL); in prime_map_pages_locked()
51 bo->sgt = sgt; in prime_map_pages_locked()
55 static void prime_unmap_pages_locked(struct ivpu_bo *bo) in prime_unmap_pages_locked() argument
57 dma_buf_unmap_attachment_unlocked(bo->base.import_attach, bo->sgt, DMA_BIDIRECTIONAL); in prime_unmap_pages_locked()
58 bo in prime_unmap_pages_locked()
70 shmem_alloc_pages_locked(struct ivpu_bo *bo) shmem_alloc_pages_locked() argument
88 shmem_free_pages_locked(struct ivpu_bo *bo) shmem_free_pages_locked() argument
97 ivpu_bo_map_pages_locked(struct ivpu_bo *bo) ivpu_bo_map_pages_locked() argument
124 ivpu_bo_unmap_pages_locked(struct ivpu_bo *bo) ivpu_bo_unmap_pages_locked() argument
143 internal_alloc_pages_locked(struct ivpu_bo *bo) internal_alloc_pages_locked() argument
172 internal_free_pages_locked(struct ivpu_bo *bo) internal_free_pages_locked() argument
195 ivpu_bo_alloc_and_map_pages_locked(struct ivpu_bo *bo) ivpu_bo_alloc_and_map_pages_locked() argument
221 ivpu_bo_unmap_and_free_pages(struct ivpu_bo *bo) ivpu_bo_unmap_and_free_pages() argument
241 ivpu_bo_pin(struct ivpu_bo *bo) ivpu_bo_pin() argument
278 ivpu_bo_alloc_vpu_addr(struct ivpu_bo *bo, struct ivpu_mmu_context *ctx, const struct ivpu_addr_range *range) ivpu_bo_alloc_vpu_addr() argument
305 ivpu_bo_free_vpu_addr(struct ivpu_bo *bo) ivpu_bo_free_vpu_addr() argument
333 struct ivpu_bo *bo, *tmp; ivpu_bo_remove_all_bos_from_context() local
344 struct ivpu_bo *bo; ivpu_bo_alloc() local
406 struct ivpu_bo *bo = to_ivpu_bo(obj); ivpu_bo_free() local
437 struct ivpu_bo *bo = to_ivpu_bo(obj); ivpu_bo_mmap() local
458 struct ivpu_bo *bo = to_ivpu_bo(obj); ivpu_bo_get_sg_table() local
479 struct ivpu_bo *bo = to_ivpu_bo(obj); ivpu_vm_fault() local
531 struct ivpu_bo *bo; ivpu_bo_create_ioctl() local
566 struct ivpu_bo *bo; ivpu_bo_alloc_internal() local
617 ivpu_bo_free_internal(struct ivpu_bo *bo) ivpu_bo_free_internal() argument
626 struct ivpu_bo *bo; ivpu_gem_prime_import() local
658 struct ivpu_bo *bo; ivpu_bo_info_ioctl() local
713 ivpu_bo_print_info(struct ivpu_bo *bo, struct drm_printer *p) ivpu_bo_print_info() argument
730 struct ivpu_bo *bo; ivpu_bo_list() local
[all...]
ivpu_gem.h
45 int (*alloc_pages)(struct ivpu_bo *bo);
46 void (*free_pages)(struct ivpu_bo *bo);
47 int (*map_pages)(struct ivpu_bo *bo);
48 void (*unmap_pages)(struct ivpu_bo *bo);
51 int ivpu_bo_pin(struct ivpu_bo *bo);
58 void ivpu_bo_free_internal(struct ivpu_bo *bo);
60 void ivpu_bo_unmap_sgt_and_remove_from_context(struct ivpu_bo *bo);
71 static inline struct page *ivpu_bo_get_page(struct ivpu_bo *bo, u64 offset) in ivpu_bo_get_page() argument
73 if (offset > bo->base.size || !bo in ivpu_bo_get_page()
79 ivpu_bo_cache_mode(struct ivpu_bo *bo) ivpu_bo_cache_mode() argument
84 ivpu_bo_is_snooped(struct ivpu_bo *bo) ivpu_bo_is_snooped() argument
89 ivpu_bo_pgprot(struct ivpu_bo *bo, pgprot_t prot) ivpu_bo_pgprot() argument
100 ivpu_bo_to_vdev(struct ivpu_bo *bo) ivpu_bo_to_vdev() argument
105 ivpu_to_cpu_addr(struct ivpu_bo *bo, u32 vpu_addr) ivpu_to_cpu_addr() argument
116 cpu_to_vpu_addr(struct ivpu_bo *bo, void *cpu_addr) cpu_to_vpu_addr() argument
[all...]
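
ivpu_gem.c above defines three page-backing variants (prime_*, shmem_* and internal_*), and ivpu_gem.h exposes them through a table of alloc_pages/free_pages/map_pages/unmap_pages callbacks so the common paths (ivpu_bo_pin(), ivpu_bo_alloc_and_map_pages_locked()) stay type-agnostic. The fragment below is a hypothetical, self-contained illustration of that dispatch pattern; the struct name and the helper are assumptions, not the driver's actual code.

```c
/* Hypothetical illustration of an ops-table dispatch like the one declared
 * in ivpu_gem.h; the struct name and the helper below are assumptions. */
struct ivpu_bo;				/* opaque for this sketch */

struct ivpu_bo_ops {			/* mirrors the hooks listed above */
	int (*alloc_pages)(struct ivpu_bo *bo);
	void (*free_pages)(struct ivpu_bo *bo);
	int (*map_pages)(struct ivpu_bo *bo);
	void (*unmap_pages)(struct ivpu_bo *bo);
};

/* Type-agnostic populate step: back the bo with pages, then map them for
 * DMA, undoing the allocation if the mapping step fails. */
static int ivpu_bo_populate(struct ivpu_bo *bo, const struct ivpu_bo_ops *ops)
{
	int ret;

	ret = ops->alloc_pages(bo);
	if (ret)
		return ret;

	ret = ops->map_pages(bo);
	if (ret)
		ops->free_pages(bo);

	return ret;
}
```
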
/kernel/linux/linux-5.10/drivers/staging/media/atomisp/pci/hmm/
hmm_bo.c
58 static int __bo_init(struct hmm_bo_device *bdev, struct hmm_buffer_object *bo, in __bo_init() argument
70 memset(bo, 0, sizeof(*bo)); in __bo_init()
71 mutex_init(&bo->mutex); in __bo_init()
73 /* init the bo->list HEAD as an element of entire_bo_list */ in __bo_init()
74 INIT_LIST_HEAD(&bo->list); in __bo_init()
76 bo->bdev = bdev; in __bo_init()
77 bo->vmap_addr = NULL; in __bo_init()
78 bo->status = HMM_BO_FREE; in __bo_init()
79 bo in __bo_init()
143 struct hmm_buffer_object *bo; __bo_search_by_addr() local
168 struct hmm_buffer_object *bo; __bo_search_by_addr_in_range() local
189 __bo_insert_to_free_rbtree(struct rb_root *root, struct hmm_buffer_object *bo) __bo_insert_to_free_rbtree() argument
222 __bo_insert_to_alloc_rbtree(struct rb_root *root, struct hmm_buffer_object *bo) __bo_insert_to_alloc_rbtree() argument
247 __bo_break_up(struct hmm_bo_device *bdev, struct hmm_buffer_object *bo, unsigned int pgnr) __bo_break_up() argument
279 __bo_take_off_handling(struct hmm_buffer_object *bo) __bo_take_off_handling() argument
320 __bo_merge(struct hmm_buffer_object *bo, struct hmm_buffer_object *next_bo) __bo_merge() argument
347 struct hmm_buffer_object *bo; hmm_bo_device_init() local
407 struct hmm_buffer_object *bo, *new_bo; hmm_bo_alloc() local
450 hmm_bo_release(struct hmm_buffer_object *bo) hmm_bo_release() argument
514 struct hmm_buffer_object *bo; hmm_bo_device_exit() local
557 hmm_bo_allocated(struct hmm_buffer_object *bo) hmm_bo_allocated() argument
567 struct hmm_buffer_object *bo; hmm_bo_device_search_start() local
587 struct hmm_buffer_object *bo; hmm_bo_device_search_in_range() local
608 struct hmm_buffer_object *bo; hmm_bo_device_search_vmap_start() local
629 free_private_bo_pages(struct hmm_buffer_object *bo, struct hmm_pool *dypool, struct hmm_pool *repool, int free_pgnr) free_private_bo_pages() argument
694 alloc_private_pages(struct hmm_buffer_object *bo, int from_highmem, bool cached, struct hmm_pool *dypool, struct hmm_pool *repool) alloc_private_pages() argument
851 free_private_pages(struct hmm_buffer_object *bo, struct hmm_pool *dypool, struct hmm_pool *repool) free_private_pages() argument
860 free_user_pages(struct hmm_buffer_object *bo, unsigned int page_nr) free_user_pages() argument
880 alloc_user_pages(struct hmm_buffer_object *bo, const void __user *userptr, bool cached) alloc_user_pages() argument
980 hmm_bo_alloc_pages(struct hmm_buffer_object *bo, enum hmm_bo_type type, int from_highmem, const void __user *userptr, bool cached) hmm_bo_alloc_pages() argument
1029 hmm_bo_free_pages(struct hmm_buffer_object *bo) hmm_bo_free_pages() argument
1056 hmm_bo_page_allocated(struct hmm_buffer_object *bo) hmm_bo_page_allocated() argument
1066 hmm_bo_get_page_info(struct hmm_buffer_object *bo, struct hmm_page_object **page_obj, int *pgnr) hmm_bo_get_page_info() argument
1092 hmm_bo_bind(struct hmm_buffer_object *bo) hmm_bo_bind() argument
1170 hmm_bo_unbind(struct hmm_buffer_object *bo) hmm_bo_unbind() argument
1213 hmm_bo_binded(struct hmm_buffer_object *bo) hmm_bo_binded() argument
1228 hmm_bo_vmap(struct hmm_buffer_object *bo, bool cached) hmm_bo_vmap() argument
1274 hmm_bo_flush_vmap(struct hmm_buffer_object *bo) hmm_bo_flush_vmap() argument
1288 hmm_bo_vunmap(struct hmm_buffer_object *bo) hmm_bo_vunmap() argument
1303 hmm_bo_ref(struct hmm_buffer_object *bo) hmm_bo_ref() argument
1318 hmm_bo_unref(struct hmm_buffer_object *bo) hmm_bo_unref() argument
1327 struct hmm_buffer_object *bo = hmm_bo_vm_open() local
1345 struct hmm_buffer_object *bo = hmm_bo_vm_close() local
1372 hmm_bo_mmap(struct vm_area_struct *vma, struct hmm_buffer_object *bo) hmm_bo_mmap() argument
[all...]
hmm.c
58 struct hmm_buffer_object *bo; in bo_show() local
73 list_for_each_entry(bo, bo_list, list) { in bo_show()
74 if ((active && (bo->status & HMM_BO_ALLOCED)) || in bo_show()
75 (!active && !(bo->status & HMM_BO_ALLOCED))) { in bo_show()
78 hmm_bo_type_string[bo->type], bo->pgnr); in bo_show()
80 total[bo->type] += bo->pgnr; in bo_show()
81 count[bo->type]++; in bo_show()
229 struct hmm_buffer_object *bo; in hmm_alloc() local
287 struct hmm_buffer_object *bo; hmm_free() local
310 hmm_check_bo(struct hmm_buffer_object *bo, unsigned int ptr) hmm_check_bo() argument
338 struct hmm_buffer_object *bo; load_and_flush_by_kmap() local
381 struct hmm_buffer_object *bo; load_and_flush() local
438 struct hmm_buffer_object *bo; hmm_store() local
531 struct hmm_buffer_object *bo; hmm_set() local
592 struct hmm_buffer_object *bo; hmm_virt_to_phys() local
610 struct hmm_buffer_object *bo; hmm_mmap() local
626 struct hmm_buffer_object *bo; hmm_vmap() local
647 struct hmm_buffer_object *bo; hmm_flush_vmap() local
662 struct hmm_buffer_object *bo; hmm_vunmap() local
725 struct hmm_buffer_object *bo; hmm_host_vaddr_to_hrt_vaddr() local
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/ttm/
ttm_bo.c
62 static void ttm_bo_default_destroy(struct ttm_buffer_object *bo) in ttm_bo_default_destroy() argument
64 kfree(bo); in ttm_bo_default_destroy()
67 static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo, in ttm_bo_mem_space_debug() argument
75 bo, bo->mem.num_pages, bo->mem.size >> 10, in ttm_bo_mem_space_debug()
76 bo->mem.size >> 20); in ttm_bo_mem_space_debug()
81 man = ttm_manager_type(bo->bdev, mem_type); in ttm_bo_mem_space_debug()
112 static void ttm_bo_add_mem_to_lru(struct ttm_buffer_object *bo, in ttm_bo_add_mem_to_lru() argument
115 struct ttm_bo_device *bdev = bo in ttm_bo_add_mem_to_lru()
134 ttm_bo_del_from_lru(struct ttm_buffer_object *bo) ttm_bo_del_from_lru() argument
152 ttm_bo_bulk_move_set_pos(struct ttm_lru_bulk_move_pos *pos, struct ttm_buffer_object *bo) ttm_bo_bulk_move_set_pos() argument
160 ttm_bo_move_to_lru_tail(struct ttm_buffer_object *bo, struct ttm_lru_bulk_move *bulk) ttm_bo_move_to_lru_tail() argument
236 ttm_bo_handle_move_mem(struct ttm_buffer_object *bo, struct ttm_resource *mem, bool evict, struct ttm_operation_ctx *ctx) ttm_bo_handle_move_mem() argument
321 ttm_bo_cleanup_memtype_use(struct ttm_buffer_object *bo) ttm_bo_cleanup_memtype_use() argument
330 ttm_bo_individualize_resv(struct ttm_buffer_object *bo) ttm_bo_individualize_resv() argument
357 ttm_bo_flush_all_fences(struct ttm_buffer_object *bo) ttm_bo_flush_all_fences() argument
392 ttm_bo_cleanup_refs(struct ttm_buffer_object *bo, bool interruptible, bool no_wait_gpu, bool unlock_resv) ttm_bo_cleanup_refs() argument
469 struct ttm_buffer_object *bo; ttm_bo_delayed_delete() local
512 struct ttm_buffer_object *bo = ttm_bo_release() local
580 ttm_bo_put(struct ttm_buffer_object *bo) ttm_bo_put() argument
600 ttm_bo_evict(struct ttm_buffer_object *bo, struct ttm_operation_ctx *ctx) ttm_bo_evict() argument
646 ttm_bo_eviction_valuable(struct ttm_buffer_object *bo, const struct ttm_place *place) ttm_bo_eviction_valuable() argument
670 ttm_bo_evict_swapout_allowable(struct ttm_buffer_object *bo, struct ttm_operation_ctx *ctx, bool *locked, bool *busy) ttm_bo_evict_swapout_allowable() argument
740 struct ttm_buffer_object *bo = NULL, *busy_bo = NULL; ttm_mem_evict_first() local
811 ttm_bo_add_move_fence(struct ttm_buffer_object *bo, struct ttm_resource_manager *man, struct ttm_resource *mem, bool no_wait_gpu) ttm_bo_add_move_fence() argument
848 ttm_bo_mem_force_space(struct ttm_buffer_object *bo, const struct ttm_place *place, struct ttm_resource *mem, struct ttm_operation_ctx *ctx) ttm_bo_mem_force_space() argument
908 ttm_bo_mem_placement(struct ttm_buffer_object *bo, const struct ttm_place *place, struct ttm_resource *mem, struct ttm_operation_ctx *ctx) ttm_bo_mem_placement() argument
944 ttm_bo_mem_space(struct ttm_buffer_object *bo, struct ttm_placement *placement, struct ttm_resource *mem, struct ttm_operation_ctx *ctx) ttm_bo_mem_space() argument
1015 ttm_bo_move_buffer(struct ttm_buffer_object *bo, struct ttm_placement *placement, struct ttm_operation_ctx *ctx) ttm_bo_move_buffer() argument
1087 ttm_bo_validate(struct ttm_buffer_object *bo, struct ttm_placement *placement, struct ttm_operation_ctx *ctx) ttm_bo_validate() argument
1130 ttm_bo_init_reserved(struct ttm_bo_device *bdev, struct ttm_buffer_object *bo, unsigned long size, enum ttm_bo_type type, struct ttm_placement *placement, uint32_t page_alignment, struct ttm_operation_ctx *ctx, size_t acc_size, struct sg_table *sg, struct dma_resv *resv, void (*destroy) (struct ttm_buffer_object *)) ttm_bo_init_reserved() argument
1238 ttm_bo_init(struct ttm_bo_device *bdev, struct ttm_buffer_object *bo, unsigned long size, enum ttm_bo_type type, struct ttm_placement *placement, uint32_t page_alignment, bool interruptible, size_t acc_size, struct sg_table *sg, struct dma_resv *resv, void (*destroy) (struct ttm_buffer_object *)) ttm_bo_init() argument
1301 struct ttm_buffer_object *bo; ttm_bo_create() local
1483 ttm_bo_unmap_virtual(struct ttm_buffer_object *bo) ttm_bo_unmap_virtual() argument
1492 ttm_bo_wait(struct ttm_buffer_object *bo, bool interruptible, bool no_wait) ttm_bo_wait() argument
1523 struct ttm_buffer_object *bo; ttm_bo_swapout() local
1625 ttm_bo_tt_destroy(struct ttm_buffer_object *bo) ttm_bo_tt_destroy() argument
1634 ttm_bo_tt_bind(struct ttm_buffer_object *bo, struct ttm_resource *mem) ttm_bo_tt_bind() argument
1639 ttm_bo_tt_unbind(struct ttm_buffer_object *bo) ttm_bo_tt_unbind() argument
[all...]
ttm_bo_vm.c
45 static vm_fault_t ttm_bo_vm_fault_idle(struct ttm_buffer_object *bo, in ttm_bo_vm_fault_idle() argument
51 if (likely(!bo->moving)) in ttm_bo_vm_fault_idle()
57 if (dma_fence_is_signaled(bo->moving)) in ttm_bo_vm_fault_idle()
70 ttm_bo_get(bo); in ttm_bo_vm_fault_idle()
72 (void) dma_fence_wait(bo->moving, true); in ttm_bo_vm_fault_idle()
73 dma_resv_unlock(bo->base.resv); in ttm_bo_vm_fault_idle()
74 ttm_bo_put(bo); in ttm_bo_vm_fault_idle()
81 err = dma_fence_wait(bo->moving, true); in ttm_bo_vm_fault_idle()
89 dma_fence_put(bo->moving); in ttm_bo_vm_fault_idle()
90 bo in ttm_bo_vm_fault_idle()
96 ttm_bo_io_mem_pfn(struct ttm_buffer_object *bo, unsigned long page_offset) ttm_bo_io_mem_pfn() argument
128 ttm_bo_vm_reserve(struct ttm_buffer_object *bo, struct vm_fault *vmf) ttm_bo_vm_reserve() argument
178 ttm_bo_vm_insert_huge(struct vm_fault *vmf, struct ttm_buffer_object *bo, pgoff_t page_offset, pgoff_t fault_page_size, pgprot_t pgprot) ttm_bo_vm_insert_huge() argument
237 ttm_bo_vm_insert_huge(struct vm_fault *vmf, struct ttm_buffer_object *bo, pgoff_t page_offset, pgoff_t fault_page_size, pgprot_t pgprot) ttm_bo_vm_insert_huge() argument
272 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_fault_reserved() local
409 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_fault() local
429 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_open() local
439 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_close() local
446 ttm_bo_vm_access_kmap(struct ttm_buffer_object *bo, unsigned long offset, uint8_t *buf, int len, int write) ttm_bo_vm_access_kmap() argument
488 struct ttm_buffer_object *bo = vma->vm_private_data; ttm_bo_vm_access() local
533 struct ttm_buffer_object *bo = NULL; ttm_bo_vm_lookup() local
552 ttm_bo_mmap_vma_setup(struct ttm_buffer_object *bo, struct vm_area_struct *vma) ttm_bo_mmap_vma_setup() argument
578 struct ttm_buffer_object *bo; ttm_bo_mmap() local
605 ttm_bo_mmap_obj(struct vm_area_struct *vma, struct ttm_buffer_object *bo) ttm_bo_mmap_obj() argument
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/radeon/
radeon_object.c
45 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
54 struct radeon_bo *bo; in radeon_ttm_bo_destroy() local
56 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
58 mutex_lock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
59 list_del_init(&bo->list); in radeon_ttm_bo_destroy()
60 mutex_unlock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
61 radeon_bo_clear_surface_reg(bo); in radeon_ttm_bo_destroy()
62 WARN_ON_ONCE(!list_empty(&bo->va)); in radeon_ttm_bo_destroy()
63 if (bo->tbo.base.import_attach) in radeon_ttm_bo_destroy()
64 drm_prime_gem_destroy(&bo in radeon_ttm_bo_destroy()
69 radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo) radeon_ttm_bo_is_radeon_bo() argument
136 struct radeon_bo *bo; radeon_bo_create() local
219 radeon_bo_kmap(struct radeon_bo *bo, void **ptr) radeon_bo_kmap() argument
247 radeon_bo_kunmap(struct radeon_bo *bo) radeon_bo_kunmap() argument
256 radeon_bo_ref(struct radeon_bo *bo) radeon_bo_ref() argument
265 radeon_bo_unref(struct radeon_bo **bo) radeon_bo_unref() argument
276 radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain, u64 max_offset, u64 *gpu_addr) radeon_bo_pin_restricted() argument
335 radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr) radeon_bo_pin() argument
340 radeon_bo_unpin(struct radeon_bo *bo) radeon_bo_unpin() argument
372 struct radeon_bo *bo, *n; radeon_bo_force_delete() local
488 struct radeon_bo *bo = lobj->robj; radeon_bo_list_validate() local
542 radeon_bo_get_surface_reg(struct radeon_bo *bo) radeon_bo_get_surface_reg() argument
596 radeon_bo_clear_surface_reg(struct radeon_bo *bo) radeon_bo_clear_surface_reg() argument
611 radeon_bo_set_tiling_flags(struct radeon_bo *bo, uint32_t tiling_flags, uint32_t pitch) radeon_bo_set_tiling_flags() argument
671 radeon_bo_get_tiling_flags(struct radeon_bo *bo, uint32_t *tiling_flags, uint32_t *pitch) radeon_bo_get_tiling_flags() argument
683 radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved, bool force_drop) radeon_bo_check_tiling() argument
712 radeon_bo_move_notify(struct ttm_buffer_object *bo) radeon_bo_move_notify() argument
724 radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo) radeon_bo_fault_reserve_notify() argument
786 radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, bool shared) radeon_bo_fence() argument
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/
radeon_object.c
46 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
53 static void radeon_update_memory_usage(struct radeon_bo *bo, in radeon_update_memory_usage() argument
56 struct radeon_device *rdev = bo->rdev; in radeon_update_memory_usage()
57 u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT; in radeon_update_memory_usage()
77 struct radeon_bo *bo; in radeon_ttm_bo_destroy() local
79 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
81 radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1); in radeon_ttm_bo_destroy()
83 mutex_lock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
84 list_del_init(&bo in radeon_ttm_bo_destroy()
94 radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo) radeon_ttm_bo_is_radeon_bo() argument
189 struct radeon_bo *bo; radeon_bo_create() local
276 radeon_bo_kmap(struct radeon_bo *bo, void **ptr) radeon_bo_kmap() argument
299 radeon_bo_kunmap(struct radeon_bo *bo) radeon_bo_kunmap() argument
308 radeon_bo_ref(struct radeon_bo *bo) radeon_bo_ref() argument
317 radeon_bo_unref(struct radeon_bo **bo) radeon_bo_unref() argument
328 radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain, u64 max_offset, u64 *gpu_addr) radeon_bo_pin_restricted() argument
389 radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr) radeon_bo_pin() argument
394 radeon_bo_unpin(struct radeon_bo *bo) radeon_bo_unpin() argument
437 struct radeon_bo *bo, *n; radeon_bo_force_delete() local
551 struct radeon_bo *bo = lobj->robj; radeon_bo_list_validate() local
605 radeon_bo_get_surface_reg(struct radeon_bo *bo) radeon_bo_get_surface_reg() argument
660 radeon_bo_clear_surface_reg(struct radeon_bo *bo) radeon_bo_clear_surface_reg() argument
675 radeon_bo_set_tiling_flags(struct radeon_bo *bo, uint32_t tiling_flags, uint32_t pitch) radeon_bo_set_tiling_flags() argument
735 radeon_bo_get_tiling_flags(struct radeon_bo *bo, uint32_t *tiling_flags, uint32_t *pitch) radeon_bo_get_tiling_flags() argument
747 radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved, bool force_drop) radeon_bo_check_tiling() argument
776 radeon_bo_move_notify(struct ttm_buffer_object *bo, bool evict, struct ttm_resource *new_mem) radeon_bo_move_notify() argument
797 radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo) radeon_bo_fault_reserve_notify() argument
847 radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type, bool no_wait) radeon_bo_wait() argument
870 radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, bool shared) radeon_bo_fence() argument
[all...]
radeon_prime.c
36 struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_get_sg_table() local
37 int npages = bo->tbo.num_pages; in radeon_gem_prime_get_sg_table()
39 return drm_prime_pages_to_sg(obj->dev, bo->tbo.ttm->pages, npages); in radeon_gem_prime_get_sg_table()
44 struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_vmap() local
47 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in radeon_gem_prime_vmap()
48 &bo->dma_buf_vmap); in radeon_gem_prime_vmap()
52 return bo->dma_buf_vmap.virtual; in radeon_gem_prime_vmap()
57 struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_vunmap() local
59 ttm_bo_kunmap(&bo in radeon_gem_prime_vunmap()
68 struct radeon_bo *bo; radeon_gem_prime_import_sg_table() local
88 struct radeon_bo *bo = gem_to_radeon_bo(obj); radeon_gem_prime_pin() local
116 struct radeon_bo *bo = gem_to_radeon_bo(obj); radeon_gem_prime_unpin() local
133 struct radeon_bo *bo = gem_to_radeon_bo(gobj); radeon_gem_prime_export() local
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/tegra/
gem.c
23 static void tegra_bo_put(struct host1x_bo *bo) in tegra_bo_put() argument
25 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_put()
53 static struct sg_table *tegra_bo_pin(struct device *dev, struct host1x_bo *bo, in tegra_bo_pin() argument
56 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_pin()
132 static void *tegra_bo_mmap(struct host1x_bo *bo) in tegra_bo_mmap() argument
134 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_mmap()
145 static void tegra_bo_munmap(struct host1x_bo *bo, void *addr) in tegra_bo_munmap() argument
147 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_munmap()
157 static struct host1x_bo *tegra_bo_get(struct host1x_bo *bo) in tegra_bo_get() argument
159 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_get()
175 tegra_bo_iommu_map(struct tegra_drm *tegra, struct tegra_bo *bo) tegra_bo_iommu_map() argument
218 tegra_bo_iommu_unmap(struct tegra_drm *tegra, struct tegra_bo *bo) tegra_bo_iommu_unmap() argument
236 struct tegra_bo *bo; tegra_bo_alloc_object() local
263 tegra_bo_free(struct drm_device *drm, struct tegra_bo *bo) tegra_bo_free() argument
275 tegra_bo_get_pages(struct drm_device *drm, struct tegra_bo *bo) tegra_bo_get_pages() argument
305 tegra_bo_alloc(struct drm_device *drm, struct tegra_bo *bo) tegra_bo_alloc() argument
339 struct tegra_bo *bo; tegra_bo_create() local
370 struct tegra_bo *bo; tegra_bo_create_with_handle() local
393 struct tegra_bo *bo; tegra_bo_import() local
439 struct tegra_bo *bo = to_tegra_bo(gem); tegra_bo_free_object() local
461 struct tegra_bo *bo; tegra_bo_dumb_create() local
478 struct tegra_bo *bo = to_tegra_bo(gem); tegra_bo_fault() local
499 struct tegra_bo *bo = to_tegra_bo(gem); __tegra_gem_mmap() local
552 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_map_dma_buf() local
585 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_unmap_dma_buf() local
603 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_begin_cpu_access() local
616 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_end_cpu_access() local
640 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_vmap() local
678 struct tegra_bo *bo; tegra_gem_prime_import() local
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/qxl/
qxl_object.c
32 static int __qxl_bo_pin(struct qxl_bo *bo);
33 static void __qxl_bo_unpin(struct qxl_bo *bo);
37 struct qxl_bo *bo; in qxl_ttm_bo_destroy() local
40 bo = to_qxl_bo(tbo); in qxl_ttm_bo_destroy()
41 qdev = to_qxl(bo->tbo.base.dev); in qxl_ttm_bo_destroy()
43 qxl_surface_evict(qdev, bo, false); in qxl_ttm_bo_destroy()
44 WARN_ON_ONCE(bo->map_count > 0); in qxl_ttm_bo_destroy()
46 list_del_init(&bo->list); in qxl_ttm_bo_destroy()
48 drm_gem_object_release(&bo->tbo.base); in qxl_ttm_bo_destroy()
49 kfree(bo); in qxl_ttm_bo_destroy()
52 qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo) qxl_ttm_bo_is_qxl_bo() argument
115 struct qxl_bo *bo; qxl_bo_create() local
161 qxl_bo_vmap_locked(struct qxl_bo *bo, struct iosys_map *map) qxl_bo_vmap_locked() argument
194 qxl_bo_vmap(struct qxl_bo *bo, struct iosys_map *map) qxl_bo_vmap() argument
207 qxl_bo_kmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, int page_offset) qxl_bo_kmap_atomic_page() argument
240 qxl_bo_vunmap_locked(struct qxl_bo *bo) qxl_bo_vunmap_locked() argument
254 qxl_bo_vunmap(struct qxl_bo *bo) qxl_bo_vunmap() argument
267 qxl_bo_kunmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, void *pmap) qxl_bo_kunmap_atomic_page() argument
280 qxl_bo_unref(struct qxl_bo **bo) qxl_bo_unref() argument
289 qxl_bo_ref(struct qxl_bo *bo) qxl_bo_ref() argument
295 __qxl_bo_pin(struct qxl_bo *bo) __qxl_bo_pin() argument
314 __qxl_bo_unpin(struct qxl_bo *bo) __qxl_bo_unpin() argument
324 qxl_bo_pin(struct qxl_bo *bo) qxl_bo_pin() argument
342 qxl_bo_unpin(struct qxl_bo *bo) qxl_bo_unpin() argument
357 struct qxl_bo *bo, *n; qxl_bo_force_delete() local
384 qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo) qxl_bo_check_id() argument
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/vc4/
vc4_bo.c
130 struct vc4_bo *bo = to_vc4_bo(gem_obj); in vc4_bo_set_label() local
140 vc4->bo_labels[bo->label].num_allocated--; in vc4_bo_set_label()
141 vc4->bo_labels[bo->label].size_allocated -= gem_obj->size; in vc4_bo_set_label()
143 if (vc4->bo_labels[bo->label].num_allocated == 0 && in vc4_bo_set_label()
144 is_user_label(bo->label)) { in vc4_bo_set_label()
150 kfree(vc4->bo_labels[bo->label].name); in vc4_bo_set_label()
151 vc4->bo_labels[bo->label].name = NULL; in vc4_bo_set_label()
154 bo->label = label; in vc4_bo_set_label()
162 static void vc4_bo_destroy(struct vc4_bo *bo) in vc4_bo_destroy() argument
164 struct drm_gem_object *obj = &bo in vc4_bo_destroy()
181 vc4_bo_remove_from_cache(struct vc4_bo *bo) vc4_bo_remove_from_cache() argument
237 struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list, vc4_bo_cache_purge() local
245 vc4_bo_add_to_purgeable_pool(struct vc4_bo *bo) vc4_bo_add_to_purgeable_pool() argument
256 vc4_bo_remove_from_purgeable_pool_locked(struct vc4_bo *bo) vc4_bo_remove_from_purgeable_pool_locked() argument
277 vc4_bo_remove_from_purgeable_pool(struct vc4_bo *bo) vc4_bo_remove_from_purgeable_pool() argument
288 struct vc4_bo *bo = to_vc4_bo(obj); vc4_bo_purge() local
307 struct vc4_bo *bo = list_first_entry(&vc4->purgeable.list, vc4_bo_userspace_cache_purge() local
354 struct vc4_bo *bo = NULL; vc4_bo_get_from_cache() local
388 struct vc4_bo *bo; vc4_create_object() local
412 struct vc4_bo *bo; vc4_bo_create() local
477 struct vc4_bo *bo = NULL; vc4_dumb_create() local
506 struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list, vc4_bo_cache_free_old() local
527 struct vc4_bo *bo = to_vc4_bo(gem_bo); vc4_free_object() local
600 vc4_bo_inc_usecnt(struct vc4_bo *bo) vc4_bo_inc_usecnt() argument
636 vc4_bo_dec_usecnt(struct vc4_bo *bo) vc4_bo_dec_usecnt() argument
660 struct vc4_bo *bo = to_vc4_bo(obj); vc4_prime_export() local
691 struct vc4_bo *bo = to_vc4_bo(obj); vc4_fault() local
707 struct vc4_bo *bo; vc4_mmap() local
760 struct vc4_bo *bo = to_vc4_bo(obj); vc4_prime_mmap() local
772 struct vc4_bo *bo = to_vc4_bo(obj); vc4_prime_vmap() local
819 struct vc4_bo *bo = NULL; vc4_create_bo_ioctl() local
868 struct vc4_bo *bo = NULL; vc4_create_shader_bo_ioctl() local
943 struct vc4_bo *bo; vc4_set_tiling_ioctl() local
986 struct vc4_bo *bo; vc4_get_tiling_ioctl() local
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/qxl/
qxl_object.c
32 struct qxl_bo *bo; in qxl_ttm_bo_destroy() local
35 bo = to_qxl_bo(tbo); in qxl_ttm_bo_destroy()
36 qdev = to_qxl(bo->tbo.base.dev); in qxl_ttm_bo_destroy()
38 qxl_surface_evict(qdev, bo, false); in qxl_ttm_bo_destroy()
39 WARN_ON_ONCE(bo->map_count > 0); in qxl_ttm_bo_destroy()
41 list_del_init(&bo->list); in qxl_ttm_bo_destroy()
43 drm_gem_object_release(&bo->tbo.base); in qxl_ttm_bo_destroy()
44 kfree(bo); in qxl_ttm_bo_destroy()
47 bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo) in qxl_ttm_bo_is_qxl_bo() argument
49 if (bo in qxl_ttm_bo_is_qxl_bo()
111 struct qxl_bo *bo; qxl_bo_create() local
155 qxl_bo_kmap(struct qxl_bo *bo, void **ptr) qxl_bo_kmap() argument
176 qxl_bo_kmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, int page_offset) qxl_bo_kmap_atomic_page() argument
207 qxl_bo_kunmap(struct qxl_bo *bo) qxl_bo_kunmap() argument
218 qxl_bo_kunmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, void *pmap) qxl_bo_kunmap_atomic_page() argument
231 qxl_bo_unref(struct qxl_bo **bo) qxl_bo_unref() argument
240 qxl_bo_ref(struct qxl_bo *bo) qxl_bo_ref() argument
246 __qxl_bo_pin(struct qxl_bo *bo) __qxl_bo_pin() argument
266 __qxl_bo_unpin(struct qxl_bo *bo) __qxl_bo_unpin() argument
292 qxl_bo_pin(struct qxl_bo *bo) qxl_bo_pin() argument
310 qxl_bo_unpin(struct qxl_bo *bo) qxl_bo_unpin() argument
325 struct qxl_bo *bo, *n; qxl_bo_force_delete() local
352 qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo) qxl_bo_check_id() argument
[all...]
qxl_object.h
30 static inline int qxl_bo_reserve(struct qxl_bo *bo) in qxl_bo_reserve() argument
34 r = ttm_bo_reserve(&bo->tbo, true, false, NULL); in qxl_bo_reserve()
37 struct drm_device *ddev = bo->tbo.base.dev; in qxl_bo_reserve()
39 dev_err(ddev->dev, "%p reserve failed\n", bo); in qxl_bo_reserve()
46 static inline void qxl_bo_unreserve(struct qxl_bo *bo) in qxl_bo_unreserve() argument
48 ttm_bo_unreserve(&bo->tbo); in qxl_bo_unreserve()
51 static inline unsigned long qxl_bo_size(struct qxl_bo *bo) in qxl_bo_size() argument
53 return bo->tbo.num_pages << PAGE_SHIFT; in qxl_bo_size()
56 static inline u64 qxl_bo_mmap_offset(struct qxl_bo *bo) in qxl_bo_mmap_offset() argument
58 return drm_vma_node_offset_addr(&bo in qxl_bo_mmap_offset()
61 qxl_bo_wait(struct qxl_bo *bo, u32 *mem_type, bool no_wait) qxl_bo_wait() argument
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/vc4/
vc4_bo.c
134 struct vc4_bo *bo = to_vc4_bo(gem_obj); in vc4_bo_set_label() local
144 vc4->bo_labels[bo->label].num_allocated--; in vc4_bo_set_label()
145 vc4->bo_labels[bo->label].size_allocated -= gem_obj->size; in vc4_bo_set_label()
147 if (vc4->bo_labels[bo->label].num_allocated == 0 && in vc4_bo_set_label()
148 is_user_label(bo->label)) { in vc4_bo_set_label()
154 kfree(vc4->bo_labels[bo->label].name); in vc4_bo_set_label()
155 vc4->bo_labels[bo->label].name = NULL; in vc4_bo_set_label()
158 bo->label = label; in vc4_bo_set_label()
166 static void vc4_bo_destroy(struct vc4_bo *bo) in vc4_bo_destroy() argument
168 struct drm_gem_object *obj = &bo in vc4_bo_destroy()
186 vc4_bo_remove_from_cache(struct vc4_bo *bo) vc4_bo_remove_from_cache() argument
242 struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list, vc4_bo_cache_purge() local
250 vc4_bo_add_to_purgeable_pool(struct vc4_bo *bo) vc4_bo_add_to_purgeable_pool() argument
264 vc4_bo_remove_from_purgeable_pool_locked(struct vc4_bo *bo) vc4_bo_remove_from_purgeable_pool_locked() argument
288 vc4_bo_remove_from_purgeable_pool(struct vc4_bo *bo) vc4_bo_remove_from_purgeable_pool() argument
299 struct vc4_bo *bo = to_vc4_bo(obj); vc4_bo_purge() local
318 struct vc4_bo *bo = list_first_entry(&vc4->purgeable.list, vc4_bo_userspace_cache_purge() local
365 struct vc4_bo *bo = NULL; vc4_bo_get_from_cache() local
397 struct vc4_bo *bo; vc4_create_object() local
428 struct vc4_bo *bo; vc4_bo_create() local
496 struct vc4_bo *bo = NULL; vc4_bo_dumb_create() local
526 struct vc4_bo *bo = list_last_entry(&vc4->bo_cache.time_list, vc4_bo_cache_free_old() local
547 struct vc4_bo *bo = to_vc4_bo(gem_bo); vc4_free_object() local
620 vc4_bo_inc_usecnt(struct vc4_bo *bo) vc4_bo_inc_usecnt() argument
660 vc4_bo_dec_usecnt(struct vc4_bo *bo) vc4_bo_dec_usecnt() argument
689 struct vc4_bo *bo = to_vc4_bo(obj); vc4_prime_export() local
720 struct vc4_bo *bo = to_vc4_bo(obj); vc4_fault() local
734 struct vc4_bo *bo = to_vc4_bo(obj); vc4_gem_object_mmap() local
783 struct vc4_bo *bo = NULL; vc4_create_bo_ioctl() local
839 struct vc4_bo *bo = NULL; vc4_create_shader_bo_ioctl() local
918 struct vc4_bo *bo; vc4_set_tiling_ioctl() local
965 struct vc4_bo *bo; vc4_get_tiling_ioctl() local
[all...]
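
Many of the vc4_bo.c hits deal with purgeable buffers: vc4_bo_add_to_purgeable_pool() makes a BO reclaimable, vc4_bo_purge() drops its backing storage, and vc4_bo_inc_usecnt()/vc4_bo_dec_usecnt() pin the contents while the CPU or a job is using them. A hedged sketch of the guard pattern follows; only the inc/dec calls come from the listing, the surrounding helper is an assumption.

```c
/* Sketch: guard access to a purgeable vc4 BO with the use count.
 * The helper is hypothetical; vc4_drv.h is the driver-local header that
 * declares struct vc4_bo and the usecnt functions. */
#include "vc4_drv.h"

static int touch_purgeable_bo(struct vc4_bo *bo)
{
	int ret;

	ret = vc4_bo_inc_usecnt(bo);	/* fails if the BO was already purged */
	if (ret)
		return ret;

	/* ... access the buffer's backing pages, set up a job, etc. ... */

	vc4_bo_dec_usecnt(bo);		/* last user gone: purgeable again */
	return 0;
}
```
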
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
58 * @bo: &amdgpu_bo buffer object
63 static void amdgpu_bo_subtract_pin_size(struct amdgpu_bo *bo) in amdgpu_bo_subtract_pin_size() argument
65 struct amdgpu_device *adev = amdgpu_ttm_adev(bo->tbo.bdev); in amdgpu_bo_subtract_pin_size()
67 if (bo->tbo.mem.mem_type == TTM_PL_VRAM) { in amdgpu_bo_subtract_pin_size()
68 atomic64_sub(amdgpu_bo_size(bo), &adev->vram_pin_size); in amdgpu_bo_subtract_pin_size()
69 atomic64_sub(amdgpu_vram_mgr_bo_visible_size(bo), in amdgpu_bo_subtract_pin_size()
71 } else if (bo->tbo.mem.mem_type == TTM_PL_TT) { in amdgpu_bo_subtract_pin_size()
72 atomic64_sub(amdgpu_bo_size(bo), &adev->gart_pin_size); in amdgpu_bo_subtract_pin_size()
79 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); in amdgpu_bo_destroy() local
81 if (bo in amdgpu_bo_destroy()
111 amdgpu_bo_is_amdgpu_bo(struct ttm_buffer_object *bo) amdgpu_bo_is_amdgpu_bo() argument
428 amdgpu_bo_free_kernel(struct amdgpu_bo **bo, u64 *gpu_addr, void **cpu_addr) amdgpu_bo_free_kernel() argument
533 struct amdgpu_bo *bo; amdgpu_bo_do_create() local
634 amdgpu_bo_create_shadow(struct amdgpu_device *adev, unsigned long size, struct amdgpu_bo *bo) amdgpu_bo_create_shadow() argument
718 amdgpu_bo_validate(struct amdgpu_bo *bo) amdgpu_bo_validate() argument
779 amdgpu_bo_kmap(struct amdgpu_bo *bo, void **ptr) amdgpu_bo_kmap() argument
818 amdgpu_bo_kptr(struct amdgpu_bo *bo) amdgpu_bo_kptr() argument
831 amdgpu_bo_kunmap(struct amdgpu_bo *bo) amdgpu_bo_kunmap() argument
846 amdgpu_bo_ref(struct amdgpu_bo *bo) amdgpu_bo_ref() argument
861 amdgpu_bo_unref(struct amdgpu_bo **bo) amdgpu_bo_unref() argument
895 amdgpu_bo_pin_restricted(struct amdgpu_bo *bo, u32 domain, u64 min_offset, u64 max_offset) amdgpu_bo_pin_restricted() argument
998 amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain) amdgpu_bo_pin() argument
1013 amdgpu_bo_unpin(struct amdgpu_bo *bo) amdgpu_bo_unpin() argument
1144 amdgpu_bo_fbdev_mmap(struct amdgpu_bo *bo, struct vm_area_struct *vma) amdgpu_bo_fbdev_mmap() argument
1164 amdgpu_bo_set_tiling_flags(struct amdgpu_bo *bo, u64 tiling_flags) amdgpu_bo_set_tiling_flags() argument
1184 amdgpu_bo_get_tiling_flags(struct amdgpu_bo *bo, u64 *tiling_flags) amdgpu_bo_get_tiling_flags() argument
1205 amdgpu_bo_set_metadata(struct amdgpu_bo *bo, void *metadata, uint32_t metadata_size, uint64_t flags) amdgpu_bo_set_metadata() argument
1249 amdgpu_bo_get_metadata(struct amdgpu_bo *bo, void *buffer, size_t buffer_size, uint32_t *metadata_size, uint64_t *flags) amdgpu_bo_get_metadata() argument
1282 amdgpu_bo_move_notify(struct ttm_buffer_object *bo, bool evict, struct ttm_resource *new_mem) amdgpu_bo_move_notify() argument
1321 amdgpu_bo_release_notify(struct ttm_buffer_object *bo) amdgpu_bo_release_notify() argument
1368 amdgpu_bo_fault_reserve_notify(struct ttm_buffer_object *bo) amdgpu_bo_fault_reserve_notify() argument
1426 amdgpu_bo_fence(struct amdgpu_bo *bo, struct dma_fence *fence, bool shared) amdgpu_bo_fence() argument
1475 amdgpu_bo_sync_wait(struct amdgpu_bo *bo, void *owner, bool intr) amdgpu_bo_sync_wait() argument
1493 amdgpu_bo_gpu_offset(struct amdgpu_bo *bo) amdgpu_bo_gpu_offset() argument
1512 amdgpu_bo_gpu_offset_no_check(struct amdgpu_bo *bo) amdgpu_bo_gpu_offset_no_check() argument
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/tegra/
gem.c
52 static void tegra_bo_put(struct host1x_bo *bo) in tegra_bo_put() argument
54 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_put()
59 static struct host1x_bo_mapping *tegra_bo_pin(struct device *dev, struct host1x_bo *bo, in tegra_bo_pin() argument
62 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_pin()
72 map->bo = host1x_bo_get(bo); in tegra_bo_pin()
173 host1x_bo_put(map->bo); in tegra_bo_unpin()
177 static void *tegra_bo_mmap(struct host1x_bo *bo) in tegra_bo_mmap() argument
179 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_mmap()
194 static void tegra_bo_munmap(struct host1x_bo *bo, voi argument
207 tegra_bo_get(struct host1x_bo *bo) tegra_bo_get() argument
225 tegra_bo_iommu_map(struct tegra_drm *tegra, struct tegra_bo *bo) tegra_bo_iommu_map() argument
268 tegra_bo_iommu_unmap(struct tegra_drm *tegra, struct tegra_bo *bo) tegra_bo_iommu_unmap() argument
292 struct tegra_bo *bo; tegra_bo_alloc_object() local
321 tegra_bo_free(struct drm_device *drm, struct tegra_bo *bo) tegra_bo_free() argument
333 tegra_bo_get_pages(struct drm_device *drm, struct tegra_bo *bo) tegra_bo_get_pages() argument
363 tegra_bo_alloc(struct drm_device *drm, struct tegra_bo *bo) tegra_bo_alloc() argument
397 struct tegra_bo *bo; tegra_bo_create() local
428 struct tegra_bo *bo; tegra_bo_create_with_handle() local
451 struct tegra_bo *bo; tegra_bo_import() local
498 struct tegra_bo *bo = to_tegra_bo(gem); tegra_bo_free_object() local
529 struct tegra_bo *bo; tegra_bo_dumb_create() local
546 struct tegra_bo *bo = to_tegra_bo(gem); tegra_bo_fault() local
567 struct tegra_bo *bo = to_tegra_bo(gem); __tegra_gem_mmap() local
619 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_map_dma_buf() local
652 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_unmap_dma_buf() local
670 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_begin_cpu_access() local
683 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_end_cpu_access() local
707 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_vmap() local
722 struct tegra_bo *bo = to_tegra_bo(gem); tegra_gem_prime_vunmap() local
756 struct tegra_bo *bo; tegra_gem_prime_import() local
777 struct tegra_bo *bo; tegra_gem_lookup() local
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c
58 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); in amdgpu_bo_destroy() local
60 amdgpu_bo_kunmap(bo); in amdgpu_bo_destroy()
62 if (bo->tbo.base.import_attach) in amdgpu_bo_destroy()
63 drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg); in amdgpu_bo_destroy()
64 drm_gem_object_release(&bo->tbo.base); in amdgpu_bo_destroy()
65 amdgpu_bo_unref(&bo->parent); in amdgpu_bo_destroy()
66 kvfree(bo); in amdgpu_bo_destroy()
71 struct amdgpu_bo *bo = ttm_to_amdgpu_bo(tbo); in amdgpu_bo_user_destroy() local
74 ubo = to_amdgpu_bo_user(bo); in amdgpu_bo_user_destroy()
82 struct amdgpu_bo *shadow_bo = ttm_to_amdgpu_bo(tbo), *bo; amdgpu_bo_vm_destroy() local
107 amdgpu_bo_is_amdgpu_bo(struct ttm_buffer_object *bo) amdgpu_bo_is_amdgpu_bo() argument
438 amdgpu_bo_free_kernel(struct amdgpu_bo **bo, u64 *gpu_addr, void **cpu_addr) amdgpu_bo_free_kernel() argument
555 struct amdgpu_bo *bo; amdgpu_bo_create() local
783 amdgpu_bo_kmap(struct amdgpu_bo *bo, void **ptr) amdgpu_bo_kmap() argument
822 amdgpu_bo_kptr(struct amdgpu_bo *bo) amdgpu_bo_kptr() argument
835 amdgpu_bo_kunmap(struct amdgpu_bo *bo) amdgpu_bo_kunmap() argument
850 amdgpu_bo_ref(struct amdgpu_bo *bo) amdgpu_bo_ref() argument
865 amdgpu_bo_unref(struct amdgpu_bo **bo) amdgpu_bo_unref() argument
899 amdgpu_bo_pin_restricted(struct amdgpu_bo *bo, u32 domain, u64 min_offset, u64 max_offset) amdgpu_bo_pin_restricted() argument
1006 amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain) amdgpu_bo_pin() argument
1022 amdgpu_bo_unpin(struct amdgpu_bo *bo) amdgpu_bo_unpin() argument
1126 amdgpu_bo_set_tiling_flags(struct amdgpu_bo *bo, u64 tiling_flags) amdgpu_bo_set_tiling_flags() argument
1149 amdgpu_bo_get_tiling_flags(struct amdgpu_bo *bo, u64 *tiling_flags) amdgpu_bo_get_tiling_flags() argument
1174 amdgpu_bo_set_metadata(struct amdgpu_bo *bo, void *metadata, u32 metadata_size, uint64_t flags) amdgpu_bo_set_metadata() argument
1221 amdgpu_bo_get_metadata(struct amdgpu_bo *bo, void *buffer, size_t buffer_size, uint32_t *metadata_size, uint64_t *flags) amdgpu_bo_get_metadata() argument
1258 amdgpu_bo_move_notify(struct ttm_buffer_object *bo, bool evict) amdgpu_bo_move_notify() argument
1280 amdgpu_bo_get_memory(struct amdgpu_bo *bo, struct amdgpu_mem_stats *stats) amdgpu_bo_get_memory() argument
1328 amdgpu_bo_release_notify(struct ttm_buffer_object *bo) amdgpu_bo_release_notify() argument
1377 amdgpu_bo_fault_reserve_notify(struct ttm_buffer_object *bo) amdgpu_bo_fault_reserve_notify() argument
1429 amdgpu_bo_fence(struct amdgpu_bo *bo, struct dma_fence *fence, bool shared) amdgpu_bo_fence() argument
1484 amdgpu_bo_sync_wait(struct amdgpu_bo *bo, void *owner, bool intr) amdgpu_bo_sync_wait() argument
1502 amdgpu_bo_gpu_offset(struct amdgpu_bo *bo) amdgpu_bo_gpu_offset() argument
1521 amdgpu_bo_gpu_offset_no_check(struct amdgpu_bo *bo) amdgpu_bo_gpu_offset_no_check() argument
1572 amdgpu_bo_print_info(int id, struct amdgpu_bo *bo, struct seq_file *m) amdgpu_bo_print_info() argument
[all...]
amdgpu_object.h
45 #define to_amdgpu_bo_user(abo) container_of((abo), struct amdgpu_bo_user, bo)
46 #define to_amdgpu_bo_vm(abo) container_of((abo), struct amdgpu_bo_vm, bo)
58 void (*destroy)(struct ttm_buffer_object *bo);
63 /* bo virtual addresses in a vm */
79 /* protected by bo being reserved */
123 struct amdgpu_bo bo; member
132 struct amdgpu_bo bo; member
194 * amdgpu_bo_reserve - reserve bo
195 * @bo: bo structur
202 amdgpu_bo_reserve(struct amdgpu_bo *bo, bool no_intr) amdgpu_bo_reserve() argument
216 amdgpu_bo_unreserve(struct amdgpu_bo *bo) amdgpu_bo_unreserve() argument
221 amdgpu_bo_size(struct amdgpu_bo *bo) amdgpu_bo_size() argument
226 amdgpu_bo_ngpu_pages(struct amdgpu_bo *bo) amdgpu_bo_ngpu_pages() argument
231 amdgpu_bo_gpu_page_alignment(struct amdgpu_bo *bo) amdgpu_bo_gpu_page_alignment() argument
242 amdgpu_bo_mmap_offset(struct amdgpu_bo *bo) amdgpu_bo_mmap_offset() argument
250 amdgpu_bo_in_cpu_visible_vram(struct amdgpu_bo *bo) amdgpu_bo_in_cpu_visible_vram() argument
272 amdgpu_bo_explicit_sync(struct amdgpu_bo *bo) amdgpu_bo_explicit_sync() argument
283 amdgpu_bo_encrypted(struct amdgpu_bo *bo) amdgpu_bo_encrypted() argument
296 amdgpu_bo_shadowed(struct amdgpu_bo *bo) amdgpu_bo_shadowed() argument
[all...]
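
amdgpu_object.h wraps TTM reservation in small inline helpers (amdgpu_bo_reserve()/amdgpu_bo_unreserve()), and amdgpu_object.c layers kmap/kunmap and pin/unpin on top of them. A minimal sketch of the usual reserve, map, touch, unmap, unreserve pattern follows; it assumes the BO already exists in a CPU-mappable placement and trims error handling.

```c
/* Sketch: CPU access to an existing amdgpu BO under reservation.
 * Assumes the BO is placed somewhere kmap-able (GTT or visible VRAM). */
#include <linux/string.h>
#include "amdgpu_object.h"

static int fill_bo_with_zeros(struct amdgpu_bo *bo)
{
	void *cpu;
	int r;

	r = amdgpu_bo_reserve(bo, false);	/* interruptible reserve */
	if (r)
		return r;

	r = amdgpu_bo_kmap(bo, &cpu);		/* kernel CPU mapping */
	if (r)
		goto out_unreserve;

	memset(cpu, 0, amdgpu_bo_size(bo));
	amdgpu_bo_kunmap(bo);

out_unreserve:
	amdgpu_bo_unreserve(bo);
	return r;
}
```
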
/kernel/linux/linux-6.6/include/drm/ttm/
H A Dttm_bo.h78 * @type: The bo type.
115 * Members protected by the bo::resv::reserved lock.
132 * and the bo::lock when written to. Can be read with
145 * Object describing a kernel mapping. Since a TTM bo may be located
160 struct ttm_buffer_object *bo; member
191 * @bo: The buffer object.
193 static inline void ttm_bo_get(struct ttm_buffer_object *bo) in ttm_bo_get() argument
195 kref_get(&bo->kref); in ttm_bo_get()
201 * @bo: The buffer object.
206 * Returns: @bo i
209 ttm_bo_get_unless_zero(struct ttm_buffer_object *bo) ttm_bo_get_unless_zero() argument
238 ttm_bo_reserve(struct ttm_buffer_object *bo, bool interruptible, bool no_wait, struct ww_acquire_ctx *ticket) ttm_bo_reserve() argument
273 ttm_bo_reserve_slowpath(struct ttm_buffer_object *bo, bool interruptible, struct ww_acquire_ctx *ticket) ttm_bo_reserve_slowpath() argument
291 ttm_bo_move_to_lru_tail_unlocked(struct ttm_buffer_object *bo) ttm_bo_move_to_lru_tail_unlocked() argument
298 ttm_bo_assign_mem(struct ttm_buffer_object *bo, struct ttm_resource *new_mem) ttm_bo_assign_mem() argument
312 ttm_bo_move_null(struct ttm_buffer_object *bo, struct ttm_resource *new_mem) ttm_bo_move_null() argument
326 ttm_bo_unreserve(struct ttm_buffer_object *bo) ttm_bo_unreserve() argument
[all...]
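
The ttm_bo.h hits cover the two primitives every driver above builds on: ttm_bo_get()/ttm_bo_put() for reference counting (a kref_get() on bo->kref) and ttm_bo_reserve()/ttm_bo_unreserve() for the dma_resv-based reservation, with an optional ww_acquire_ctx ticket when several BOs must be locked together. A hedged sketch of holding one BO across a ttm_bo_validate() call follows; the placement and operation context are assumed to be prepared by the caller.

```c
/* Sketch: take a reference, reserve, revalidate placement, release.
 * 'placement' and 'ctx' are assumed to be filled in by the caller. */
#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_placement.h>

static int revalidate_bo(struct ttm_buffer_object *bo,
			 struct ttm_placement *placement,
			 struct ttm_operation_ctx *ctx)
{
	int ret;

	ttm_bo_get(bo);				/* hold the object across the call */

	ret = ttm_bo_reserve(bo, true, false, NULL);	/* interruptible, no ticket */
	if (ret)
		goto out_put;

	ret = ttm_bo_validate(bo, placement, ctx);	/* move to placement if needed */

	ttm_bo_unreserve(bo);
out_put:
	ttm_bo_put(bo);
	return ret;
}
```
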
