/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/

amdgpu_ttm.c
     78  return ttm_range_man_init(&adev->mman.bdev, type,  in amdgpu_ttm_init_on_chip()
    135  if (!adev->mman.buffer_funcs_enabled) {  in amdgpu_evict_flags()
    198  BUG_ON(adev->mman.buffer_funcs->copy_max_bytes <  in amdgpu_ttm_map_buffer()
    228  num_dw = ALIGN(adev->mman.buffer_funcs->copy_num_dw, 8);  in amdgpu_ttm_map_buffer()
    231  r = amdgpu_job_alloc_with_ib(adev, &adev->mman.high_pr,  in amdgpu_ttm_map_buffer()
    299  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;  in amdgpu_ttm_copy_mem_to_mem()
    304  if (!adev->mman.buffer_funcs_enabled) {  in amdgpu_ttm_copy_mem_to_mem()
    312  mutex_lock(&adev->mman.gtt_window_lock);  in amdgpu_ttm_copy_mem_to_mem()
    343  mutex_unlock(&adev->mman.gtt_window_lock);  in amdgpu_ttm_copy_mem_to_mem()
    518  if (adev->mman  in amdgpu_bo_move()
    [all...]
amdgpu_preempt_mgr.c
     42  struct ttm_resource_manager *man = &adev->mman.preempt_mgr;  in mem_info_preempt_used_show()
    102  struct ttm_resource_manager *man = &adev->mman.preempt_mgr;  in amdgpu_preempt_mgr_init()
    108  ttm_resource_manager_init(man, &adev->mman.bdev, (1 << 30));  in amdgpu_preempt_mgr_init()
    116  ttm_set_driver_manager(&adev->mman.bdev, AMDGPU_PL_PREEMPT, man);  in amdgpu_preempt_mgr_init()
    131  struct ttm_resource_manager *man = &adev->mman.preempt_mgr;  in amdgpu_preempt_mgr_fini()
    136  ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man);  in amdgpu_preempt_mgr_fini()
    143  ttm_set_driver_manager(&adev->mman.bdev, AMDGPU_PL_PREEMPT, NULL);  in amdgpu_preempt_mgr_fini()
amdgpu_gtt_mgr.c
     51  man = ttm_manager_type(&adev->mman.bdev, TTM_PL_TT);  in amdgpu_mem_info_gtt_total_show()
     69  struct ttm_resource_manager *man = &adev->mman.gtt_mgr.manager;  in amdgpu_mem_info_gtt_used_show()
    196  adev = container_of(mgr, typeof(*adev), mman.gtt_mgr);  in amdgpu_gtt_mgr_recover()
    279  struct amdgpu_gtt_mgr *mgr = &adev->mman.gtt_mgr;  in amdgpu_gtt_mgr_init()
    286  ttm_resource_manager_init(man, &adev->mman.bdev, gtt_size);  in amdgpu_gtt_mgr_init()
    293  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_TT, &mgr->manager);  in amdgpu_gtt_mgr_init()
    308  struct amdgpu_gtt_mgr *mgr = &adev->mman.gtt_mgr;  in amdgpu_gtt_mgr_fini()
    314  ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man);  in amdgpu_gtt_mgr_fini()
    323  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_TT, NULL);  in amdgpu_gtt_mgr_fini()
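The preempt, GTT and VRAM manager hits in this tree all follow the same TTM life cycle: ttm_resource_manager_init() plus ttm_set_driver_manager() at init time, then ttm_resource_manager_evict_all() and ttm_set_driver_manager(..., NULL) at teardown. Below is a minimal sketch of that pattern, assuming the 6.6 TTM API; the my_gtt_mgr_* names are placeholders and the bdev/man pointers are passed in rather than taken from a real amdgpu_device.

#include <drm/ttm/ttm_device.h>
#include <drm/ttm/ttm_placement.h>
#include <drm/ttm/ttm_resource.h>

/* Register a driver-managed TT domain with TTM (sketch only). */
static void my_gtt_mgr_init(struct ttm_device *bdev,
			    struct ttm_resource_manager *man,
			    uint64_t gtt_size)
{
	man->use_tt = true;				/* backed by system pages */
	ttm_resource_manager_init(man, bdev, gtt_size);	/* size + LRU state */
	ttm_set_driver_manager(bdev, TTM_PL_TT, man);	/* hook into the device */
	ttm_resource_manager_set_used(man, true);	/* allow allocations */
}

/* Tear the domain down again: stop allocations, evict, unhook. */
static int my_gtt_mgr_fini(struct ttm_device *bdev,
			   struct ttm_resource_manager *man)
{
	int ret;

	ttm_resource_manager_set_used(man, false);
	ret = ttm_resource_manager_evict_all(bdev, man);
	if (ret)
		return ret;			/* still busy, keep the manager */
	ttm_set_driver_manager(bdev, TTM_PL_TT, NULL);
	ttm_resource_manager_cleanup(man);
	return 0;
}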
amdgpu_gmc.c
    696  adev->mman.stolen_reserved_offset = 0;  in amdgpu_gmc_get_vbios_allocations()
    697  adev->mman.stolen_reserved_size = 0;  in amdgpu_gmc_get_vbios_allocations()
    709  adev->mman.keep_stolen_vga_memory = true;  in amdgpu_gmc_get_vbios_allocations()
    715  adev->mman.stolen_reserved_offset = 0x500000;  in amdgpu_gmc_get_vbios_allocations()
    716  adev->mman.stolen_reserved_size = 0x200000;  in amdgpu_gmc_get_vbios_allocations()
    722  adev->mman.keep_stolen_vga_memory = true;  in amdgpu_gmc_get_vbios_allocations()
    726  adev->mman.stolen_reserved_offset = 0x1ffb0000;  in amdgpu_gmc_get_vbios_allocations()
    727  adev->mman.stolen_reserved_size = 64 * PAGE_SIZE;  in amdgpu_gmc_get_vbios_allocations()
    731  adev->mman.keep_stolen_vga_memory = false;  in amdgpu_gmc_get_vbios_allocations()
    741  if (adev->mman  in amdgpu_gmc_get_vbios_allocations()
    [all...]

amdgpu_discovery.c
    222  discv_regn = memremap(pos, adev->mman.discovery_tmr_size, MEMREMAP_WC);  in amdgpu_discovery_read_binary_from_sysmem()
    224  memcpy(binary, discv_regn, adev->mman.discovery_tmr_size);  in amdgpu_discovery_read_binary_from_sysmem()
    259  adev->mman.discovery_tmr_size, false);  in amdgpu_discovery_read_binary_from_mem()
    354  adev->mman.discovery_tmr_size = DISCOVERY_TMR_SIZE;  in amdgpu_discovery_init()
    355  adev->mman.discovery_bin = kzalloc(adev->mman.discovery_tmr_size, GFP_KERNEL);  in amdgpu_discovery_init()
    356  if (!adev->mman.discovery_bin)  in amdgpu_discovery_init()
    362  r = amdgpu_discovery_read_binary_from_file(adev, adev->mman.discovery_bin);  in amdgpu_discovery_init()
    372  adev, adev->mman.discovery_bin);  in amdgpu_discovery_init()
    378  if (!amdgpu_discovery_verify_binary_signature(adev->mman  in amdgpu_discovery_init()
    [all...]
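The discovery path above allocates a buffer of discovery_tmr_size, reads the IP-discovery binary into it, and only then trusts the contents after signature/checksum verification. The helper below is a generic byte-sum verifier in the same spirit; it is an illustrative sketch, not necessarily the exact algorithm amdgpu_discovery_verify_checksum() implements.

#include <linux/types.h>

/* Return true when the byte sum of the blob matches the expected value
 * (hypothetical helper, for illustration only). */
static bool my_verify_checksum(const u8 *data, u32 size, u16 expected)
{
	u16 sum = 0;
	u32 i;

	for (i = 0; i < size; i++)
		sum += data[i];

	return sum == expected;
}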
amdgpu_virt.c
    434  if (adev->mman.fw_vram_usage_va)  in amdgpu_virt_add_bad_page()
    435  vram_usage_va = adev->mman.fw_vram_usage_va;  in amdgpu_virt_add_bad_page()
    437  vram_usage_va = adev->mman.drv_vram_usage_va;  in amdgpu_virt_add_bad_page()
    601  ttm_resource_manager_usage(&adev->mman.vram_mgr.manager) >> 20;  in amdgpu_virt_write_vf2pf_data()
    603  amdgpu_vram_mgr_vis_usage(&adev->mman.vram_mgr) >> 20;  in amdgpu_virt_write_vf2pf_data()
    652  if (adev->mman.fw_vram_usage_va && adev->mman.drv_vram_usage_va) {  in amdgpu_virt_init_data_exchange()
    654  } else if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) {  in amdgpu_virt_init_data_exchange()
    677  if (adev->mman  in amdgpu_virt_exchange_data()
    [all...]

amdgpu_vram_mgr.c
     50  return container_of(mgr, struct amdgpu_device, mman.vram_mgr);  in to_amdgpu_device()
    130  struct ttm_resource_manager *man = &adev->mman.vram_mgr.manager;  in amdgpu_mem_info_vram_used_show()
    151  amdgpu_vram_mgr_vis_usage(&adev->mman.vram_mgr));  in amdgpu_mem_info_vis_vram_used_show()
    885  struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;  in amdgpu_vram_mgr_init()
    889  ttm_resource_manager_init(man, &adev->mman.bdev,  in amdgpu_vram_mgr_init()
    908  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, &mgr->manager);  in amdgpu_vram_mgr_init()
    923  struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;  in amdgpu_vram_mgr_fini()
    930  ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man);  in amdgpu_vram_mgr_fini()
    947  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, NULL);  in amdgpu_vram_mgr_fini()
gmc_v10_0.c
    327  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;  in gmc_v10_0_flush_gpu_tlb()
    355  mutex_lock(&adev->mman.gtt_window_lock);  in gmc_v10_0_flush_gpu_tlb()
    359  mutex_unlock(&adev->mman.gtt_window_lock);  in gmc_v10_0_flush_gpu_tlb()
    365  if (!adev->mman.buffer_funcs_enabled ||  in gmc_v10_0_flush_gpu_tlb()
    370  mutex_unlock(&adev->mman.gtt_window_lock);  in gmc_v10_0_flush_gpu_tlb()
    379  r = amdgpu_job_alloc_with_ib(ring->adev, &adev->mman.high_pr,  in gmc_v10_0_flush_gpu_tlb()
    392  mutex_unlock(&adev->mman.gtt_window_lock);  in gmc_v10_0_flush_gpu_tlb()
    400  mutex_unlock(&adev->mman.gtt_window_lock);  in gmc_v10_0_flush_gpu_tlb()
   1051  amdgpu_gtt_mgr_recover(&adev->mman.gtt_mgr);  in gmc_v10_0_gart_enable()
amdgpu_sdma.h
    153  #define amdgpu_emit_copy_buffer(adev, ib, s, d, b, t) (adev)->mman.buffer_funcs->emit_copy_buffer((ib), (s), (d), (b), (t))
    154  #define amdgpu_emit_fill_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_fill_buffer((ib), (s), (d), (b))
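These macros are thin wrappers that dispatch through the buffer_funcs function-pointer table, so each ASIC's SDMA backend can plug in its own copy/fill emitters. Here is a self-contained sketch of that dispatch-table pattern with made-up names (my_buffer_funcs, emit_copy, sdma_vx_*); it mirrors the shape of the macro above rather than the real amdgpu types.

#include <stdint.h>
#include <stdio.h>

/* Per-backend emitter table, analogous to buffer_funcs above. */
struct my_buffer_funcs {
	uint32_t copy_max_bytes;
	void (*emit_copy)(uint64_t src, uint64_t dst, uint32_t bytes);
};

struct my_device {
	const struct my_buffer_funcs *buffer_funcs;
};

/* Wrapper macro in the style of amdgpu_emit_copy_buffer(). */
#define my_emit_copy_buffer(dev, s, d, b) \
	(dev)->buffer_funcs->emit_copy((s), (d), (b))

/* One hypothetical backend implementation. */
static void sdma_vx_emit_copy(uint64_t src, uint64_t dst, uint32_t bytes)
{
	printf("copy %u bytes: 0x%llx -> 0x%llx\n", bytes,
	       (unsigned long long)src, (unsigned long long)dst);
}

static const struct my_buffer_funcs sdma_vx_buffer_funcs = {
	.copy_max_bytes = 1 << 22,
	.emit_copy = sdma_vx_emit_copy,
};

int main(void)
{
	struct my_device dev = { .buffer_funcs = &sdma_vx_buffer_funcs };

	my_emit_copy_buffer(&dev, 0x1000, 0x2000, 4096);
	return 0;
}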
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/

amdgpu_discovery.c
    139  adev->mman.discovery_tmr_size, false);  in amdgpu_discovery_read_binary()
    171  adev->mman.discovery_tmr_size = DISCOVERY_TMR_SIZE;  in amdgpu_discovery_init()
    172  adev->mman.discovery_bin = kzalloc(adev->mman.discovery_tmr_size, GFP_KERNEL);  in amdgpu_discovery_init()
    173  if (!adev->mman.discovery_bin)  in amdgpu_discovery_init()
    176  r = amdgpu_discovery_read_binary(adev, adev->mman.discovery_bin);  in amdgpu_discovery_init()
    182  bhdr = (struct binary_header *)adev->mman.discovery_bin;  in amdgpu_discovery_init()
    195  if (!amdgpu_discovery_verify_checksum(adev->mman.discovery_bin + offset,  in amdgpu_discovery_init()
    205  ihdr = (struct ip_discovery_header *)(adev->mman.discovery_bin + offset);  in amdgpu_discovery_init()
    213  if (!amdgpu_discovery_verify_checksum(adev->mman  in amdgpu_discovery_init()
    [all...]

amdgpu_ttm.c
     74  return ttm_range_man_init(&adev->mman.bdev, type,  in amdgpu_ttm_init_on_chip()
    124  if (!adev->mman.buffer_funcs_enabled) {  in amdgpu_evict_flags()
    255  BUG_ON(adev->mman.buffer_funcs->copy_max_bytes <  in amdgpu_ttm_map_buffer()
    269  num_dw = ALIGN(adev->mman.buffer_funcs->copy_num_dw, 8);  in amdgpu_ttm_map_buffer()
    320  r = amdgpu_job_submit(job, &adev->mman.entity,  in amdgpu_ttm_map_buffer()
    360  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;  in amdgpu_ttm_copy_mem_to_mem()
    365  if (!adev->mman.buffer_funcs_enabled) {  in amdgpu_ttm_copy_mem_to_mem()
    388  mutex_lock(&adev->mman.gtt_window_lock);  in amdgpu_ttm_copy_mem_to_mem()
    448  mutex_unlock(&adev->mman.gtt_window_lock);  in amdgpu_ttm_copy_mem_to_mem()
    693  if (!adev->mman  in amdgpu_bo_move()
    [all...]

amdgpu_gtt_mgr.c
     50  struct ttm_resource_manager *man = ttm_manager_type(&adev->mman.bdev, TTM_PL_TT);  in amdgpu_mem_info_gtt_total_show()
     69  struct ttm_resource_manager *man = ttm_manager_type(&adev->mman.bdev, TTM_PL_TT);  in amdgpu_mem_info_gtt_used_show()
     91  struct amdgpu_gtt_mgr *mgr = &adev->mman.gtt_mgr;  in amdgpu_gtt_mgr_init()
    118  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_TT, &mgr->manager);  in amdgpu_gtt_mgr_init()
    133  struct amdgpu_gtt_mgr *mgr = &adev->mman.gtt_mgr;  in amdgpu_gtt_mgr_fini()
    139  ret = ttm_resource_manager_force_list_clean(&adev->mman.bdev, man);  in amdgpu_gtt_mgr_fini()
    151  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_TT, NULL);  in amdgpu_gtt_mgr_fini()
amdgpu_vram_mgr.c
     38  return container_of(mgr, struct amdgpu_device, mman.vram_mgr);  in to_amdgpu_device()
     88  struct ttm_resource_manager *man = ttm_manager_type(&adev->mman.bdev, TTM_PL_VRAM);  in amdgpu_mem_info_vram_used_show()
    107  struct ttm_resource_manager *man = ttm_manager_type(&adev->mman.bdev, TTM_PL_VRAM);  in amdgpu_mem_info_vis_vram_used_show()
    177  struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;  in amdgpu_vram_mgr_init()
    193  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, &mgr->manager);  in amdgpu_vram_mgr_init()
    208  struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;  in amdgpu_vram_mgr_fini()
    214  ret = ttm_resource_manager_force_list_clean(&adev->mman.bdev, man);  in amdgpu_vram_mgr_fini()
    225  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, NULL);  in amdgpu_vram_mgr_fini()

amdgpu_gmc.c
    490  adev->mman.keep_stolen_vga_memory = true;  in amdgpu_gmc_get_vbios_allocations()
    493  adev->mman.keep_stolen_vga_memory = false;  in amdgpu_gmc_get_vbios_allocations()
    502  if (adev->mman.keep_stolen_vga_memory)  in amdgpu_gmc_get_vbios_allocations()
    511  adev->mman.stolen_vga_size = AMDGPU_VBIOS_VGA_ALLOCATION;  in amdgpu_gmc_get_vbios_allocations()
    512  adev->mman.stolen_extended_size = size - adev->mman.stolen_vga_size;  in amdgpu_gmc_get_vbios_allocations()
    514  adev->mman.stolen_vga_size = size;  in amdgpu_gmc_get_vbios_allocations()
    515  adev->mman.stolen_extended_size = 0;  in amdgpu_gmc_get_vbios_allocations()
amdgpu_sdma.h
    111  #define amdgpu_emit_copy_buffer(adev, ib, s, d, b, t) (adev)->mman.buffer_funcs->emit_copy_buffer((ib), (s), (d), (b), (t))
    112  #define amdgpu_emit_fill_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_fill_buffer((ib), (s), (d), (b))

gmc_v10_0.c
    276  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;  in gmc_v10_0_flush_gpu_tlb()
    304  mutex_lock(&adev->mman.gtt_window_lock);  in gmc_v10_0_flush_gpu_tlb()
    308  mutex_unlock(&adev->mman.gtt_window_lock);  in gmc_v10_0_flush_gpu_tlb()
    314  if (!adev->mman.buffer_funcs_enabled ||  in gmc_v10_0_flush_gpu_tlb()
    319  mutex_unlock(&adev->mman.gtt_window_lock);  in gmc_v10_0_flush_gpu_tlb()
    337  r = amdgpu_job_submit(job, &adev->mman.entity,  in gmc_v10_0_flush_gpu_tlb()
    342  mutex_unlock(&adev->mman.gtt_window_lock);  in gmc_v10_0_flush_gpu_tlb()
    353  mutex_unlock(&adev->mman.gtt_window_lock);  in gmc_v10_0_flush_gpu_tlb()
amdgpu_virt.c
    410  retired_page = *(uint64_t *)(adev->mman.fw_vram_usage_va +  in amdgpu_virt_add_bad_page()
    515  struct ttm_resource_manager *vram_man = ttm_manager_type(&adev->mman.bdev, TTM_PL_VRAM);  in amdgpu_virt_write_vf2pf_data()
    587  if (adev->mman.fw_vram_usage_va != NULL) {  in amdgpu_virt_init_data_exchange()
    610  if (adev->mman.fw_vram_usage_va != NULL) {  in amdgpu_virt_exchange_data()
    614  (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10));  in amdgpu_virt_exchange_data()
    617  (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_VF2PF_OFFSET_KB << 10));  in amdgpu_virt_exchange_data()
/kernel/linux/linux-6.6/drivers/gpu/drm/qxl/

qxl_ttm.c
     42  struct qxl_mman *mman;  in qxl_get_qdev() local
     45  mman = container_of(bdev, struct qxl_mman, bdev);  in qxl_get_qdev()
     46  qdev = container_of(mman, struct qxl_device, mman);  in qxl_get_qdev()
    189  return ttm_range_man_init(&qdev->mman.bdev, type, false, size);  in qxl_ttm_init_mem_type()
    198  r = ttm_device_init(&qdev->mman.bdev, &qxl_bo_driver, NULL,  in qxl_ttm_init()
    230  ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_VRAM);  in qxl_ttm_fini()
    231  ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_PRIV);  in qxl_ttm_fini()
    232  ttm_device_fini(&qdev->mman.bdev);  in qxl_ttm_fini()
    239  ttm_resource_manager_create_debugfs(ttm_manager_type(&qdev->mman  in qxl_ttm_debugfs_init()
    [all...]

qxl_object.c
    144  r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type,  in qxl_bo_create()
    405  man = ttm_manager_type(&qdev->mman.bdev, TTM_PL_PRIV);  in qxl_surf_evict()
    406  return ttm_resource_manager_evict_all(&qdev->mman.bdev, man);  in qxl_surf_evict()
    413  man = ttm_manager_type(&qdev->mman.bdev, TTM_PL_VRAM);  in qxl_vram_evict()
    414  return ttm_resource_manager_evict_all(&qdev->mman.bdev, man);  in qxl_vram_evict()
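qxl_get_qdev() (and radeon_get_rdev() further down) recover the driver device from the embedded struct ttm_device by walking container_of() twice: bdev sits inside the mman struct, which in turn sits inside the device struct. A small, self-contained userspace illustration of that technique follows, using hypothetical qxl-like type names; container_of is re-derived from offsetof so the example compiles outside the kernel.

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Stand-ins for ttm_device / qxl_mman / qxl_device. */
struct fake_ttm_device { int dummy; };

struct fake_mman {
	struct fake_ttm_device bdev;	/* embedded, not a pointer */
};

struct fake_device {
	int id;
	struct fake_mman mman;
};

/* Same walk as qxl_get_qdev(): bdev -> mman -> device. */
static struct fake_device *fake_get_qdev(struct fake_ttm_device *bdev)
{
	struct fake_mman *mman = container_of(bdev, struct fake_mman, bdev);

	return container_of(mman, struct fake_device, mman);
}

int main(void)
{
	struct fake_device dev = { .id = 42 };

	printf("recovered id = %d\n", fake_get_qdev(&dev.mman.bdev)->id);
	return 0;
}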
/kernel/linux/linux-5.10/drivers/gpu/drm/qxl/

qxl_ttm.c
     43  struct qxl_mman *mman;  in qxl_get_qdev() local
     46  mman = container_of(bdev, struct qxl_mman, bdev);  in qxl_get_qdev()
     47  qdev = container_of(mman, struct qxl_device, mman);  in qxl_get_qdev()
    206  return ttm_range_man_init(&qdev->mman.bdev, type, false, size);  in qxl_ttm_init_mem_type()
    215  r = ttm_bo_device_init(&qdev->mman.bdev,  in qxl_ttm_init()
    248  ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_VRAM);  in qxl_ttm_fini()
    249  ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_PRIV);  in qxl_ttm_fini()
    250  ttm_bo_device_release(&qdev->mman.bdev);  in qxl_ttm_fini()
    284  qxl_mem_types_list[i].data = ttm_manager_type(&qdev->mman  in qxl_ttm_debugfs_init()
    [all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/radeon/

radeon_ttm.c
     61  struct radeon_mman *mman;  in radeon_get_rdev() local
     64  mman = container_of(bdev, struct radeon_mman, bdev);  in radeon_get_rdev()
     65  rdev = container_of(mman, struct radeon_device, mman);  in radeon_get_rdev()
     71  return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_VRAM,  in radeon_ttm_init_vram()
     77  return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_TT,  in radeon_ttm_init_gtt()
    558  return ttm_pool_alloc(&rdev->mman.bdev.pool, ttm, ctx);  in radeon_ttm_tt_populate()
    578  return ttm_pool_free(&rdev->mman.bdev.pool, ttm);  in radeon_ttm_tt_unpopulate()
    691  r = ttm_device_init(&rdev->mman.bdev, &radeon_bo_driver, rdev->dev,  in radeon_ttm_init()
    700  rdev->mman  in radeon_ttm_init()
    [all...]
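radeon_ttm_tt_populate()/unpopulate() back a ttm_tt with pages from the per-device TTM page pool. Below is a minimal sketch of that pair, assuming the 6.6 ttm_pool API (ttm_pool_alloc/ttm_pool_free) and a hypothetical my_ prefix; real drivers add cache-attribute and userptr handling around this.

#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_device.h>
#include <drm/ttm/ttm_pool.h>
#include <drm/ttm/ttm_tt.h>

/* Allocate backing pages for a ttm_tt from the device-wide pool. */
static int my_ttm_tt_populate(struct ttm_device *bdev, struct ttm_tt *ttm,
			      struct ttm_operation_ctx *ctx)
{
	return ttm_pool_alloc(&bdev->pool, ttm, ctx);
}

/* Give the pages back to the pool when the tt is torn down. */
static void my_ttm_tt_unpopulate(struct ttm_device *bdev, struct ttm_tt *ttm)
{
	ttm_pool_free(&bdev->pool, ttm);
}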
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/

radeon_ttm.c
     65  struct radeon_mman *mman;  in radeon_get_rdev() local
     68  mman = container_of(bdev, struct radeon_mman, bdev);  in radeon_get_rdev()
     69  rdev = container_of(mman, struct radeon_device, mman);  in radeon_get_rdev()
     75  return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_VRAM,  in radeon_ttm_init_vram()
     81  return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_TT,  in radeon_ttm_init_gtt()
    818  r = ttm_bo_device_init(&rdev->mman.bdev,  in radeon_ttm_init()
    827  rdev->mman.initialized = true;  in radeon_ttm_init()
    875  if (!rdev->mman.initialized)  in radeon_ttm_fini()
    886  ttm_range_man_fini(&rdev->mman  in radeon_ttm_fini()
    [all...]
/kernel/linux/linux-5.10/tools/perf/

check-headers.sh
     75  include/uapi/asm-generic/mman-common.h
    147  check include/uapi/asm-generic/mman.h '-I "^#include <\(uapi/\)*asm-generic/mman-common\(-tools\)*.h>"'
    148  check include/uapi/linux/mman.h '-I "^#include <\(uapi/\)*asm/mman.h>"'
/kernel/linux/linux-6.6/tools/perf/

check-headers.sh
     81  "include/uapi/asm-generic/mman-common.h"
    165  check include/uapi/asm-generic/mman.h '-I "^#include <\(uapi/\)*asm-generic/mman-common\(-tools\)*.h>"'
    166  check include/uapi/linux/mman.h '-I "^#include <\(uapi/\)*asm/mman.h>"'
/kernel/linux/linux-6.6/tools/testing/selftests/mm/

uffd-common.h
     22  #include <linux/mman.h>
     23  #include <sys/mman.h>
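uffd-common.h pulls in both <sys/mman.h> (the libc mmap/munmap interface) and <linux/mman.h> (extra kernel flag definitions) for the userfaultfd selftests. A minimal, self-contained userspace example of the sys/mman.h side, mapping and releasing an anonymous region much like the selftests set up their test areas; it does not touch userfaultfd itself.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>

int main(void)
{
	size_t len = 4096;

	/* Anonymous, private, read-write mapping. */
	char *area = mmap(NULL, len, PROT_READ | PROT_WRITE,
			  MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	if (area == MAP_FAILED) {
		perror("mmap");
		return EXIT_FAILURE;
	}

	memset(area, 0xaa, len);		/* touch the pages */
	printf("first byte: 0x%02x\n", (unsigned char)area[0]);

	if (munmap(area, len) != 0) {
		perror("munmap");
		return EXIT_FAILURE;
	}
	return EXIT_SUCCESS;
}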