
Searched refs:xcp_id (Results 1 - 18 of 18) sorted by relevance

/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
aqua_vanjaram.c
32 #define XCP_INST_MASK(num_inst, xcp_id) \
33 (num_inst ? GENMASK(num_inst - 1, 0) << (xcp_id * num_inst) : 0)
67 int xcp_id; in aqua_vanjaram_set_xcp_id() local
71 ring->xcp_id = AMDGPU_XCP_NO_PARTITION; in aqua_vanjaram_set_xcp_id()
97 for (xcp_id = 0; xcp_id < adev->xcp_mgr->num_xcps; xcp_id++) { in aqua_vanjaram_set_xcp_id()
98 if (adev->xcp_mgr->xcp[xcp_id].ip[ip_blk].inst_mask & inst_mask) { in aqua_vanjaram_set_xcp_id()
99 ring->xcp_id = xcp_id; in aqua_vanjaram_set_xcp_id()
337 __aqua_vanjaram_get_xcp_ip_info(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id, enum AMDGPU_XCP_IP_BLOCK ip_id, struct amdgpu_xcp_ip *ip) __aqua_vanjaram_get_xcp_ip_info() argument
590 aqua_vanjaram_get_xcp_ip_details(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id, enum AMDGPU_XCP_IP_BLOCK ip_id, struct amdgpu_xcp_ip *ip) aqua_vanjaram_get_xcp_ip_details() argument
[all...]
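The XCP_INST_MASK macro matched above packs the IP instances owned by one partition into a bitmask: partition xcp_id gets num_inst consecutive bits starting at bit xcp_id * num_inst, and zero instances yield an empty mask. A minimal userspace sketch of the same arithmetic follows; the GENMASK definition below is a simplified stand-in for the kernel helper from include/linux/bits.h, and the example values are illustrative only.

/* Sketch only: userspace illustration of XCP_INST_MASK from aqua_vanjaram.c. */
#include <stdio.h>

/* Simplified stand-in for the kernel's GENMASK(h, l): bits l..h set. */
#define GENMASK(h, l) \
	(((~0UL) << (l)) & (~0UL >> (sizeof(unsigned long) * 8 - 1 - (h))))
#define XCP_INST_MASK(num_inst, xcp_id) \
	((num_inst) ? GENMASK((num_inst) - 1, 0) << ((xcp_id) * (num_inst)) : 0)

int main(void)
{
	/* With 2 instances per partition, partition 1 owns instances 2 and 3,
	 * so its instance mask is 0b1100. */
	printf("XCP_INST_MASK(2, 1) = 0x%lx\n", XCP_INST_MASK(2, 1));
	/* Partition 0 with 4 instances owns instances 0..3 -> 0xf. */
	printf("XCP_INST_MASK(4, 0) = 0x%lx\n", XCP_INST_MASK(4, 0));
	return 0;
}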
amdgpu_xcp.h
106 int (*get_ip_details)(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id,
112 int (*prepare_suspend)(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
113 int (*suspend)(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
114 int (*prepare_resume)(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
115 int (*resume)(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
122 int amdgpu_xcp_prepare_suspend(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
123 int amdgpu_xcp_suspend(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
124 int amdgpu_xcp_prepare_resume(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
125 int amdgpu_xcp_resume(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id);
amdgpu_xcp.c
62 static int amdgpu_xcp_run_transition(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id, in amdgpu_xcp_run_transition() argument
69 if (xcp_id >= MAX_XCP || !xcp_mgr->xcp[xcp_id].valid) in amdgpu_xcp_run_transition()
72 xcp = &xcp_mgr->xcp[xcp_id]; in amdgpu_xcp_run_transition()
83 int amdgpu_xcp_prepare_suspend(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id) in amdgpu_xcp_prepare_suspend() argument
85 return amdgpu_xcp_run_transition(xcp_mgr, xcp_id, in amdgpu_xcp_prepare_suspend()
89 int amdgpu_xcp_suspend(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id) in amdgpu_xcp_suspend() argument
91 return amdgpu_xcp_run_transition(xcp_mgr, xcp_id, AMDGPU_XCP_SUSPEND); in amdgpu_xcp_suspend()
94 int amdgpu_xcp_prepare_resume(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id) in amdgpu_xcp_prepare_resume() argument
96 return amdgpu_xcp_run_transition(xcp_mgr, xcp_id, in amdgpu_xcp_prepare_resume()
100 amdgpu_xcp_resume(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id) amdgpu_xcp_resume() argument
105 __amdgpu_xcp_add_block(struct amdgpu_xcp_mgr *xcp_mgr, int xcp_id, struct amdgpu_xcp_ip *ip) __amdgpu_xcp_add_block() argument
[all...]
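The amdgpu_xcp.c matches show the pattern behind the suspend/resume helpers declared in amdgpu_xcp.h: each public helper funnels into one transition routine that validates xcp_id against MAX_XCP and the partition's valid flag before acting on xcp_mgr->xcp[xcp_id]. The sketch below models only that dispatch shape; the struct layouts, the state enum names, and the transition body are simplified assumptions, not the kernel's definitions.

/* Sketch only: simplified model of the xcp_id-validated transition dispatch. */
#include <stdio.h>
#include <errno.h>
#include <stdbool.h>

#define MAX_XCP 8

enum xcp_state { XCP_PREPARE_SUSPEND, XCP_SUSPEND, XCP_PREPARE_RESUME, XCP_RESUME };

struct xcp { bool valid; };
struct xcp_mgr { struct xcp xcp[MAX_XCP]; };

/* All helpers share one bounds-checked dispatcher, so xcp_id validation
 * lives in a single place. */
static int run_transition(struct xcp_mgr *mgr, int xcp_id, enum xcp_state state)
{
	if (xcp_id >= MAX_XCP || !mgr->xcp[xcp_id].valid)
		return -EINVAL;
	printf("xcp %d -> state %d\n", xcp_id, state);
	return 0;
}

static int xcp_suspend(struct xcp_mgr *mgr, int xcp_id)
{
	return run_transition(mgr, xcp_id, XCP_SUSPEND);
}

int main(void)
{
	struct xcp_mgr mgr = { .xcp = { [0] = { .valid = true } } };

	printf("suspend xcp 0: %d\n", xcp_suspend(&mgr, 0));  /* ok */
	printf("suspend xcp 5: %d\n", xcp_suspend(&mgr, 5));  /* invalid partition */
	return 0;
}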
amdgpu_amdkfd.h
244 uint32_t *flags, int8_t *xcp_id);
294 uint8_t xcp_id);
335 uint64_t size, u32 alloc_flag, int8_t xcp_id);
337 uint64_t size, u32 alloc_flag, int8_t xcp_id);
339 u64 amdgpu_amdkfd_xcp_memory_size(struct amdgpu_device *adev, int xcp_id);
341 #define KFD_XCP_MEM_ID(adev, xcp_id) \
342 ((adev)->xcp_mgr && (xcp_id) >= 0 ?\
343 (adev)->xcp_mgr->xcp[(xcp_id)].mem_id : -1)
345 #define KFD_XCP_MEMORY_SIZE(adev, xcp_id) amdgpu_amdkfd_xcp_memory_size((adev), (xcp_id))
[all...]
amdgpu_amdkfd_gpuvm.c
157 * @xcp_id: xcp_id is used to get xcp from xcp manager, one xcp is
164 uint64_t size, u32 alloc_flag, int8_t xcp_id) in amdgpu_amdkfd_reserve_mem_limit()
188 if (WARN_ONCE(xcp_id < 0, "invalid XCP ID %d", xcp_id)) in amdgpu_amdkfd_reserve_mem_limit()
191 vram_size = KFD_XCP_MEMORY_SIZE(adev, xcp_id); in amdgpu_amdkfd_reserve_mem_limit()
215 (adev && xcp_id >= 0 && adev->kfd.vram_used[xcp_id] + vram_needed > in amdgpu_amdkfd_reserve_mem_limit()
226 if (adev && xcp_id >= 0) { in amdgpu_amdkfd_reserve_mem_limit()
227 adev->kfd.vram_used[xcp_id] in amdgpu_amdkfd_reserve_mem_limit()
163 amdgpu_amdkfd_reserve_mem_limit(struct amdgpu_device *adev, uint64_t size, u32 alloc_flag, int8_t xcp_id) amdgpu_amdkfd_reserve_mem_limit() argument
240 amdgpu_amdkfd_unreserve_mem_limit(struct amdgpu_device *adev, uint64_t size, u32 alloc_flag, int8_t xcp_id) amdgpu_amdkfd_unreserve_mem_limit() argument
1593 amdgpu_amdkfd_get_available_memory(struct amdgpu_device *adev, uint8_t xcp_id) amdgpu_amdkfd_get_available_memory() argument
1645 int8_t xcp_id = -1; amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu() local
[all...]
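The amdgpu_amdkfd_gpuvm.c matches show xcp_id indexing per-partition VRAM accounting: the reserve path looks up the partition's budget via KFD_XCP_MEMORY_SIZE(), rejects the request if vram_used[xcp_id] plus the new allocation would exceed it, and otherwise charges the partition. A minimal sketch of that accounting follows; the 32 GiB budgets, array sizes, and the absence of the real function's locking and flag handling are all illustrative assumptions.

/* Sketch only: per-partition VRAM limit check keyed by xcp_id. */
#include <stdio.h>
#include <stdint.h>
#include <errno.h>

#define NUM_XCP 2

static uint64_t vram_size[NUM_XCP] = { 32ULL << 30, 32ULL << 30 }; /* per-partition budget */
static uint64_t vram_used[NUM_XCP];                                /* per-partition usage */

static int reserve_vram(int xcp_id, uint64_t size)
{
	if (xcp_id < 0 || xcp_id >= NUM_XCP)
		return -EINVAL;
	/* Reject the allocation if this partition's budget would be exceeded. */
	if (vram_used[xcp_id] + size > vram_size[xcp_id])
		return -ENOMEM;
	vram_used[xcp_id] += size;
	return 0;
}

int main(void)
{
	printf("%d\n", reserve_vram(0, 16ULL << 30)); /* fits */
	printf("%d\n", reserve_vram(0, 24ULL << 30)); /* over partition 0's budget */
	printf("%d\n", reserve_vram(1, 24ULL << 30)); /* partition 1 is still empty */
	return 0;
}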
amdgpu_amdkfd.c
496 uint32_t *flags, int8_t *xcp_id) in amdgpu_amdkfd_get_dmabuf_info()
540 if (xcp_id) in amdgpu_amdkfd_get_dmabuf_info()
541 *xcp_id = bo->xcp_id; in amdgpu_amdkfd_get_dmabuf_info()
806 u64 amdgpu_amdkfd_xcp_memory_size(struct amdgpu_device *adev, int xcp_id) in amdgpu_amdkfd_xcp_memory_size() argument
809 s8 mem_id = KFD_XCP_MEM_ID(adev, xcp_id); in amdgpu_amdkfd_xcp_memory_size()
811 if (adev->gmc.num_mem_partitions && xcp_id >= 0 && mem_id >= 0) { in amdgpu_amdkfd_xcp_memory_size()
492 amdgpu_amdkfd_get_dmabuf_info(struct amdgpu_device *adev, int dma_buf_fd, struct amdgpu_device **dmabuf_adev, uint64_t *bo_size, void *metadata_buffer, size_t buffer_size, uint32_t *metadata_size, uint32_t *flags, int8_t *xcp_id) amdgpu_amdkfd_get_dmabuf_info() argument
amdgpu_vm_pt.c
501 * @xcp_id: GPU partition id
505 int32_t xcp_id) in amdgpu_vm_pt_create()
539 bp.xcp_id_plus1 = xcp_id + 1; in amdgpu_vm_pt_create()
565 bp.xcp_id_plus1 = xcp_id + 1; in amdgpu_vm_pt_create()
611 vm->root.bo->xcp_id); in amdgpu_vm_pt_alloc()
503 amdgpu_vm_pt_create(struct amdgpu_device *adev, struct amdgpu_vm *vm, int level, bool immediate, struct amdgpu_bo_vm **vmbo, int32_t xcp_id) amdgpu_vm_pt_create() argument
amdgpu_vm.h
403 int amdgpu_vm_init(struct amdgpu_device *adev, struct amdgpu_vm *vm, int32_t xcp_id);
486 int32_t xcp_id);
amdgpu_gem.c
341 flags, ttm_bo_type_device, resv, &gobj, fpriv->xcp_id + 1); in amdgpu_gem_create_ioctl()
411 0, ttm_bo_type_device, NULL, &gobj, fpriv->xcp_id + 1); in amdgpu_gem_userptr_ioctl()
936 ttm_bo_type_device, NULL, &gobj, fpriv->xcp_id + 1); in amdgpu_mode_dumb_create()
amdgpu_object.h
119 int8_t xcp_id; member
amdgpu_object.c
135 int8_t mem_id = KFD_XCP_MEM_ID(adev, abo->xcp_id); in amdgpu_bo_placement_from_domain()
597 /* For GPUs with spatial partitioning, bo->xcp_id=-1 means any partition */ in amdgpu_bo_create()
598 bo->xcp_id = bp->xcp_id_plus1 - 1; in amdgpu_bo_create()
601 bo->xcp_id = 0; in amdgpu_bo_create()
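The amdgpu_object.c and amdgpu_gem.c matches together show the "plus one" encoding of the partition id on buffer-object creation: callers pass xcp_id + 1 in the creation parameters so that a zero-initialised request decodes to xcp_id == -1, which the driver treats as "any partition" on spatially partitioned GPUs. The sketch below models only that encoding; the struct names and the trivial bo_create() helper are simplified stand-ins, not the driver's types.

/* Sketch only: the xcp_id_plus1 encoding used at buffer-object creation. */
#include <stdio.h>
#include <string.h>

struct bo_param { int xcp_id_plus1; /* 0 means "not specified" */ };
struct bo { int xcp_id; /* -1 means any partition */ };

static void bo_create(const struct bo_param *bp, struct bo *bo)
{
	bo->xcp_id = bp->xcp_id_plus1 - 1;
}

int main(void)
{
	struct bo_param bp;
	struct bo bo;

	memset(&bp, 0, sizeof(bp));      /* caller never chose a partition */
	bo_create(&bp, &bo);
	printf("unspecified -> xcp_id %d\n", bo.xcp_id); /* -1: any partition */

	bp.xcp_id_plus1 = 2 + 1;         /* request partition 2, as fpriv->xcp_id + 1 does */
	bo_create(&bp, &bo);
	printf("requested   -> xcp_id %d\n", bo.xcp_id); /* 2 */
	return 0;
}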
amdgpu_ring.h
259 u32 xcp_id; member
amdgpu_ttm.c
1080 if (adev->gmc.mem_partitions && abo->xcp_id >= 0) in amdgpu_ttm_tt_create()
1081 gtt->pool_id = KFD_XCP_MEM_ID(adev, abo->xcp_id); in amdgpu_ttm_tt_create()
1083 gtt->pool_id = abo->xcp_id; in amdgpu_ttm_tt_create()
amdgpu_vm.c
2122 * @xcp_id: GPU partition selection id
2130 int32_t xcp_id) in amdgpu_vm_init()
2181 false, &root, xcp_id); in amdgpu_vm_init()
2129 amdgpu_vm_init(struct amdgpu_device *adev, struct amdgpu_vm *vm, int32_t xcp_id) amdgpu_vm_init() argument
amdgpu.h
473 uint32_t xcp_id; member
gmc_v9_0.c
1250 KFD_XCP_MEM_ID(adev, bo->xcp_id) == vm->mem_id); in gmc_v9_0_get_coherence_flags()
amdgpu_kms.c
1251 r = amdgpu_vm_init(adev, &fpriv->vm, fpriv->xcp_id); in amdgpu_driver_open_kms()
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdkfd/
kfd_chardev.c
1513 int8_t xcp_id; in kfd_ioctl_get_dmabuf_info() local
1534 &args->metadata_size, &flags, &xcp_id); in kfd_ioctl_get_dmabuf_info()
1538 if (xcp_id >= 0) in kfd_ioctl_get_dmabuf_info()
1539 args->gpu_id = dmabuf_adev->kfd.dev->nodes[xcp_id]->id; in kfd_ioctl_get_dmabuf_info()

Completed in 28 milliseconds