/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
amdgpu_uvd.c
     42  #include "uvd/uvd_4_2_d.h"
    155  if (adev->uvd.address_64_bit)  in amdgpu_uvd_create_msg_bo_helper()
    192  INIT_DELAYED_WORK(&adev->uvd.idle_work, amdgpu_uvd_idle_work_handler);  in amdgpu_uvd_sw_init()
    263  r = amdgpu_ucode_request(adev, &adev->uvd.fw, fw_name);  in amdgpu_uvd_sw_init()
    267  amdgpu_ucode_release(&adev->uvd.fw);  in amdgpu_uvd_sw_init()
    272  adev->uvd.max_handles = AMDGPU_DEFAULT_UVD_HANDLES;  in amdgpu_uvd_sw_init()
    274  hdr = (const struct common_firmware_header *)adev->uvd.fw->data;  in amdgpu_uvd_sw_init()
    293  adev->uvd.max_handles = AMDGPU_MAX_UVD_HANDLES;  in amdgpu_uvd_sw_init()
    295  adev->uvd.fw_version = ((version_major << 24) | (version_minor << 16) |  in amdgpu_uvd_sw_init()
    300  (adev->uvd  in amdgpu_uvd_sw_init()
    [all...]
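Note: hits 274-295 show amdgpu_uvd_sw_init() reading the common firmware header and packing a driver-visible fw_version from the major and minor fields. A minimal standalone userspace sketch of that packing follows; the low bits of the word are an assumption here (the listing truncates the expression at hit 295), so treat them as illustrative only.

    /*
     * Sketch of the fw_version packing at hit 295: major in bits 31:24,
     * minor in bits 23:16.  The low bits are an assumption; the real
     * amdgpu_uvd_sw_init() fills them from further header fields.
     */
    #include <stdint.h>
    #include <stdio.h>

    static uint32_t pack_uvd_fw_version(uint8_t version_major,
                                        uint8_t version_minor,
                                        uint16_t low_bits /* hypothetical */)
    {
            return ((uint32_t)version_major << 24) |
                   ((uint32_t)version_minor << 16) |
                   low_bits;
    }

    int main(void)
    {
            /* e.g. firmware 1.130 with unknown low bits -> 0x01820000 */
            printf("0x%08x\n", pack_uvd_fw_version(1, 130, 0));
            return 0;
    }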
uvd_v7_0.c
     34  #include "uvd/uvd_7_0_offset.h"
     35  #include "uvd/uvd_7_0_sh_mask.h"
     42  #include "ivsrcid/uvd/irqsrcs_uvd_7_0.h"
     89  if (ring == &adev->uvd.inst[ring->me].ring_enc[0])  in uvd_v7_0_enc_ring_get_rptr()
    123  if (ring == &adev->uvd.inst[ring->me].ring_enc[0])  in uvd_v7_0_enc_ring_get_wptr()
    161  if (ring == &adev->uvd.inst[ring->me].ring_enc[0])  in uvd_v7_0_enc_ring_set_wptr()
    342  struct amdgpu_bo *bo = ring->adev->uvd.ib_bo;  in uvd_v7_0_enc_ring_test_ib()
    372  adev->uvd.num_uvd_inst = UVD7_MAX_HW_INSTANCES_VEGA20;  in uvd_v7_0_early_init()
    373  for (i = 0; i < adev->uvd.num_uvd_inst; i++) {  in uvd_v7_0_early_init()
    376  adev->uvd  in uvd_v7_0_early_init()
    [all...]
uvd_v6_0.c
     30  #include "uvd/uvd_6_0_d.h"
     31  #include "uvd/uvd_6_0_sh_mask.h"
     67  (!adev->uvd.fw_version || adev->uvd.fw_version >= FW_1_130_16));  in uvd_v6_0_enc_support()
     95  if (ring == &adev->uvd.inst->ring_enc[0])  in uvd_v6_0_enc_ring_get_rptr()
    125  if (ring == &adev->uvd.inst->ring_enc[0])  in uvd_v6_0_enc_ring_get_wptr()
    156  if (ring == &adev->uvd.inst->ring_enc[0])  in uvd_v6_0_enc_ring_set_wptr()
    335  struct amdgpu_bo *bo = ring->adev->uvd.ib_bo;  in uvd_v6_0_enc_ring_test_ib()
    360  adev->uvd.num_uvd_inst = 1;  in uvd_v6_0_early_init()
    369  adev->uvd  in uvd_v6_0_early_init()
    [all...]
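Note: hit 67 gates UVD 6.x encode support on the parsed firmware version, with an unknown (zero) version accepted. A small sketch of that predicate; the FW_1_130_16 encoding below is an assumption consistent with the version-packing sketch above, and the ASIC checks on the elided part of the condition are not reproduced.

    #include <stdbool.h>
    #include <stdint.h>

    /* assumed encoding, consistent with the version-packing sketch above */
    #define FW_1_130_16 ((1u << 24) | (130u << 16) | (16u << 8))

    static bool uvd_enc_fw_new_enough(uint32_t fw_version)
    {
            /* zero means "version unknown" and is accepted, as in hit 67 */
            return !fw_version || fw_version >= FW_1_130_16;
    }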
uvd_v3_1.c
     31  #include "uvd/uvd_3_1_d.h"
     32  #include "uvd/uvd_3_1_sh_mask.h"
    203  adev->uvd.inst->ring.funcs = &uvd_v3_1_ring_funcs;  in uvd_v3_1_set_ring_funcs()
    246  addr = (adev->uvd.inst->gpu_addr + AMDGPU_UVD_FIRMWARE_OFFSET) >> 3;  in uvd_v3_1_mc_resume()
    258  (AMDGPU_UVD_SESSION_SIZE * adev->uvd.max_handles)) >> 3;  in uvd_v3_1_mc_resume()
    263  addr = (adev->uvd.inst->gpu_addr >> 28) & 0xF;  in uvd_v3_1_mc_resume()
    267  addr = (adev->uvd.inst->gpu_addr >> 32) & 0xFF;  in uvd_v3_1_mc_resume()
    285  uint32_t keysel = adev->uvd.keyselect;  in uvd_v3_1_fw_validate()
    322  struct amdgpu_ring *ring = &adev->uvd.inst->ring;  in uvd_v3_1_start()
    330  /* set uvd bus  in uvd_v3_1_start()
    [all...]
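Note: hits 246-267 show uvd_v3_1_mc_resume() programming the VCPU buffer location: the low part of the address in 8-byte units, and bits 31:28 and 39:32 of the GPU address in separate narrow fields. A standalone sketch of the arithmetic only, with the register writes omitted and AMDGPU_UVD_FIRMWARE_OFFSET treated as a placeholder value.

    #include <stdint.h>
    #include <stdio.h>

    #define AMDGPU_UVD_FIRMWARE_OFFSET 256  /* placeholder value for illustration */

    int main(void)
    {
            uint64_t gpu_addr = 0x0000008f12345000ull;   /* example VCPU BO address */

            uint32_t lmi_addr  = (uint32_t)((gpu_addr + AMDGPU_UVD_FIRMWARE_OFFSET) >> 3); /* hit 246 */
            uint32_t ext_28_31 = (uint32_t)((gpu_addr >> 28) & 0xF);                       /* hit 263 */
            uint32_t ext_32_39 = (uint32_t)((gpu_addr >> 32) & 0xFF);                      /* hit 267 */

            printf("addr/8=0x%08x bits[31:28]=0x%x bits[39:32]=0x%02x\n",
                   lmi_addr, ext_28_31, ext_32_39);
            return 0;
    }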
uvd_v4_2.c
     31  #include "uvd/uvd_4_2_d.h"
     32  #include "uvd/uvd_4_2_sh_mask.h"
     96  adev->uvd.num_uvd_inst = 1;  in uvd_v4_2_early_init()
    111  r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 124, &adev->uvd.inst->irq);  in uvd_v4_2_sw_init()
    119  ring = &adev->uvd.inst->ring;  in uvd_v4_2_sw_init()
    120  sprintf(ring->name, "uvd");  in uvd_v4_2_sw_init()
    121  r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0,  in uvd_v4_2_sw_init()
    159  struct amdgpu_ring *ring = &adev->uvd.inst->ring;  in uvd_v4_2_hw_init()
    215  cancel_delayed_work_sync(&adev->uvd.idle_work);  in uvd_v4_2_hw_fini()
    239  cancel_delayed_work_sync(&adev->uvd  in uvd_v4_2_suspend()
    [all...]
uvd_v5_0.c
     31  #include "uvd/uvd_5_0_d.h"
     32  #include "uvd/uvd_5_0_sh_mask.h"
     94  adev->uvd.num_uvd_inst = 1;  in uvd_v5_0_early_init()
    109  r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_UVD_SYSTEM_MESSAGE, &adev->uvd.inst->irq);  in uvd_v5_0_sw_init()
    117  ring = &adev->uvd.inst->ring;  in uvd_v5_0_sw_init()
    118  sprintf(ring->name, "uvd");  in uvd_v5_0_sw_init()
    119  r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0,  in uvd_v5_0_sw_init()
    155  struct amdgpu_ring *ring = &adev->uvd.inst->ring;  in uvd_v5_0_hw_init()
    213  cancel_delayed_work_sync(&adev->uvd.idle_work);  in uvd_v5_0_hw_fini()
    237  cancel_delayed_work_sync(&adev->uvd  in uvd_v5_0_suspend()
    [all...]
amdgpu_uvd.h
     37  (AMDGPU_GPU_PAGE_ALIGN(le32_to_cpu(((const struct common_firmware_header *)(adev)->uvd.fw->data)->ucode_size_bytes) + \
amdgpu_kms.c
    213  fw_info->ver = adev->uvd.fw_version;  in amdgpu_firmware_info()
    401  for (i = 0; i < adev->uvd.num_uvd_inst; i++) {  in amdgpu_hw_ip_info()
    402  if (adev->uvd.harvest_config & (1 << i))  in amdgpu_hw_ip_info()
    405  if (adev->uvd.inst[i].ring.sched.ready)  in amdgpu_hw_ip_info()
    421  for (i = 0; i < adev->uvd.num_uvd_inst; i++) {  in amdgpu_hw_ip_info()
    422  if (adev->uvd.harvest_config & (1 << i))  in amdgpu_hw_ip_info()
    425  for (j = 0; j < adev->uvd.num_enc_rings; j++)  in amdgpu_hw_ip_info()
    426  if (adev->uvd.inst[i].ring_enc[j].sched.ready)  in amdgpu_hw_ip_info()
    971  handle.uvd_max_handles = adev->uvd.max_handles;  in amdgpu_info_ioctl()
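Note: the amdgpu_hw_ip_info() hits count usable decode/encode rings per instance, skipping instances whose bit is set in uvd.harvest_config and rings whose scheduler is not ready. A simplified self-contained sketch of that counting pattern; the struct layout is invented for illustration, only the field names and the loop/mask logic come from the hits.

    #include <stdbool.h>
    #include <stdint.h>

    #define SKETCH_MAX_INST 2
    #define SKETCH_MAX_ENC  2

    struct sketch_uvd_inst {
            bool dec_ready;                   /* stands in for inst[i].ring.sched.ready */
            bool enc_ready[SKETCH_MAX_ENC];   /* stands in for inst[i].ring_enc[j].sched.ready */
    };

    struct sketch_uvd {
            uint32_t num_uvd_inst;
            uint32_t num_enc_rings;
            uint32_t harvest_config;          /* bit i set => instance i harvested */
            struct sketch_uvd_inst inst[SKETCH_MAX_INST];
    };

    static uint32_t count_ready_enc_rings(const struct sketch_uvd *uvd)
    {
            uint32_t i, j, count = 0;

            for (i = 0; i < uvd->num_uvd_inst; i++) {
                    if (uvd->harvest_config & (1u << i))
                            continue;         /* harvested instance reports no rings */
                    for (j = 0; j < uvd->num_enc_rings; j++)
                            if (uvd->inst[i].enc_ready[j])
                                    count++;
            }
            return count;
    }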
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_uvd.c
     40  #include "uvd/uvd_4_2_d.h"
    145  INIT_DELAYED_WORK(&adev->uvd.idle_work, amdgpu_uvd_idle_work_handler);  in amdgpu_uvd_sw_init()
    216  r = request_firmware(&adev->uvd.fw, fw_name, adev->dev);  in amdgpu_uvd_sw_init()
    223  r = amdgpu_ucode_validate(adev->uvd.fw);  in amdgpu_uvd_sw_init()
    227  release_firmware(adev->uvd.fw);  in amdgpu_uvd_sw_init()
    228  adev->uvd.fw = NULL;  in amdgpu_uvd_sw_init()
    233  adev->uvd.max_handles = AMDGPU_DEFAULT_UVD_HANDLES;  in amdgpu_uvd_sw_init()
    235  hdr = (const struct common_firmware_header *)adev->uvd.fw->data;  in amdgpu_uvd_sw_init()
    254  adev->uvd.max_handles = AMDGPU_MAX_UVD_HANDLES;  in amdgpu_uvd_sw_init()
    256  adev->uvd  in amdgpu_uvd_sw_init()
    [all...]
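Note: hits 216-228 show the older linux-5.10 firmware path: request_firmware(), then amdgpu_ucode_validate(), with release_firmware() and a NULL reset on failure; the linux-6.6 listing above folds this into amdgpu_ucode_request()/amdgpu_ucode_release(). A hedged, non-verbatim sketch of the error handling around exactly those calls (error prints and the rest of the init path are omitted).

    /* Sketch only; assumes the kernel firmware API and the adev->uvd.fw
     * field shown in the hits, not the verbatim amdgpu_uvd_sw_init() body. */
    static int uvd_request_fw_sketch(struct amdgpu_device *adev, const char *fw_name)
    {
            int r;

            r = request_firmware(&adev->uvd.fw, fw_name, adev->dev);   /* hit 216 */
            if (r)
                    return r;

            r = amdgpu_ucode_validate(adev->uvd.fw);                   /* hit 223 */
            if (r) {
                    release_firmware(adev->uvd.fw);                    /* hit 227 */
                    adev->uvd.fw = NULL;                               /* hit 228 */
            }
            return r;
    }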
uvd_v7_0.c
     33  #include "uvd/uvd_7_0_offset.h"
     34  #include "uvd/uvd_7_0_sh_mask.h"
     42  #include "ivsrcid/uvd/irqsrcs_uvd_7_0.h"
     89  if (ring == &adev->uvd.inst[ring->me].ring_enc[0])  in uvd_v7_0_enc_ring_get_rptr()
    123  if (ring == &adev->uvd.inst[ring->me].ring_enc[0])  in uvd_v7_0_enc_ring_get_wptr()
    161  if (ring == &adev->uvd.inst[ring->me].ring_enc[0])  in uvd_v7_0_enc_ring_set_wptr()
    379  adev->uvd.num_uvd_inst = UVD7_MAX_HW_INSTANCES_VEGA20;  in uvd_v7_0_early_init()
    380  for (i = 0; i < adev->uvd.num_uvd_inst; i++) {  in uvd_v7_0_early_init()
    383  adev->uvd.harvest_config |= 1 << i;  in uvd_v7_0_early_init()
    386  if (adev->uvd  in uvd_v7_0_early_init()
    [all...]
uvd_v6_0.c
     30  #include "uvd/uvd_6_0_d.h"
     31  #include "uvd/uvd_6_0_sh_mask.h"
     67  (!adev->uvd.fw_version || adev->uvd.fw_version >= FW_1_130_16));  in uvd_v6_0_enc_support()
     95  if (ring == &adev->uvd.inst->ring_enc[0])  in uvd_v6_0_enc_ring_get_rptr()
    125  if (ring == &adev->uvd.inst->ring_enc[0])  in uvd_v6_0_enc_ring_get_wptr()
    156  if (ring == &adev->uvd.inst->ring_enc[0])  in uvd_v6_0_enc_ring_set_wptr()
    368  adev->uvd.num_uvd_inst = 1;  in uvd_v6_0_early_init()
    377  adev->uvd.num_enc_rings = 2;  in uvd_v6_0_early_init()
    393  r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_UVD_SYSTEM_MESSAGE, &adev->uvd  in uvd_v6_0_sw_init()
    [all...]
uvd_v3_1.c
     31  #include "uvd/uvd_3_1_d.h"
     32  #include "uvd/uvd_3_1_sh_mask.h"
    199  adev->uvd.inst->ring.funcs = &uvd_v3_1_ring_funcs;  in uvd_v3_1_set_ring_funcs()
    242  addr = (adev->uvd.inst->gpu_addr + AMDGPU_UVD_FIRMWARE_OFFSET) >> 3;  in uvd_v3_1_mc_resume()
    254  (AMDGPU_UVD_SESSION_SIZE * adev->uvd.max_handles)) >> 3;  in uvd_v3_1_mc_resume()
    259  addr = (adev->uvd.inst->gpu_addr >> 28) & 0xF;  in uvd_v3_1_mc_resume()
    263  addr = (adev->uvd.inst->gpu_addr >> 32) & 0xFF;  in uvd_v3_1_mc_resume()
    281  uint32_t keysel = adev->uvd.keyselect;  in uvd_v3_1_fw_validate()
    318  struct amdgpu_ring *ring = &adev->uvd.inst->ring;  in uvd_v3_1_start()
    326  /* set uvd bus  in uvd_v3_1_start()
    [all...]
uvd_v4_2.c
     31  #include "uvd/uvd_4_2_d.h"
     32  #include "uvd/uvd_4_2_sh_mask.h"
     96  adev->uvd.num_uvd_inst = 1;  in uvd_v4_2_early_init()
    111  r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, 124, &adev->uvd.inst->irq);  in uvd_v4_2_sw_init()
    119  ring = &adev->uvd.inst->ring;  in uvd_v4_2_sw_init()
    120  sprintf(ring->name, "uvd");  in uvd_v4_2_sw_init()
    121  r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0,  in uvd_v4_2_sw_init()
    159  struct amdgpu_ring *ring = &adev->uvd.inst->ring;  in uvd_v4_2_hw_init()
    254  struct amdgpu_ring *ring = &adev->uvd.inst->ring;  in uvd_v4_2_start()
    262  /* set uvd bus  in uvd_v4_2_start()
    [all...]
uvd_v5_0.c
     31  #include "uvd/uvd_5_0_d.h"
     32  #include "uvd/uvd_5_0_sh_mask.h"
     94  adev->uvd.num_uvd_inst = 1;  in uvd_v5_0_early_init()
    109  r = amdgpu_irq_add_id(adev, AMDGPU_IRQ_CLIENTID_LEGACY, VISLANDS30_IV_SRCID_UVD_SYSTEM_MESSAGE, &adev->uvd.inst->irq);  in uvd_v5_0_sw_init()
    117  ring = &adev->uvd.inst->ring;  in uvd_v5_0_sw_init()
    118  sprintf(ring->name, "uvd");  in uvd_v5_0_sw_init()
    119  r = amdgpu_ring_init(adev, ring, 512, &adev->uvd.inst->irq, 0,  in uvd_v5_0_sw_init()
    155  struct amdgpu_ring *ring = &adev->uvd.inst->ring;  in uvd_v5_0_hw_init()
    258  lower_32_bits(adev->uvd.inst->gpu_addr));  in uvd_v5_0_mc_resume()
    260  upper_32_bits(adev->uvd  in uvd_v5_0_mc_resume()
    [all...]
amdgpu_kms.c
    231  fw_info->ver = adev->uvd.fw_version;  in amdgpu_firmware_info()
    374  for (i = 0; i < adev->uvd.num_uvd_inst; i++) {  in amdgpu_hw_ip_info()
    375  if (adev->uvd.harvest_config & (1 << i))  in amdgpu_hw_ip_info()
    378  if (adev->uvd.inst[i].ring.sched.ready)  in amdgpu_hw_ip_info()
    394  for (i = 0; i < adev->uvd.num_uvd_inst; i++) {  in amdgpu_hw_ip_info()
    395  if (adev->uvd.harvest_config & (1 << i))  in amdgpu_hw_ip_info()
    398  for (j = 0; j < adev->uvd.num_enc_rings; j++)  in amdgpu_hw_ip_info()
    399  if (adev->uvd.inst[i].ring_enc[j].sched.ready)  in amdgpu_hw_ip_info()
    408  if (adev->uvd.harvest_config & (1 << i))  in amdgpu_hw_ip_info()
    420  if (adev->uvd  in amdgpu_hw_ip_info()
    [all...]
amdgpu_uvd.h
     37  (AMDGPU_GPU_PAGE_ALIGN(le32_to_cpu(((const struct common_firmware_header *)(adev)->uvd.fw->data)->ucode_size_bytes) + \
amdgpu_fence.c
    414  index = ALIGN(adev->uvd.fw->size, 8);  in amdgpu_fence_driver_start_ring()
    415  ring->fence_drv.cpu_addr = adev->uvd.inst[ring->me].cpu_addr + index;  in amdgpu_fence_driver_start_ring()
    416  ring->fence_drv.gpu_addr = adev->uvd.inst[ring->me].gpu_addr + index;  in amdgpu_fence_driver_start_ring()
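Note: these hits place the UVD ring's fence slot inside the same VCPU buffer that holds the firmware, just past the image rounded up to 8 bytes. A standalone sketch of the offset arithmetic; ALIGN_UP() mirrors the kernel's power-of-two ALIGN(), and the sizes are example values.

    #include <stdint.h>
    #include <stdio.h>

    #define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((uint64_t)(a) - 1))

    int main(void)
    {
            uint64_t fw_size  = 173541;                  /* example firmware blob size */
            uint64_t gpu_addr = 0x0000008f00000000ull;   /* example VCPU BO GPU address */

            uint64_t index = ALIGN_UP(fw_size, 8);       /* matches hit 414 */

            printf("fence slot at gpu_addr + 0x%llx = 0x%llx\n",
                   (unsigned long long)index,
                   (unsigned long long)(gpu_addr + index));
            return 0;
    }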
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/
radeon_uvd.c
     72  INIT_DELAYED_WORK(&rdev->uvd.idle_work, radeon_uvd_idle_work_handler);  in radeon_uvd_init()
    137  rdev->uvd.fw_header_present = false;  in radeon_uvd_init()
    138  rdev->uvd.max_handles = RADEON_DEFAULT_UVD_HANDLES;  in radeon_uvd_init()
    153  rdev->uvd.fw_header_present = true;  in radeon_uvd_init()
    166  rdev->uvd.max_handles = RADEON_MAX_UVD_HANDLES;  in radeon_uvd_init()
    186  RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles;  in radeon_uvd_init()
    189  NULL, &rdev->uvd.vcpu_bo);  in radeon_uvd_init()
    195  r = radeon_bo_reserve(rdev->uvd.vcpu_bo, false);  in radeon_uvd_init()
    197  radeon_bo_unref(&rdev->uvd.vcpu_bo);  in radeon_uvd_init()
    202  r = radeon_bo_pin(rdev->uvd  in radeon_uvd_init()
    [all...]
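Note: hits 186-202 size the radeon UVD VCPU buffer with a per-session term on top of the firmware and its work areas, then create, reserve and pin it. A sizing-only sketch follows; the constants below are placeholders rather than the kernel's values, and the stack/heap terms are assumed from context since only the trailing SESSION_SIZE * max_handles term is visible in hit 186.

    #include <stddef.h>
    #include <stdio.h>

    /* placeholder sizes for illustration only */
    #define SKETCH_UVD_STACK_SIZE   (64u * 1024)
    #define SKETCH_UVD_HEAP_SIZE    (256u * 1024)
    #define SKETCH_UVD_SESSION_SIZE (64u * 1024)

    static size_t uvd_vcpu_bo_size_sketch(size_t fw_size, unsigned int max_handles)
    {
            return fw_size + SKETCH_UVD_STACK_SIZE + SKETCH_UVD_HEAP_SIZE +
                   (size_t)SKETCH_UVD_SESSION_SIZE * max_handles;
    }

    int main(void)
    {
            printf("%zu bytes for 10 handles\n",
                   uvd_vcpu_bo_size_sketch(200u * 1024, 10));
            return 0;
    }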
uvd_v4_2.c
     46  if (rdev->uvd.fw_header_present)  in uvd_v4_2_resume()
     47  addr = (rdev->uvd.gpu_addr + 0x200) >> 3;  in uvd_v4_2_resume()
     49  addr = rdev->uvd.gpu_addr >> 3;  in uvd_v4_2_resume()
     62  (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3;  in uvd_v4_2_resume()
     67  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v4_2_resume()
     71  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v4_2_resume()
     74  if (rdev->uvd.fw_header_present)  in uvd_v4_2_resume()
     75  WREG32(UVD_GP_SCRATCH4, rdev->uvd.max_handles);  in uvd_v4_2_resume()
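Note: hits 46-49 pick the engine's start address: 0x200 bytes past the buffer when a firmware header is present, the buffer itself otherwise, in both cases expressed in 8-byte units. The selection as a standalone function, with the register write left out.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    static uint32_t uvd_start_addr_sketch(uint64_t gpu_addr, bool fw_header_present)
    {
            if (fw_header_present)
                    return (uint32_t)((gpu_addr + 0x200) >> 3);   /* hit 47 */
            return (uint32_t)(gpu_addr >> 3);                     /* hit 49 */
    }

    int main(void)
    {
            printf("0x%08x\n", uvd_start_addr_sketch(0x40000000ull, true));
            return 0;
    }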
uvd_v2_2.c
    113  addr = rdev->uvd.gpu_addr >> 3;  in uvd_v2_2_resume()
    125  (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3;  in uvd_v2_2_resume()
    130  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v2_2_resume()
    134  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v2_2_resume()
uvd_v1_0.c
    121  addr = (rdev->uvd.gpu_addr >> 3) + 16;  in uvd_v1_0_resume()
    133  (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3;  in uvd_v1_0_resume()
    138  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v1_0_resume()
    142  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v1_0_resume()
    145  WREG32(UVD_FW_START, *((uint32_t*)rdev->uvd.cpu_addr));  in uvd_v1_0_resume()
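Note: hits 121 and 145 add two details to the same resume pattern on UVD 1.0: a 16-unit (128-byte) skip on top of the 8-byte-unit conversion, and the first 32-bit word of the firmware image copied into UVD_FW_START. The arithmetic only, with the register write replaced by a print and the firmware word as a placeholder.

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t gpu_addr = 0x40000000ull;           /* example VCPU BO address */
            uint32_t fw_first_word = 0x12345678;         /* placeholder for *(uint32_t *)cpu_addr */

            uint32_t addr = (uint32_t)(gpu_addr >> 3) + 16;   /* hit 121 */

            printf("start=0x%08x UVD_FW_START=0x%08x\n", addr, fw_first_word);
            return 0;
    }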
/kernel/linux/linux-6.6/drivers/gpu/drm/radeon/
radeon_uvd.c
     72  INIT_DELAYED_WORK(&rdev->uvd.idle_work, radeon_uvd_idle_work_handler);  in radeon_uvd_init()
    137  rdev->uvd.fw_header_present = false;  in radeon_uvd_init()
    138  rdev->uvd.max_handles = RADEON_DEFAULT_UVD_HANDLES;  in radeon_uvd_init()
    153  rdev->uvd.fw_header_present = true;  in radeon_uvd_init()
    168  rdev->uvd.max_handles = RADEON_MAX_UVD_HANDLES;  in radeon_uvd_init()
    188  RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles;  in radeon_uvd_init()
    191  NULL, &rdev->uvd.vcpu_bo);  in radeon_uvd_init()
    197  r = radeon_bo_reserve(rdev->uvd.vcpu_bo, false);  in radeon_uvd_init()
    199  radeon_bo_unref(&rdev->uvd.vcpu_bo);  in radeon_uvd_init()
    204  r = radeon_bo_pin(rdev->uvd  in radeon_uvd_init()
    [all...]
uvd_v4_2.c
     46  if (rdev->uvd.fw_header_present)  in uvd_v4_2_resume()
     47  addr = (rdev->uvd.gpu_addr + 0x200) >> 3;  in uvd_v4_2_resume()
     49  addr = rdev->uvd.gpu_addr >> 3;  in uvd_v4_2_resume()
     62  (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3;  in uvd_v4_2_resume()
     67  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v4_2_resume()
     71  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v4_2_resume()
     74  if (rdev->uvd.fw_header_present)  in uvd_v4_2_resume()
     75  WREG32(UVD_GP_SCRATCH4, rdev->uvd.max_handles);  in uvd_v4_2_resume()
uvd_v2_2.c
    113  addr = rdev->uvd.gpu_addr >> 3;  in uvd_v2_2_resume()
    125  (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3;  in uvd_v2_2_resume()
    130  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v2_2_resume()
    134  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v2_2_resume()
uvd_v1_0.c
    121  addr = (rdev->uvd.gpu_addr >> 3) + 16;  in uvd_v1_0_resume()
    133  (RADEON_UVD_SESSION_SIZE * rdev->uvd.max_handles)) >> 3;  in uvd_v1_0_resume()
    138  addr = (rdev->uvd.gpu_addr >> 28) & 0xF;  in uvd_v1_0_resume()
    142  addr = (rdev->uvd.gpu_addr >> 32) & 0xFF;  in uvd_v1_0_resume()
    145  WREG32(UVD_FW_START, *((uint32_t*)rdev->uvd.cpu_addr));  in uvd_v1_0_resume()