
Searched refs:inst_idx (Results 1 - 17 of 17) sorted by relevance

/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
vcn_v3_0.c
69 int inst_idx, struct dpg_pause_state *new_state);
459 static void vcn_v3_0_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) in vcn_v3_0_mc_resume_dpg_mode() argument
467 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v3_0_mc_resume_dpg_mode()
468 VCN, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), in vcn_v3_0_mc_resume_dpg_mode()
469 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
470 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v3_0_mc_resume_dpg_mode()
471 VCN, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), in vcn_v3_0_mc_resume_dpg_mode()
472 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
473 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v3_0_mc_resume_dpg_mode()
474 VCN, inst_idx, mmUVD_VCPU_CACHE_OFFSET in vcn_v3_0_mc_resume_dpg_mode()
780 vcn_v3_0_clock_gating_dpg_mode(struct amdgpu_device *adev, uint8_t sram_sel, int inst_idx, uint8_t indirect) vcn_v3_0_clock_gating_dpg_mode() argument
896 vcn_v3_0_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) vcn_v3_0_start_dpg_mode() argument
1430 vcn_v3_0_stop_dpg_mode(struct amdgpu_device *adev, int inst_idx) vcn_v3_0_stop_dpg_mode() argument
1536 vcn_v3_0_pause_dpg_mode(struct amdgpu_device *adev, int inst_idx, struct dpg_pause_state *new_state) vcn_v3_0_pause_dpg_mode() argument
[all...]
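Note: the vcn_v3_0.c matches above are the DPG-mode memory-controller resume path, which splits each instance's firmware TMR address into _LOW/_HIGH halves and writes them through WREG32_SOC15_DPG_MODE, indexed by inst_idx. A minimal standalone C sketch of that pattern follows; the reg_write() helper, the struct names, and the trailing zero cache offset are assumptions for illustration, not the kernel's API.

/*
 * Illustrative sketch only -- not the kernel code. It models how the
 * vcn_v3_0_mc_resume_dpg_mode() matches split a per-instance 64-bit
 * firmware (TMR) address into _LOW/_HIGH register writes keyed by
 * inst_idx. reg_write() and the structs are hypothetical stand-ins.
 */
#include <stdint.h>
#include <stdio.h>

#define MAX_VCN_INSTANCES 4

struct fake_ucode { uint32_t tmr_mc_addr_lo, tmr_mc_addr_hi; };
struct fake_adev  { struct fake_ucode vcn_ucode[MAX_VCN_INSTANCES]; };

/* Hypothetical stand-in for WREG32_SOC15_DPG_MODE(). */
static void reg_write(int inst_idx, const char *reg, uint32_t val, int indirect)
{
	printf("inst %d: %s <- 0x%08x (%s)\n",
	       inst_idx, reg, (unsigned)val, indirect ? "indirect/SRAM" : "direct");
}

static void mc_resume_dpg_mode_sketch(struct fake_adev *adev, int inst_idx, int indirect)
{
	/* Low and high halves of this instance's firmware TMR address. */
	reg_write(inst_idx, "UVD_LMI_VCPU_CACHE_64BIT_BAR_LOW",
		  adev->vcn_ucode[inst_idx].tmr_mc_addr_lo, indirect);
	reg_write(inst_idx, "UVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH",
		  adev->vcn_ucode[inst_idx].tmr_mc_addr_hi, indirect);
	/* The cache-offset write is truncated in the matches; zero is assumed. */
	reg_write(inst_idx, "UVD_VCPU_CACHE_OFFSET0", 0, indirect);
}

int main(void)
{
	struct fake_adev adev = { .vcn_ucode = { [1] = { 0xdeadb000u, 0x12u } } };
	mc_resume_dpg_mode_sketch(&adev, 1, 1);
	return 0;
}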
amdgpu_vcn.h
70 #define RREG32_SOC15_DPG_MODE_1_0(ip, inst_idx, reg, mask, sram_sel) \
71 ({ WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_MASK, mask); \
72 WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_CTL, \
74 ((adev->reg_offset[ip##_HWIP][inst_idx][reg##_BASE_IDX] + reg) \
77 RREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_DATA); \
80 #define WREG32_SOC15_DPG_MODE_1_0(ip, inst_idx, reg, value, mask, sram_sel) \
82 WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_DATA, value); \
83 WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_MASK, mask); \
84 WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_CTL, \
86 ((adev->reg_offset[ip##_HWIP][inst_idx][re
[all...]
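Note: the RREG32/WREG32_SOC15_DPG_MODE_1_0 macros matched above implement an indirect register access: program UVD_DPG_LMA_MASK, then UVD_DPG_LMA_CTL with the computed register offset and sram_sel, and move the payload through UVD_DPG_LMA_DATA (data is staged first on writes, read back last on reads). A hedged sketch of that handshake, assuming placeholder mmio helpers and an invented CTL bit layout; the real SOC15 encoding is truncated in the results.

/*
 * Sketch of the mask -> ctl -> data indirection used by the
 * *_SOC15_DPG_MODE_1_0 macros. The register map, mmio helpers and CTL
 * bit layout here are placeholders, not the real hardware encoding.
 */
#include <stdint.h>

enum { LMA_MASK, LMA_CTL, LMA_DATA, NUM_REGS };
static uint32_t fake_mmio[NUM_REGS];

static void     mmio_write(int reg, uint32_t v) { fake_mmio[reg] = v; }
static uint32_t mmio_read(int reg)              { return fake_mmio[reg]; }

/* Assumed CTL fields: bit 0 = read, bit 1 = write, offset in high bits. */
#define LMA_CTL_RD        (1u << 0)
#define LMA_CTL_WR        (1u << 1)
#define LMA_CTL_OFFSET(x) ((uint32_t)(x) << 16)

uint32_t dpg_indirect_read(uint32_t reg_offset, uint32_t mask, uint32_t sram_sel)
{
	mmio_write(LMA_MASK, mask);                       /* which bits matter         */
	mmio_write(LMA_CTL, LMA_CTL_OFFSET(reg_offset) |  /* target register offset    */
			    (sram_sel << 8) | LMA_CTL_RD);
	return mmio_read(LMA_DATA);                       /* value comes back via DATA */
}

void dpg_indirect_write(uint32_t reg_offset, uint32_t value,
			uint32_t mask, uint32_t sram_sel)
{
	mmio_write(LMA_DATA, value);                      /* stage the value first     */
	mmio_write(LMA_MASK, mask);
	mmio_write(LMA_CTL, LMA_CTL_OFFSET(reg_offset) |
			    (sram_sel << 8) | LMA_CTL_WR);
}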
vcn_v2_5.c
60 int inst_idx, struct dpg_pause_state *new_state);
448 static void vcn_v2_5_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) in vcn_v2_5_mc_resume_dpg_mode() argument
456 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v2_5_mc_resume_dpg_mode()
458 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
459 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v2_5_mc_resume_dpg_mode()
461 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
462 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v2_5_mc_resume_dpg_mode()
465 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v2_5_mc_resume_dpg_mode()
467 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v2_5_mc_resume_dpg_mode()
469 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSE in vcn_v2_5_mc_resume_dpg_mode()
661 vcn_v2_5_clock_gating_dpg_mode(struct amdgpu_device *adev, uint8_t sram_sel, int inst_idx, uint8_t indirect) vcn_v2_5_clock_gating_dpg_mode() argument
771 vcn_v2_5_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) vcn_v2_5_start_dpg_mode() argument
1304 vcn_v2_5_stop_dpg_mode(struct amdgpu_device *adev, int inst_idx) vcn_v2_5_stop_dpg_mode() argument
1400 vcn_v2_5_pause_dpg_mode(struct amdgpu_device *adev, int inst_idx, struct dpg_pause_state *new_state) vcn_v2_5_pause_dpg_mode() argument
[all...]
vcn_v1_0.c
54 int inst_idx, struct dpg_pause_state *new_state);
1211 int inst_idx, struct dpg_pause_state *new_state) in vcn_v1_0_pause_dpg_mode()
1219 if (adev->vcn.inst[inst_idx].pause_state.fw_based != new_state->fw_based) { in vcn_v1_0_pause_dpg_mode()
1221 adev->vcn.inst[inst_idx].pause_state.fw_based, in vcn_v1_0_pause_dpg_mode()
1222 adev->vcn.inst[inst_idx].pause_state.jpeg, in vcn_v1_0_pause_dpg_mode()
1271 adev->vcn.inst[inst_idx].pause_state.fw_based = new_state->fw_based; in vcn_v1_0_pause_dpg_mode()
1275 if (adev->vcn.inst[inst_idx].pause_state.jpeg != new_state->jpeg) { in vcn_v1_0_pause_dpg_mode()
1277 adev->vcn.inst[inst_idx].pause_state.fw_based, in vcn_v1_0_pause_dpg_mode()
1278 adev->vcn.inst[inst_idx].pause_state.jpeg, in vcn_v1_0_pause_dpg_mode()
1332 adev->vcn.inst[inst_idx] in vcn_v1_0_pause_dpg_mode()
1210 vcn_v1_0_pause_dpg_mode(struct amdgpu_device *adev, int inst_idx, struct dpg_pause_state *new_state) vcn_v1_0_pause_dpg_mode() argument
[all...]
vcn_v2_0.c
59 int inst_idx, struct dpg_pause_state *new_state);
1201 int inst_idx, struct dpg_pause_state *new_state) in vcn_v2_0_pause_dpg_mode()
1208 if (adev->vcn.inst[inst_idx].pause_state.fw_based != new_state->fw_based) { in vcn_v2_0_pause_dpg_mode()
1210 adev->vcn.inst[inst_idx].pause_state.fw_based, new_state->fw_based); in vcn_v2_0_pause_dpg_mode()
1271 adev->vcn.inst[inst_idx].pause_state.fw_based = new_state->fw_based; in vcn_v2_0_pause_dpg_mode()
1200 vcn_v2_0_pause_dpg_mode(struct amdgpu_device *adev, int inst_idx, struct dpg_pause_state *new_state) vcn_v2_0_pause_dpg_mode() argument
amdgpu_psp.h
354 int psp_update_vcn_sram(struct amdgpu_device *adev, int inst_idx,
amdgpu_psp.c
2339 int psp_update_vcn_sram(struct amdgpu_device *adev, int inst_idx, in psp_update_vcn_sram() argument
2344 ucode.ucode_id = inst_idx ? AMDGPU_UCODE_ID_VCN1_RAM : in psp_update_vcn_sram()
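Note: the psp_update_vcn_sram() match selects a per-instance ucode ID from inst_idx. The else branch is truncated above; AMDGPU_UCODE_ID_VCN0_RAM is assumed in the sketch below, and the enum is a stand-in rather than the real amdgpu_ucode.h definition.

/*
 * Sketch of the per-instance ucode-ID selection visible in the
 * psp_update_vcn_sram() match. Enum values are placeholders.
 */
enum fake_ucode_id {
	AMDGPU_UCODE_ID_VCN0_RAM_SKETCH,   /* assumed: the truncated else branch */
	AMDGPU_UCODE_ID_VCN1_RAM_SKETCH,
};

enum fake_ucode_id pick_vcn_sram_ucode_id(int inst_idx)
{
	/* inst_idx 0 -> VCN0 RAM image, anything else -> VCN1 RAM image */
	return inst_idx ? AMDGPU_UCODE_ID_VCN1_RAM_SKETCH
			: AMDGPU_UCODE_ID_VCN0_RAM_SKETCH;
}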
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
vcn_v4_0.c
66 int inst_idx, struct dpg_pause_state *new_state);
431 * @inst_idx: instance number index
436 static void vcn_v4_0_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) in vcn_v4_0_mc_resume_dpg_mode() argument
446 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v4_0_mc_resume_dpg_mode()
447 VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), in vcn_v4_0_mc_resume_dpg_mode()
448 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect); in vcn_v4_0_mc_resume_dpg_mode()
449 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v4_0_mc_resume_dpg_mode()
450 VCN, inst_idx, regUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), in vcn_v4_0_mc_resume_dpg_mode()
451 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect); in vcn_v4_0_mc_resume_dpg_mode()
452 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSE in vcn_v4_0_mc_resume_dpg_mode()
775 vcn_v4_0_disable_clock_gating_dpg_mode(struct amdgpu_device *adev, uint8_t sram_sel, int inst_idx, uint8_t indirect) vcn_v4_0_disable_clock_gating_dpg_mode() argument
883 vcn_v4_0_enable_ras(struct amdgpu_device *adev, int inst_idx, bool indirect) vcn_v4_0_enable_ras() argument
914 vcn_v4_0_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) vcn_v4_0_start_dpg_mode() argument
1429 vcn_v4_0_stop_dpg_mode(struct amdgpu_device *adev, int inst_idx) vcn_v4_0_stop_dpg_mode() argument
1543 vcn_v4_0_pause_dpg_mode(struct amdgpu_device *adev, int inst_idx, struct dpg_pause_state *new_state) vcn_v4_0_pause_dpg_mode() argument
[all...]
vcn_v3_0.c
75 int inst_idx, struct dpg_pause_state *new_state);
498 static void vcn_v3_0_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) in vcn_v3_0_mc_resume_dpg_mode() argument
506 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v3_0_mc_resume_dpg_mode()
507 VCN, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_LOW), in vcn_v3_0_mc_resume_dpg_mode()
508 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
509 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v3_0_mc_resume_dpg_mode()
510 VCN, inst_idx, mmUVD_LMI_VCPU_CACHE_64BIT_BAR_HIGH), in vcn_v3_0_mc_resume_dpg_mode()
511 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect); in vcn_v3_0_mc_resume_dpg_mode()
512 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v3_0_mc_resume_dpg_mode()
513 VCN, inst_idx, mmUVD_VCPU_CACHE_OFFSET in vcn_v3_0_mc_resume_dpg_mode()
826 vcn_v3_0_clock_gating_dpg_mode(struct amdgpu_device *adev, uint8_t sram_sel, int inst_idx, uint8_t indirect) vcn_v3_0_clock_gating_dpg_mode() argument
942 vcn_v3_0_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) vcn_v3_0_start_dpg_mode() argument
1491 vcn_v3_0_stop_dpg_mode(struct amdgpu_device *adev, int inst_idx) vcn_v3_0_stop_dpg_mode() argument
1597 vcn_v3_0_pause_dpg_mode(struct amdgpu_device *adev, int inst_idx, struct dpg_pause_state *new_state) vcn_v3_0_pause_dpg_mode() argument
[all...]
vcn_v2_5.c
64 int inst_idx, struct dpg_pause_state *new_state);
469 static void vcn_v2_5_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) in vcn_v2_5_mc_resume_dpg_mode() argument
477 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v2_5_mc_resume_dpg_mode()
479 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_lo), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
480 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v2_5_mc_resume_dpg_mode()
482 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx].tmr_mc_addr_hi), 0, indirect); in vcn_v2_5_mc_resume_dpg_mode()
483 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v2_5_mc_resume_dpg_mode()
486 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v2_5_mc_resume_dpg_mode()
488 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSET( in vcn_v2_5_mc_resume_dpg_mode()
490 WREG32_SOC15_DPG_MODE(inst_idx, SOC15_DPG_MODE_OFFSE in vcn_v2_5_mc_resume_dpg_mode()
682 vcn_v2_5_clock_gating_dpg_mode(struct amdgpu_device *adev, uint8_t sram_sel, int inst_idx, uint8_t indirect) vcn_v2_5_clock_gating_dpg_mode() argument
792 vcn_v2_6_enable_ras(struct amdgpu_device *adev, int inst_idx, bool indirect) vcn_v2_6_enable_ras() argument
819 vcn_v2_5_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) vcn_v2_5_start_dpg_mode() argument
1352 vcn_v2_5_stop_dpg_mode(struct amdgpu_device *adev, int inst_idx) vcn_v2_5_stop_dpg_mode() argument
1448 vcn_v2_5_pause_dpg_mode(struct amdgpu_device *adev, int inst_idx, struct dpg_pause_state *new_state) vcn_v2_5_pause_dpg_mode() argument
[all...]
amdgpu_vcn.h
81 #define RREG32_SOC15_DPG_MODE_1_0(ip, inst_idx, reg, mask, sram_sel) \
82 ({ WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_MASK, mask); \
83 WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_CTL, \
85 ((adev->reg_offset[ip##_HWIP][inst_idx][reg##_BASE_IDX] + reg) \
88 RREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_DATA); \
91 #define WREG32_SOC15_DPG_MODE_1_0(ip, inst_idx, reg, value, mask, sram_sel) \
93 WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_DATA, value); \
94 WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_MASK, mask); \
95 WREG32_SOC15(ip, inst_idx, mmUVD_DPG_LMA_CTL, \
97 ((adev->reg_offset[ip##_HWIP][inst_idx][re
[all...]
vcn_v4_0_3.c
54 int inst_idx, struct dpg_pause_state *new_state);
58 int inst_idx, bool indirect);
326 * @inst_idx: instance number
330 static void vcn_v4_0_3_mc_resume(struct amdgpu_device *adev, int inst_idx) in vcn_v4_0_3_mc_resume() argument
338 vcn_inst = GET_INST(VCN, inst_idx); in vcn_v4_0_3_mc_resume()
343 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx] in vcn_v4_0_3_mc_resume()
347 (adev->firmware.ucode[AMDGPU_UCODE_ID_VCN + inst_idx] in vcn_v4_0_3_mc_resume()
353 lower_32_bits(adev->vcn.inst[inst_idx].gpu_addr)); in vcn_v4_0_3_mc_resume()
356 upper_32_bits(adev->vcn.inst[inst_idx].gpu_addr)); in vcn_v4_0_3_mc_resume()
365 lower_32_bits(adev->vcn.inst[inst_idx] in vcn_v4_0_3_mc_resume()
405 vcn_v4_0_3_mc_resume_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) vcn_v4_0_3_mc_resume_dpg_mode() argument
518 vcn_v4_0_3_disable_clock_gating(struct amdgpu_device *adev, int inst_idx) vcn_v4_0_3_disable_clock_gating() argument
613 vcn_v4_0_3_disable_clock_gating_dpg_mode(struct amdgpu_device *adev, uint8_t sram_sel, int inst_idx, uint8_t indirect) vcn_v4_0_3_disable_clock_gating_dpg_mode() argument
662 vcn_v4_0_3_enable_clock_gating(struct amdgpu_device *adev, int inst_idx) vcn_v4_0_3_enable_clock_gating() argument
714 vcn_v4_0_3_start_dpg_mode(struct amdgpu_device *adev, int inst_idx, bool indirect) vcn_v4_0_3_start_dpg_mode() argument
1219 vcn_v4_0_3_stop_dpg_mode(struct amdgpu_device *adev, int inst_idx) vcn_v4_0_3_stop_dpg_mode() argument
1338 vcn_v4_0_3_pause_dpg_mode(struct amdgpu_device *adev, int inst_idx, struct dpg_pause_state *new_state) vcn_v4_0_3_pause_dpg_mode() argument
1747 vcn_v4_0_3_enable_ras(struct amdgpu_device *adev, int inst_idx, bool indirect) vcn_v4_0_3_enable_ras() argument
[all...]
vcn_v1_0.c
54 int inst_idx, struct dpg_pause_state *new_state);
1212 int inst_idx, struct dpg_pause_state *new_state) in vcn_v1_0_pause_dpg_mode()
1220 if (adev->vcn.inst[inst_idx].pause_state.fw_based != new_state->fw_based) { in vcn_v1_0_pause_dpg_mode()
1222 adev->vcn.inst[inst_idx].pause_state.fw_based, in vcn_v1_0_pause_dpg_mode()
1223 adev->vcn.inst[inst_idx].pause_state.jpeg, in vcn_v1_0_pause_dpg_mode()
1272 adev->vcn.inst[inst_idx].pause_state.fw_based = new_state->fw_based; in vcn_v1_0_pause_dpg_mode()
1276 if (adev->vcn.inst[inst_idx].pause_state.jpeg != new_state->jpeg) { in vcn_v1_0_pause_dpg_mode()
1278 adev->vcn.inst[inst_idx].pause_state.fw_based, in vcn_v1_0_pause_dpg_mode()
1279 adev->vcn.inst[inst_idx].pause_state.jpeg, in vcn_v1_0_pause_dpg_mode()
1333 adev->vcn.inst[inst_idx] in vcn_v1_0_pause_dpg_mode()
1211 vcn_v1_0_pause_dpg_mode(struct amdgpu_device *adev, int inst_idx, struct dpg_pause_state *new_state) vcn_v1_0_pause_dpg_mode() argument
[all...]
aqua_vanjaram.c
65 uint32_t inst_idx, struct amdgpu_ring *ring) in aqua_vanjaram_set_xcp_id()
75 inst_mask = 1 << inst_idx; in aqua_vanjaram_set_xcp_id()
90 inst_mask = 1 << (inst_idx * 2); in aqua_vanjaram_set_xcp_id()
64 aqua_vanjaram_set_xcp_id(struct amdgpu_device *adev, uint32_t inst_idx, struct amdgpu_ring *ring) aqua_vanjaram_set_xcp_id() argument
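Note: aqua_vanjaram_set_xcp_id() builds an instance mask from inst_idx, with one matched case using bit inst_idx and another using bit inst_idx * 2. The switch that picks between them is not visible here, so the sketch below hides it behind a hypothetical flag.

/*
 * Sketch of the instance-mask construction seen in
 * aqua_vanjaram_set_xcp_id(). The doubled_stride flag is invented; in
 * the kernel the choice depends on the surrounding (elided) context.
 */
#include <stdbool.h>
#include <stdint.h>

uint32_t build_inst_mask(uint32_t inst_idx, bool doubled_stride)
{
	/* The set bit identifies the physical instance this ring uses. */
	return doubled_stride ? (1u << (inst_idx * 2)) : (1u << inst_idx);
}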
amdgpu_vcn.c
1250 int amdgpu_vcn_psp_update_sram(struct amdgpu_device *adev, int inst_idx, in amdgpu_vcn_psp_update_sram() argument
1255 (inst_idx ? AMDGPU_UCODE_ID_VCN1_RAM : in amdgpu_vcn_psp_update_sram()
1257 .mc_addr = adev->vcn.inst[inst_idx].dpg_sram_gpu_addr, in amdgpu_vcn_psp_update_sram()
1258 .ucode_size = ((uintptr_t)adev->vcn.inst[inst_idx].dpg_sram_curr_addr - in amdgpu_vcn_psp_update_sram()
1259 (uintptr_t)adev->vcn.inst[inst_idx].dpg_sram_cpu_addr), in amdgpu_vcn_psp_update_sram()
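Note: amdgpu_vcn_psp_update_sram() sizes the DPG SRAM image as the distance between the instance's write cursor (dpg_sram_curr_addr) and the start of the CPU-side buffer (dpg_sram_cpu_addr), and passes the buffer's GPU address as mc_addr. A sketch under those assumptions; the struct and function names are placeholders.

/*
 * Sketch of the DPG SRAM sizing visible in the amdgpu_vcn_psp_update_sram()
 * match. Field names mirror the match; everything else is a placeholder.
 */
#include <stdint.h>

struct vcn_inst_sram_sketch {
	uint64_t  dpg_sram_gpu_addr;   /* GPU address handed to PSP        */
	uint32_t *dpg_sram_cpu_addr;   /* start of the CPU-side staging    */
	uint32_t *dpg_sram_curr_addr;  /* write cursor after reg packets   */
};

struct psp_ucode_desc_sketch { uint64_t mc_addr; uint32_t ucode_size; };

struct psp_ucode_desc_sketch describe_dpg_sram(const struct vcn_inst_sram_sketch *inst)
{
	struct psp_ucode_desc_sketch d = {
		.mc_addr    = inst->dpg_sram_gpu_addr,
		/* bytes written so far = cursor minus buffer start */
		.ucode_size = (uint32_t)((uintptr_t)inst->dpg_sram_curr_addr -
					 (uintptr_t)inst->dpg_sram_cpu_addr),
	};
	return d;
}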
jpeg_v4_0_3.c
413 static void jpeg_v4_0_3_disable_clock_gating(struct amdgpu_device *adev, int inst_idx) in jpeg_v4_0_3_disable_clock_gating() argument
418 jpeg_inst = GET_INST(JPEG, inst_idx); in jpeg_v4_0_3_disable_clock_gating()
438 static void jpeg_v4_0_3_enable_clock_gating(struct amdgpu_device *adev, int inst_idx) in jpeg_v4_0_3_enable_clock_gating() argument
443 jpeg_inst = GET_INST(JPEG, inst_idx); in jpeg_v4_0_3_enable_clock_gating()
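Note: the jpeg_v4_0_3.c (and vcn_v4_0_3.c) matches translate the logical inst_idx through GET_INST() before touching registers, which suggests a logical-to-physical instance mapping. The sketch below illustrates that idea with an invented lookup table; it is not the kernel's GET_INST() implementation.

/*
 * Sketch of a logical->physical instance translation: inst_idx is a
 * logical index, and the driver looks up the physical instance to use
 * for MMIO. The mapping table and types here are hypothetical.
 */
#include <stdint.h>

#define MAX_INST 8

struct inst_map_sketch { uint8_t phys[MAX_INST]; };

uint8_t get_inst_sketch(const struct inst_map_sketch *map, int inst_idx)
{
	return map->phys[inst_idx];   /* physical instance used for registers */
}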
vcn_v2_0.c
62 int inst_idx, struct dpg_pause_state *new_state);
1202 int inst_idx, struct dpg_pause_state *new_state) in vcn_v2_0_pause_dpg_mode()
1209 if (adev->vcn.inst[inst_idx].pause_state.fw_based != new_state->fw_based) { in vcn_v2_0_pause_dpg_mode()
1211 adev->vcn.inst[inst_idx].pause_state.fw_based, new_state->fw_based); in vcn_v2_0_pause_dpg_mode()
1272 adev->vcn.inst[inst_idx].pause_state.fw_based = new_state->fw_based; in vcn_v2_0_pause_dpg_mode()
1201 vcn_v2_0_pause_dpg_mode(struct amdgpu_device *adev, int inst_idx, struct dpg_pause_state *new_state) vcn_v2_0_pause_dpg_mode() argument
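Note: the *_pause_dpg_mode() matches share one pattern: compare the cached per-instance pause_state against the requested new_state, reprogram the hardware only on a change, then update the cache. A minimal sketch, assuming placeholder types and an apply_fw_based_pause() hook for the register sequence elided above.

/*
 * Sketch of the change-detection pattern in the *_pause_dpg_mode()
 * matches. Types and the apply hook are placeholders.
 */
#include <stdbool.h>

enum dpg_pause_sketch { DPG_UNPAUSE, DPG_PAUSE };

struct vcn_inst_sketch {
	enum dpg_pause_sketch fw_based;   /* last state programmed to HW */
};

static void apply_fw_based_pause(int inst_idx, enum dpg_pause_sketch s)
{
	/* stand-in for the register sequence elided in the matches */
	(void)inst_idx;
	(void)s;
}

void pause_dpg_mode_sketch(struct vcn_inst_sketch *inst, int inst_idx,
			   enum dpg_pause_sketch requested)
{
	if (inst[inst_idx].fw_based != requested) {       /* only act on a change */
		apply_fw_based_pause(inst_idx, requested);
		inst[inst_idx].fw_based = requested;      /* cache the new state  */
	}
}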
