/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
umc_v8_7.c

  References:
     50  uint32_t umc_reg_offset)  in umc_v8_7_clear_error_count_per_channel()
     62  umc_reg_offset) * 4);  in umc_v8_7_clear_error_count_per_channel()
     66  WREG32_PCIE((ecc_err_cnt_sel_addr + umc_reg_offset) * 4,  in umc_v8_7_clear_error_count_per_channel()
     70  WREG32_PCIE((ecc_err_cnt_addr + umc_reg_offset) * 4,  in umc_v8_7_clear_error_count_per_channel()
     75  umc_reg_offset) * 4);  in umc_v8_7_clear_error_count_per_channel()
     79  WREG32_PCIE((ecc_err_cnt_sel_addr + umc_reg_offset) * 4,  in umc_v8_7_clear_error_count_per_channel()
     83  WREG32_PCIE((ecc_err_cnt_addr + umc_reg_offset) * 4,  in umc_v8_7_clear_error_count_per_channel()
     91  uint32_t umc_reg_offset = 0;  in umc_v8_7_clear_error_count()  [local]
     94  umc_reg_offset = get_umc_8_reg_offset(adev,  in umc_v8_7_clear_error_count()
     99  umc_reg_offset);  in umc_v8_7_clear_error_count()

  Definitions:
     49  umc_v8_7_clear_error_count_per_channel(struct amdgpu_device *adev, uint32_t umc_reg_offset)  [argument]
    103  umc_v8_7_query_correctable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    150  umc_v8_7_querry_uncorrectable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    177  uint32_t umc_reg_offset = 0;  in umc_v8_7_query_ras_error_count()  [local]
    195  umc_v8_7_query_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint32_t umc_reg_offset, uint32_t ch_inst, uint32_t umc_inst)  [argument]
    267  uint32_t umc_reg_offset = 0;  in umc_v8_7_query_ras_error_address()  [local]
    282  umc_v8_7_err_cnt_init_per_channel(struct amdgpu_device *adev, uint32_t umc_reg_offset)  [argument]
    315  uint32_t umc_reg_offset = 0;  in umc_v8_7_err_cnt_init()  [local]
  [all ...]
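The umc_v8_7_clear_error_count_per_channel() hits above share one addressing idiom: a per-channel register is reached by adding umc_reg_offset to the register's dword address and multiplying by 4 to get the byte offset that the RREG32_PCIE()/WREG32_PCIE() accessors expect. Below is a minimal standalone C sketch of that clear sequence, not the driver code itself: the dword addresses, the clear bit, the reset value, and the fake register array are assumptions made only so the example compiles and runs on its own.

#include <stdint.h>
#include <stdio.h>

/* Fake PCIE-indirect register space; stands in for the real accessors. */
static uint32_t fake_regs[0x100000];

static uint32_t rreg32_pcie(uint32_t byte_addr)              /* models RREG32_PCIE() */
{
	return fake_regs[byte_addr / 4];
}

static void wreg32_pcie(uint32_t byte_addr, uint32_t value)  /* models WREG32_PCIE() */
{
	fake_regs[byte_addr / 4] = value;
}

/* Hypothetical dword addresses and bit; the real values come from the ASIC headers. */
#define ECC_ERR_CNT_SEL_ADDR 0x0340u
#define ECC_ERR_CNT_ADDR     0x0341u
#define ECC_ERR_CNT_CLR_BIT  (1u << 15)
#define ECC_ERR_CNT_RESET    0u

static void clear_error_count_per_channel(uint32_t umc_reg_offset)
{
	uint32_t sel;

	/* Read EccErrCntSel for this channel, set the clear/select bit, write it back. */
	sel = rreg32_pcie((ECC_ERR_CNT_SEL_ADDR + umc_reg_offset) * 4);
	wreg32_pcie((ECC_ERR_CNT_SEL_ADDR + umc_reg_offset) * 4, sel | ECC_ERR_CNT_CLR_BIT);

	/* Reset the counter register itself. */
	wreg32_pcie((ECC_ERR_CNT_ADDR + umc_reg_offset) * 4, ECC_ERR_CNT_RESET);
}

int main(void)
{
	clear_error_count_per_channel(0x400);   /* arbitrary per-channel offset */
	printf("EccErrCntSel now 0x%x\n",
	       rreg32_pcie((ECC_ERR_CNT_SEL_ADDR + 0x400) * 4));
	return 0;
}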
umc_v6_1.c

  References:
     94  uint32_t umc_reg_offset)  in umc_v6_1_clear_error_count_per_channel()
    119  umc_reg_offset) * 4);  in umc_v6_1_clear_error_count_per_channel()
    123  WREG32_PCIE((ecc_err_cnt_sel_addr + umc_reg_offset) * 4,  in umc_v6_1_clear_error_count_per_channel()
    127  WREG32_PCIE((ecc_err_cnt_addr + umc_reg_offset) * 4,  in umc_v6_1_clear_error_count_per_channel()
    132  umc_reg_offset) * 4);  in umc_v6_1_clear_error_count_per_channel()
    136  WREG32_PCIE((ecc_err_cnt_sel_addr + umc_reg_offset) * 4,  in umc_v6_1_clear_error_count_per_channel()
    140  WREG32_PCIE((ecc_err_cnt_addr + umc_reg_offset) * 4,  in umc_v6_1_clear_error_count_per_channel()
    148  uint32_t umc_reg_offset = 0;  in umc_v6_1_clear_error_count()  [local]
    156  umc_reg_offset = get_umc_6_reg_offset(adev,  in umc_v6_1_clear_error_count()
    161  umc_reg_offset);  in umc_v6_1_clear_error_count()

  Definitions:
     93  umc_v6_1_clear_error_count_per_channel(struct amdgpu_device *adev, uint32_t umc_reg_offset)  [argument]
    168  umc_v6_1_query_correctable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    225  umc_v6_1_querry_uncorrectable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    260  uint32_t umc_reg_offset = 0;  in umc_v6_1_query_ras_error_count()  [local]
    294  umc_v6_1_query_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint32_t umc_reg_offset, uint32_t ch_inst, uint32_t umc_inst)  [argument]
    375  uint32_t umc_reg_offset = 0;  in umc_v6_1_query_ras_error_address()  [local]
    406  umc_v6_1_err_cnt_init_per_channel(struct amdgpu_device *adev, uint32_t umc_reg_offset)  [argument]
    448  uint32_t umc_reg_offset = 0;  in umc_v6_1_err_cnt_init()  [local]
  [all ...]
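The umc_v6_1_clear_error_count() hits show the calling side of the same scheme: the driver walks every UMC instance and channel, asks get_umc_6_reg_offset() for that channel's register offset, and passes it to the per-channel helper. The sketch below only models that iteration; the instance/channel counts and the stride-based offset formula are placeholders, since the real helper derives the offset from the ASIC's UMC register layout.

#include <stdint.h>
#include <stdio.h>

#define UMC_INSTANCE_NUM  8u   /* placeholder UMC instance count */
#define CHANNEL_NUM       2u   /* placeholder channels per UMC instance */

/* Placeholder for get_umc_6_reg_offset(): here each instance is 0x2000 dwords
 * apart and each channel 0x800 dwords apart, which is an assumption. */
static uint32_t get_umc_reg_offset(uint32_t umc_inst, uint32_t ch_inst)
{
	return umc_inst * 0x2000u + ch_inst * 0x800u;
}

/* Stand-in for umc_v6_1_clear_error_count_per_channel(); in the driver this is
 * where the EccErrCntSel/EccErrCnt registers at (addr + umc_reg_offset) * 4
 * are cleared. */
static void clear_error_count_per_channel(uint32_t umc_reg_offset)
{
	printf("clearing ECC counters at channel offset 0x%x\n", umc_reg_offset);
}

int main(void)
{
	uint32_t umc_inst, ch_inst;
	uint32_t umc_reg_offset = 0;

	for (umc_inst = 0; umc_inst < UMC_INSTANCE_NUM; umc_inst++) {
		for (ch_inst = 0; ch_inst < CHANNEL_NUM; ch_inst++) {
			umc_reg_offset = get_umc_reg_offset(umc_inst, ch_inst);
			clear_error_count_per_channel(umc_reg_offset);
		}
	}
	return 0;
}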
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
umc_v6_1.c

  References:
     95  uint32_t umc_reg_offset)  in umc_v6_1_clear_error_count_per_channel()
    120  umc_reg_offset) * 4);  in umc_v6_1_clear_error_count_per_channel()
    124  WREG32_PCIE((ecc_err_cnt_sel_addr + umc_reg_offset) * 4,  in umc_v6_1_clear_error_count_per_channel()
    128  WREG32_PCIE((ecc_err_cnt_addr + umc_reg_offset) * 4,  in umc_v6_1_clear_error_count_per_channel()
    133  umc_reg_offset) * 4);  in umc_v6_1_clear_error_count_per_channel()
    137  WREG32_PCIE((ecc_err_cnt_sel_addr + umc_reg_offset) * 4,  in umc_v6_1_clear_error_count_per_channel()
    141  WREG32_PCIE((ecc_err_cnt_addr + umc_reg_offset) * 4,  in umc_v6_1_clear_error_count_per_channel()
    149  uint32_t umc_reg_offset = 0;  in umc_v6_1_clear_error_count()  [local]
    157  umc_reg_offset = get_umc_6_reg_offset(adev,  in umc_v6_1_clear_error_count()
    162  umc_reg_offset);  in umc_v6_1_clear_error_count()

  Definitions:
     94  umc_v6_1_clear_error_count_per_channel(struct amdgpu_device *adev, uint32_t umc_reg_offset)  [argument]
    169  umc_v6_1_query_correctable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    226  umc_v6_1_querry_uncorrectable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    261  uint32_t umc_reg_offset = 0;  in umc_v6_1_query_ras_error_count()  [local]
    295  umc_v6_1_query_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint32_t umc_reg_offset, uint32_t ch_inst, uint32_t umc_inst)  [argument]
    360  uint32_t umc_reg_offset = 0;  in umc_v6_1_query_ras_error_address()  [local]
    391  umc_v6_1_err_cnt_init_per_channel(struct amdgpu_device *adev, uint32_t umc_reg_offset)  [argument]
    433  uint32_t umc_reg_offset = 0;  in umc_v6_1_err_cnt_init()  [local]
  [all ...]
umc_v6_7.c

  References:
     61  uint64_t mc_umc_status, uint32_t umc_reg_offset)  in umc_v6_7_query_error_status_helper()
     70  dev_info(adev->dev, "MCA STATUS 0x%llx, umc_reg_offset 0x%x\n", mc_umc_status, umc_reg_offset);  in umc_v6_7_query_error_status_helper()
     75  reg_value = RREG64_PCIE((mc_umc_addr + umc_reg_offset) * 4);  in umc_v6_7_query_error_status_helper()
     77  dev_info(adev->dev, "MCA IPID 0x%llx, umc_reg_offset 0x%x\n", reg_value, umc_reg_offset);  in umc_v6_7_query_error_status_helper()
     82  reg_value = RREG64_PCIE((mc_umc_addr + umc_reg_offset) * 4);  in umc_v6_7_query_error_status_helper()
     84  dev_info(adev->dev, "MCA SYND 0x%llx, umc_reg_offset 0x%x\n", reg_value, umc_reg_offset);  in umc_v6_7_query_error_status_helper()
     89  reg_value = RREG64_PCIE((mc_umc_addr + umc_reg_offset) *  in umc_v6_7_query_error_status_helper()

  Definitions:
     60  umc_v6_7_query_error_status_helper(struct amdgpu_device *adev, uint64_t mc_umc_status, uint32_t umc_reg_offset)  [argument]
    100  uint32_t umc_reg_offset;  in umc_v6_7_ecc_info_query_correctable_error_count()  [local]
    142  uint32_t umc_reg_offset;  in umc_v6_7_ecc_info_querry_uncorrectable_error_count()  [local]
    261  umc_v6_7_query_correctable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count, uint32_t ch_inst, uint32_t umc_inst)  [argument]
    337  umc_v6_7_querry_uncorrectable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    367  uint32_t umc_reg_offset =  in umc_v6_7_reset_error_count_per_channel()  [local]
    417  uint32_t umc_reg_offset =  in umc_v6_7_query_ecc_error_count()  [local]
    448  uint32_t umc_reg_offset =  in umc_v6_7_query_error_address()  [local]
    491  umc_v6_7_query_ras_poison_mode_per_channel(struct amdgpu_device *adev, uint32_t umc_reg_offset)  [argument]
    507  uint32_t umc_reg_offset = 0;  in umc_v6_7_query_ras_poison_mode()  [local]
  [all ...]
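umc_v6_7_query_error_status_helper() extends the pattern to 64-bit MCA registers: given an already-read MCA STATUS value and the channel's umc_reg_offset, it reads the matching IPID and SYND registers with RREG64_PCIE at (mc_umc_addr + umc_reg_offset) * 4 and logs all three with dev_info(). A standalone model of that helper follows; the dword addresses and the fake 64-bit register file are assumptions, and printf() stands in for dev_info().

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

/* Hypothetical dword addresses for the MCA IPID and SYND registers; chosen so
 * that (addr + offset) * 4 stays 8-byte aligned in this model. */
#define MCA_IPID_ADDR 0x0402u
#define MCA_SYND_ADDR 0x0404u

static uint64_t fake_regs64[0x10000];

static uint64_t rreg64_pcie(uint32_t byte_addr)   /* models RREG64_PCIE() */
{
	return fake_regs64[byte_addr / 8];
}

static void query_error_status_helper(uint64_t mc_umc_status, uint32_t umc_reg_offset)
{
	uint64_t reg_value;

	/* Print the status value handed in by the caller. */
	printf("MCA STATUS 0x%" PRIx64 ", umc_reg_offset 0x%x\n",
	       mc_umc_status, umc_reg_offset);

	/* Fetch and print the per-channel IPID register. */
	reg_value = rreg64_pcie((MCA_IPID_ADDR + umc_reg_offset) * 4);
	printf("MCA IPID 0x%" PRIx64 ", umc_reg_offset 0x%x\n",
	       reg_value, umc_reg_offset);

	/* Fetch and print the per-channel SYND register. */
	reg_value = rreg64_pcie((MCA_SYND_ADDR + umc_reg_offset) * 4);
	printf("MCA SYND 0x%" PRIx64 ", umc_reg_offset 0x%x\n",
	       reg_value, umc_reg_offset);
}

int main(void)
{
	query_error_status_helper(0xdeadbeefULL, 0x400);
	return 0;
}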
umc_v8_7.c

  References:
    181  uint32_t umc_reg_offset)  in umc_v8_7_clear_error_count_per_channel()
    193  umc_reg_offset) * 4);  in umc_v8_7_clear_error_count_per_channel()
    197  WREG32_PCIE((ecc_err_cnt_sel_addr + umc_reg_offset) * 4,  in umc_v8_7_clear_error_count_per_channel()
    201  WREG32_PCIE((ecc_err_cnt_addr + umc_reg_offset) * 4,  in umc_v8_7_clear_error_count_per_channel()
    206  umc_reg_offset) * 4);  in umc_v8_7_clear_error_count_per_channel()
    210  WREG32_PCIE((ecc_err_cnt_sel_addr + umc_reg_offset) * 4,  in umc_v8_7_clear_error_count_per_channel()
    214  WREG32_PCIE((ecc_err_cnt_addr + umc_reg_offset) * 4,  in umc_v8_7_clear_error_count_per_channel()
    222  uint32_t umc_reg_offset = 0;  in umc_v8_7_clear_error_count()  [local]
    225  umc_reg_offset = get_umc_v8_7_reg_offset(adev,  in umc_v8_7_clear_error_count()
    230  umc_reg_offset);  in umc_v8_7_clear_error_count()

  Definitions:
    180  umc_v8_7_clear_error_count_per_channel(struct amdgpu_device *adev, uint32_t umc_reg_offset)  [argument]
    234  umc_v8_7_query_correctable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    281  umc_v8_7_querry_uncorrectable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    308  uint32_t umc_reg_offset = 0;  in umc_v8_7_query_ras_error_count()  [local]
    326  umc_v8_7_query_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint32_t umc_reg_offset, uint32_t ch_inst, uint32_t umc_inst)  [argument]
    375  uint32_t umc_reg_offset = 0;  in umc_v8_7_query_ras_error_address()  [local]
    390  umc_v8_7_err_cnt_init_per_channel(struct amdgpu_device *adev, uint32_t umc_reg_offset)  [argument]
    423  uint32_t umc_reg_offset = 0;  in umc_v8_7_err_cnt_init()  [local]
  [all ...]
umc_v8_10.c

  References:
     84  uint32_t umc_reg_offset =  in umc_v8_10_clear_error_count_per_channel()  [local]
     91  WREG32_PCIE((ecc_err_cnt_addr + umc_reg_offset) * 4,  in umc_v8_10_clear_error_count_per_channel()
    104  uint32_t umc_reg_offset,  in umc_v8_10_query_correctable_error_count()
    117  mc_umc_status = RREG64_PCIE((mc_umc_status_addr + umc_reg_offset) * 4);  in umc_v8_10_query_correctable_error_count()
    124  uint32_t umc_reg_offset,  in umc_v8_10_query_uncorrectable_error_count()
    133  mc_umc_status = RREG64_PCIE((mc_umc_status_addr + umc_reg_offset) * 4);  in umc_v8_10_query_uncorrectable_error_count()
    148  uint32_t umc_reg_offset =  in umc_v8_10_query_ecc_error_count()  [local]
    152  umc_reg_offset,  in umc_v8_10_query_ecc_error_count()
    155  umc_reg_offset,  in umc_v8_10_query_ecc_error_count()
    252  uint32_t umc_reg_offset  in umc_v8_10_query_error_address()  [local]

  Definitions:
    103  umc_v8_10_query_correctable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    123  umc_v8_10_query_uncorrectable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count)  [argument]
    300  uint32_t umc_reg_offset =  in umc_v8_10_err_cnt_init_per_channel()  [local]
  [all ...]
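In umc_v8_10.c the correctable and uncorrectable queries both start from the same 64-bit read of the MCA status register at (mc_umc_status_addr + umc_reg_offset) * 4 and then, judging from their signatures, count qualifying errors into *error_count. The sketch below mirrors that flow with placeholder bit masks and a fake register file; the real driver decodes the MCA status fields with the REG_GET_FIELD() helpers against the ASIC register definitions.

#include <stdint.h>
#include <stdio.h>

#define MC_UMC_STATUS_ADDR 0x03c0u          /* placeholder dword address */
#define STATUS_VAL_BIT     (1ULL << 63)     /* placeholder "entry valid" bit */
#define STATUS_CECC_BIT    (1ULL << 46)     /* placeholder correctable-ECC bit */
#define STATUS_UECC_BIT    (1ULL << 45)     /* placeholder uncorrectable-ECC bit */

static uint64_t fake_regs64[0x10000];

static uint64_t rreg64_pcie(uint32_t byte_addr)   /* models RREG64_PCIE() */
{
	return fake_regs64[byte_addr / 8];
}

static void query_correctable_error_count(uint32_t umc_reg_offset,
					  unsigned long *error_count)
{
	uint64_t mc_umc_status =
		rreg64_pcie((MC_UMC_STATUS_ADDR + umc_reg_offset) * 4);

	/* Count the channel only when the status entry is valid and flags a CE. */
	if ((mc_umc_status & STATUS_VAL_BIT) && (mc_umc_status & STATUS_CECC_BIT))
		(*error_count)++;
}

static void query_uncorrectable_error_count(uint32_t umc_reg_offset,
					    unsigned long *error_count)
{
	uint64_t mc_umc_status =
		rreg64_pcie((MC_UMC_STATUS_ADDR + umc_reg_offset) * 4);

	/* Same read, different bit: a valid entry plus an uncorrectable-ECC flag. */
	if ((mc_umc_status & STATUS_VAL_BIT) && (mc_umc_status & STATUS_UECC_BIT))
		(*error_count)++;
}

int main(void)
{
	unsigned long ce = 0, ue = 0;

	/* Pretend channel offset 0x400 reported a valid correctable error. */
	fake_regs64[((MC_UMC_STATUS_ADDR + 0x400) * 4) / 8] =
		STATUS_VAL_BIT | STATUS_CECC_BIT;

	query_correctable_error_count(0x400, &ce);
	query_uncorrectable_error_count(0x400, &ue);
	printf("ce %lu, ue %lu\n", ce, ue);
	return 0;
}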