/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
umc_v8_7.c
   44  uint32_t umc_inst,  in get_umc_v8_7_reg_offset()
   47  return adev->umc.channel_offs*ch_inst + UMC_8_INST_DIST*umc_inst;  in get_umc_v8_7_reg_offset()
   51  uint32_t umc_inst, uint32_t ch_inst,  in umc_v8_7_ecc_info_query_correctable_error_count()
   58  eccinfo_table_idx = umc_inst * adev->umc.channel_inst_num + ch_inst;  in umc_v8_7_ecc_info_query_correctable_error_count()
   70  uint32_t umc_inst, uint32_t ch_inst,  in umc_v8_7_ecc_info_querry_uncorrectable_error_count()
   77  eccinfo_table_idx = umc_inst * adev->umc.channel_inst_num + ch_inst;  in umc_v8_7_ecc_info_querry_uncorrectable_error_count()
   95  uint32_t umc_inst = 0;  in umc_v8_7_ecc_info_query_ras_error_count()  local
  101  LOOP_UMC_INST_AND_CH(umc_inst, ch_inst) {  in umc_v8_7_ecc_info_query_ras_error_count()
  103  umc_inst, ch_inst,  in umc_v8_7_ecc_info_query_ras_error_count()
  106  umc_inst, ch_inst  in umc_v8_7_ecc_info_query_ras_error_count()
   43  get_umc_v8_7_reg_offset(struct amdgpu_device *adev, uint32_t umc_inst, uint32_t ch_inst)  argument
   50  umc_v8_7_ecc_info_query_correctable_error_count(struct amdgpu_device *adev, uint32_t umc_inst, uint32_t ch_inst, unsigned long *error_count)  argument
   69  umc_v8_7_ecc_info_querry_uncorrectable_error_count(struct amdgpu_device *adev, uint32_t umc_inst, uint32_t ch_inst, unsigned long *error_count)  argument
  111  umc_v8_7_convert_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint64_t err_addr, uint32_t ch_inst, uint32_t umc_inst)  argument
  130  umc_v8_7_ecc_info_query_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint32_t ch_inst, uint32_t umc_inst)  argument
  165  uint32_t umc_inst = 0;  umc_v8_7_ecc_info_query_ras_error_address()  local
  220  uint32_t umc_inst = 0;  umc_v8_7_clear_error_count()  local
  306  uint32_t umc_inst = 0;  umc_v8_7_query_ras_error_count()  local
  326  umc_v8_7_query_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint32_t umc_reg_offset, uint32_t ch_inst, uint32_t umc_inst)  argument
  373  uint32_t umc_inst = 0;  umc_v8_7_query_ras_error_address()  local
  421  uint32_t umc_inst = 0;  umc_v8_7_err_cnt_init()  local
  [all...]
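Note: the two index computations that recur in these hits are plain linear maps — a per-channel MMIO offset of the form channel_offs * ch_inst + UMC_8_INST_DIST * umc_inst, and a row-major ECC-info table index of the form umc_inst * channel_inst_num + ch_inst. Below is a minimal standalone sketch of both; the stride and count values are placeholders for the sketch, not the driver's real adev->umc fields or ASIC constants.

#include <stdint.h>
#include <stdio.h>

/* Placeholder values for the sketch only; the real UMC_8_INST_DIST comes
 * from the ASIC register headers and channel_offs/channel_inst_num from
 * the adev->umc setup. */
#define UMC_8_INST_DIST   0x40000u
#define CHANNEL_OFFS      0x100u
#define CHANNEL_INST_NUM  2u

/* Same shape as get_umc_v8_7_reg_offset(): per-(umc, channel) MMIO offset. */
static uint32_t reg_offset(uint32_t umc_inst, uint32_t ch_inst)
{
	return CHANNEL_OFFS * ch_inst + UMC_8_INST_DIST * umc_inst;
}

/* Same shape as the eccinfo_table_idx computation: row-major index into
 * the per-channel ECC info table. */
static uint32_t eccinfo_table_idx(uint32_t umc_inst, uint32_t ch_inst)
{
	return umc_inst * CHANNEL_INST_NUM + ch_inst;
}

int main(void)
{
	printf("offset(umc=1, ch=1)      = 0x%x\n", reg_offset(1, 1));
	printf("eccinfo_idx(umc=1, ch=1) = %u\n", eccinfo_table_idx(1, 1));
	return 0;
}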
umc_v6_7.c
   47  uint32_t umc_inst,  in get_umc_v6_7_reg_offset()
   50  uint32_t index = umc_inst * adev->umc.channel_inst_num + ch_inst;  in get_umc_v6_7_reg_offset()
   54  umc_inst = index / 4;  in get_umc_v6_7_reg_offset()
   57  return adev->umc.channel_offs * ch_inst + UMC_V6_7_INST_DIST * umc_inst;  in get_umc_v6_7_reg_offset()
   95  uint32_t umc_inst, uint32_t ch_inst,  in umc_v6_7_ecc_info_query_correctable_error_count()
  104  umc_inst, ch_inst);  in umc_v6_7_ecc_info_query_correctable_error_count()
  106  eccinfo_table_idx = umc_inst * adev->umc.channel_inst_num + ch_inst;  in umc_v6_7_ecc_info_query_correctable_error_count()
  119  adev->umc.channel_idx_tbl[umc_inst * adev->umc.channel_inst_num + ch_inst];  in umc_v6_7_ecc_info_query_correctable_error_count()
  137  uint32_t umc_inst, uint32_t ch_inst,  in umc_v6_7_ecc_info_querry_uncorrectable_error_count()
  146  umc_inst, ch_inst  in umc_v6_7_ecc_info_querry_uncorrectable_error_count()
   46  get_umc_v6_7_reg_offset(struct amdgpu_device *adev, uint32_t umc_inst, uint32_t ch_inst)  argument
   94  umc_v6_7_ecc_info_query_correctable_error_count(struct amdgpu_device *adev, uint32_t umc_inst, uint32_t ch_inst, unsigned long *error_count)  argument
  136  umc_v6_7_ecc_info_querry_uncorrectable_error_count(struct amdgpu_device *adev, uint32_t umc_inst, uint32_t ch_inst, unsigned long *error_count)  argument
  163  umc_v6_7_ecc_info_querry_ecc_error_count(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  187  umc_v6_7_convert_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint64_t err_addr, uint32_t ch_inst, uint32_t umc_inst)  argument
  222  umc_v6_7_ecc_info_query_error_address(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  261  umc_v6_7_query_correctable_error_count(struct amdgpu_device *adev, uint32_t umc_reg_offset, unsigned long *error_count, uint32_t ch_inst, uint32_t umc_inst)  argument
  361  umc_v6_7_reset_error_count_per_channel(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  412  umc_v6_7_query_ecc_error_count(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  441  umc_v6_7_query_error_address(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  [all...]
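Note: get_umc_v6_7_reg_offset() first flattens (umc_inst, ch_inst) into a single index and then re-splits it, because the register space on this part is not laid out in the same order the driver iterates. A standalone sketch of that remap follows; the group size of four matches the `index / 4` at line 54, while the `% 4` counterpart and all constant values are assumptions made for the sketch.

#include <stdint.h>
#include <stdio.h>

#define UMC_V6_7_INST_DIST  0x50000u  /* placeholder stride */
#define CHANNEL_OFFS        0x400u    /* placeholder stride */
#define CHANNEL_INST_NUM    8u        /* channels per UMC instance, assumed */

/* Flatten (umc_inst, ch_inst), then re-split into groups of four so the
 * resulting offset follows the hardware register layout rather than the
 * logical iteration order. */
static uint32_t reg_offset_v6_7(uint32_t umc_inst, uint32_t ch_inst)
{
	uint32_t index = umc_inst * CHANNEL_INST_NUM + ch_inst;

	umc_inst = index / 4;
	ch_inst  = index % 4;   /* assumed counterpart of the /4 above */

	return CHANNEL_OFFS * ch_inst + UMC_V6_7_INST_DIST * umc_inst;
}

int main(void)
{
	for (uint32_t u = 0; u < 2; u++)
		for (uint32_t c = 0; c < CHANNEL_INST_NUM; c++)
			printf("umc %u ch %u -> 0x%x\n", u, c,
			       reg_offset_v6_7(u, c));
	return 0;
}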
amdgpu_umc.h
   40  #define LOOP_UMC_INST(umc_inst) for ((umc_inst) = 0; (umc_inst) < adev->umc.umc_inst_num; (umc_inst)++)
   42  #define LOOP_UMC_INST_AND_CH(umc_inst, ch_inst) LOOP_UMC_INST((umc_inst)) LOOP_UMC_CH_INST((ch_inst))
   47  #define LOOP_UMC_EACH_NODE_INST_AND_CH(node_inst, umc_inst, ch_inst) \
   48      LOOP_UMC_NODE_INST((node_inst)) LOOP_UMC_INST_AND_CH((umc_inst), (ch_inst))
   52  uint32_t umc_inst, uint32_t ch_inst, void *data);
  106  uint32_t umc_inst);
  [all...]
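Note: the LOOP_* macros are nothing more than juxtaposed for loops, so the single statement (or block) written after the macro becomes the body of the innermost loop. A standalone sketch of how they compose; the fixed counts and the LOOP_UMC_CH_INST / LOOP_UMC_NODE_INST definitions here are stand-ins for the adev->umc fields and the neighbouring macros in this header.

#include <stdint.h>
#include <stdio.h>

/* Fixed counts stand in for adev->umc.node_inst_num, umc_inst_num and
 * channel_inst_num in this sketch. */
#define NODE_INST_NUM  2u
#define UMC_INST_NUM   4u
#define CH_INST_NUM    2u

#define LOOP_UMC_NODE_INST(node_inst) \
	for ((node_inst) = 0; (node_inst) < NODE_INST_NUM; (node_inst)++)
#define LOOP_UMC_INST(umc_inst) \
	for ((umc_inst) = 0; (umc_inst) < UMC_INST_NUM; (umc_inst)++)
#define LOOP_UMC_CH_INST(ch_inst) \
	for ((ch_inst) = 0; (ch_inst) < CH_INST_NUM; (ch_inst)++)

/* Juxtaposed loops: whatever statement follows the macro is the body of
 * the innermost loop. */
#define LOOP_UMC_INST_AND_CH(umc_inst, ch_inst) \
	LOOP_UMC_INST((umc_inst)) LOOP_UMC_CH_INST((ch_inst))
#define LOOP_UMC_EACH_NODE_INST_AND_CH(node_inst, umc_inst, ch_inst) \
	LOOP_UMC_NODE_INST((node_inst)) LOOP_UMC_INST_AND_CH((umc_inst), (ch_inst))

int main(void)
{
	uint32_t node_inst, umc_inst, ch_inst;

	LOOP_UMC_EACH_NODE_INST_AND_CH(node_inst, umc_inst, ch_inst)
		printf("node %u umc %u ch %u\n", node_inst, umc_inst, ch_inst);

	return 0;
}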
umc_v6_1.c
   88  uint32_t umc_inst,  in get_umc_6_reg_offset()
   91  return adev->umc.channel_offs*ch_inst + UMC_6_INST_DIST*umc_inst;  in get_umc_6_reg_offset()
  147  uint32_t umc_inst = 0;  in umc_v6_1_clear_error_count()  local
  156  LOOP_UMC_INST_AND_CH(umc_inst, ch_inst) {  in umc_v6_1_clear_error_count()
  158  umc_inst,  in umc_v6_1_clear_error_count()
  259  uint32_t umc_inst = 0;  in umc_v6_1_query_ras_error_count()  local
  272  LOOP_UMC_INST_AND_CH(umc_inst, ch_inst) {  in umc_v6_1_query_ras_error_count()
  274  umc_inst,  in umc_v6_1_query_ras_error_count()
  299  uint32_t umc_inst)  in umc_v6_1_query_error_address()
  303  uint32_t channel_index = adev->umc.channel_idx_tbl[umc_inst * adev->umc.channel_inst_num + ch_inst];  in umc_v6_1_query_error_address()
   87  get_umc_6_reg_offset(struct amdgpu_device *adev, uint32_t umc_inst, uint32_t ch_inst)  argument
  295  umc_v6_1_query_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint32_t umc_reg_offset, uint32_t ch_inst, uint32_t umc_inst)  argument
  358  uint32_t umc_inst = 0;  umc_v6_1_query_ras_error_address()  local
  431  uint32_t umc_inst = 0;  umc_v6_1_err_cnt_init()  local
  [all...]
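Note: umc_v6_1_query_error_address() (line 303) goes one step beyond the plain row-major index: it uses that index to look up adev->umc.channel_idx_tbl and obtain the physical memory-channel number recorded with the retired page. A small standalone sketch; the table contents below are invented for illustration, the real channel_idx_tbl is per-ASIC data in the driver.

#include <stdint.h>
#include <stdio.h>

#define CHANNEL_INST_NUM 2u  /* channels per UMC instance, assumed */

/* Invented mapping from the logical row-major (umc, channel) index to a
 * physical memory-channel number. */
static const uint32_t channel_idx_tbl[] = { 0, 4, 1, 5, 2, 6, 3, 7 };

static uint32_t channel_index(uint32_t umc_inst, uint32_t ch_inst)
{
	return channel_idx_tbl[umc_inst * CHANNEL_INST_NUM + ch_inst];
}

int main(void)
{
	printf("umc 2 ch 1 -> physical channel %u\n", channel_index(2, 1));
	return 0;
}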
umc_v8_10.c
   72  uint32_t umc_inst,  in get_umc_v8_10_reg_offset()
   75  return adev->umc.channel_offs * ch_inst + UMC_8_INST_DIST * umc_inst +  in get_umc_v8_10_reg_offset()
   80  uint32_t node_inst, uint32_t umc_inst,  in umc_v8_10_clear_error_count_per_channel()
   85  get_umc_v8_10_reg_offset(adev, node_inst, umc_inst, ch_inst);  in umc_v8_10_clear_error_count_per_channel()
  144  uint32_t node_inst, uint32_t umc_inst,  in umc_v8_10_query_ecc_error_count()
  149  get_umc_v8_10_reg_offset(adev, node_inst, umc_inst, ch_inst);  in umc_v8_10_query_ecc_error_count()
  207  uint32_t ch_inst, uint32_t umc_inst,  in umc_v8_10_convert_error_address()
  218  umc_inst * adev->umc.channel_inst_num +  in umc_v8_10_convert_error_address()
  240  retired_page_addr, channel_index, umc_inst);  in umc_v8_10_convert_error_address()
  245  uint32_t node_inst, uint32_t umc_inst,  in umc_v8_10_query_error_address()
   70  get_umc_v8_10_reg_offset(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst)  argument
   79  umc_v8_10_clear_error_count_per_channel(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  143  umc_v8_10_query_ecc_error_count(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  205  umc_v8_10_convert_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint64_t err_addr, uint32_t ch_inst, uint32_t umc_inst, uint32_t node_inst, uint64_t mc_umc_status)  argument
  244  umc_v8_10_query_error_address(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  294  umc_v8_10_err_cnt_init_per_channel(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  335  umc_v8_10_ecc_info_query_correctable_error_count(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, unsigned long *error_count)  argument
  356  umc_v8_10_ecc_info_query_uncorrectable_error_count(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, unsigned long *error_count)  argument
  381  umc_v8_10_ecc_info_query_ecc_error_count(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  403  umc_v8_10_ecc_info_query_error_address(struct amdgpu_device *adev, uint32_t node_inst, uint32_t umc_inst, uint32_t ch_inst, void *data)  argument
  [all...]
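Note: the v8_10 offset adds a third dimension. The return at line 75 is cut off after the UMC-instance term, but the signature at line 70 takes a node_inst parameter, so the sum presumably also carries a per-node term. The sketch below models such a three-term offset; the node stride name and all constant values are assumptions for illustration only, not the driver's real constants.

#include <stdint.h>
#include <stdio.h>

/* Placeholder strides for the sketch only. */
#define UMC_8_INST_DIST  0x40000u
#define NODE_DIST        0x400000u  /* hypothetical per-node stride */
#define CHANNEL_OFFS     0x100u

/* Three-level offset: channel, UMC instance and node instance terms
 * summed into one MMIO offset, mirroring the shape of
 * get_umc_v8_10_reg_offset(). */
static uint32_t reg_offset_v8_10(uint32_t node_inst, uint32_t umc_inst,
				 uint32_t ch_inst)
{
	return CHANNEL_OFFS * ch_inst +
	       UMC_8_INST_DIST * umc_inst +
	       NODE_DIST * node_inst;
}

int main(void)
{
	printf("node 1 umc 2 ch 1 -> 0x%x\n", reg_offset_v8_10(1, 2, 1));
	return 0;
}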
amdgpu_umc.c
   29  uint32_t ch_inst, uint32_t umc_inst)  in amdgpu_umc_convert_error_address()
   34  err_data, err_addr, ch_inst, umc_inst);  in amdgpu_umc_convert_error_address()
   46  uint64_t err_addr, uint32_t ch_inst, uint32_t umc_inst)  in amdgpu_umc_page_retirement_mca()
   64  ch_inst, umc_inst);  in amdgpu_umc_page_retirement_mca()
  293  uint32_t umc_inst)  in amdgpu_umc_fill_error_record()
  305  err_rec->mcumc_id = umc_inst;  in amdgpu_umc_fill_error_record()
  314  uint32_t umc_inst = 0;  in amdgpu_umc_loop_channels()  local
  319  LOOP_UMC_EACH_NODE_INST_AND_CH(node_inst, umc_inst, ch_inst) {  in amdgpu_umc_loop_channels()
  320  ret = func(adev, node_inst, umc_inst, ch_inst, data);  in amdgpu_umc_loop_channels()
  323  node_inst, umc_inst, ch_inst  in amdgpu_umc_loop_channels()
   27  amdgpu_umc_convert_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint64_t err_addr, uint32_t ch_inst, uint32_t umc_inst)  argument
   45  amdgpu_umc_page_retirement_mca(struct amdgpu_device *adev, uint64_t err_addr, uint32_t ch_inst, uint32_t umc_inst)  argument
  289  amdgpu_umc_fill_error_record(struct ras_err_data *err_data, uint64_t err_addr, uint64_t retired_page, uint32_t channel_index, uint32_t umc_inst)  argument
  [all...]
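Note: amdgpu_umc_loop_channels() (lines 314-323) visits every (node, umc, channel) triple and hands each one to a caller-supplied callback together with an opaque data pointer, stopping at the first non-zero return; the per-channel workers elsewhere in this listing (query, clear, init) are written against that callback shape. A standalone sketch of the pattern with simplified types — the device pointer is dropped and the counts are fixed for the sketch.

#include <stdint.h>
#include <stdio.h>

#define NODE_INST_NUM 1u
#define UMC_INST_NUM  2u
#define CH_INST_NUM   2u

/* Simplified callback type; the kernel version also receives the device
 * pointer. The opaque data pointer carries whatever the worker needs. */
typedef int (*umc_func)(uint32_t node_inst, uint32_t umc_inst,
			uint32_t ch_inst, void *data);

static int loop_channels(umc_func func, void *data)
{
	uint32_t node_inst, umc_inst, ch_inst;
	int ret;

	for (node_inst = 0; node_inst < NODE_INST_NUM; node_inst++)
		for (umc_inst = 0; umc_inst < UMC_INST_NUM; umc_inst++)
			for (ch_inst = 0; ch_inst < CH_INST_NUM; ch_inst++) {
				ret = func(node_inst, umc_inst, ch_inst, data);
				if (ret) {
					fprintf(stderr,
						"loop failed at node %u umc %u ch %u\n",
						node_inst, umc_inst, ch_inst);
					return ret;
				}
			}
	return 0;
}

/* Example per-channel worker: count how many channels were visited. */
static int count_channels(uint32_t node_inst, uint32_t umc_inst,
			  uint32_t ch_inst, void *data)
{
	(void)node_inst; (void)umc_inst; (void)ch_inst;
	(*(unsigned long *)data)++;
	return 0;
}

int main(void)
{
	unsigned long total = 0;

	loop_channels(count_channels, &total);
	printf("channels visited: %lu\n", total);
	return 0;
}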
umc_v6_7.h
   76  uint32_t ch_inst, uint32_t umc_inst);
amdgpu_ras.c
  3047  uint32_t umc_inst = 0, ch_inst = 0;  in amdgpu_bad_page_notifier()  local
  3080  umc_inst = GET_UMC_INST(m->ipid);  in amdgpu_bad_page_notifier()
  3084  umc_inst, ch_inst);  in amdgpu_bad_page_notifier()
  3086  if (!amdgpu_umc_page_retirement_mca(adev, m->addr, ch_inst, umc_inst))  in amdgpu_bad_page_notifier()
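Note: the bad-page notifier decodes the UMC instance (and, per the surrounding context, the channel) out of the MCA IPID register before handing the error address to amdgpu_umc_page_retirement_mca(). The sketch below only illustrates that kind of bit-field decode; the shift amounts, masks and sample value are made up and are not the driver's GET_UMC_INST/GET_CHAN_INDEX definitions.

#include <stdint.h>
#include <stdio.h>

/* Assumed field positions, for illustration only. */
static uint32_t get_umc_inst(uint64_t ipid)
{
	return (uint32_t)((ipid >> 21) & 0x7);
}

static uint32_t get_chan_index(uint64_t ipid)
{
	return (uint32_t)((ipid >> 12) & 0xf);
}

int main(void)
{
	uint64_t ipid = 0x0000000000601000ULL;  /* made-up sample IPID */

	printf("umc_inst = %u, ch_inst = %u\n",
	       get_umc_inst(ipid), get_chan_index(ipid));
	return 0;
}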
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
umc_v8_7.c
   43  uint32_t umc_inst,  in get_umc_8_reg_offset()
   46  return adev->umc.channel_offs*ch_inst + UMC_8_INST_DIST*umc_inst;  in get_umc_8_reg_offset()
   89  uint32_t umc_inst = 0;  in umc_v8_7_clear_error_count()  local
   93  LOOP_UMC_INST_AND_CH(umc_inst, ch_inst) {  in umc_v8_7_clear_error_count()
   95  umc_inst,  in umc_v8_7_clear_error_count()
  175  uint32_t umc_inst = 0;  in umc_v8_7_query_ras_error_count()  local
  179  LOOP_UMC_INST_AND_CH(umc_inst, ch_inst) {  in umc_v8_7_query_ras_error_count()
  181  umc_inst,  in umc_v8_7_query_ras_error_count()
  199  uint32_t umc_inst)  in umc_v8_7_query_error_address()
  204  uint32_t channel_index = adev->umc.channel_idx_tbl[umc_inst * adev->umc.channel_inst_num + ch_inst];  in umc_v8_7_query_error_address()
   42  get_umc_8_reg_offset(struct amdgpu_device *adev, uint32_t umc_inst, uint32_t ch_inst)  argument
  195  umc_v8_7_query_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint32_t umc_reg_offset, uint32_t ch_inst, uint32_t umc_inst)  argument
  265  uint32_t umc_inst = 0;  umc_v8_7_query_ras_error_address()  local
  313  uint32_t umc_inst = 0;  umc_v8_7_err_cnt_init()  local
  [all...]
amdgpu_umc.h
   39  #define LOOP_UMC_INST(umc_inst) for ((umc_inst) = 0; (umc_inst) < adev->umc.umc_inst_num; (umc_inst)++)
   41  #define LOOP_UMC_INST_AND_CH(umc_inst, ch_inst) LOOP_UMC_INST((umc_inst)) LOOP_UMC_CH_INST((ch_inst))
umc_v6_1.c
   87  uint32_t umc_inst,  in get_umc_6_reg_offset()
   90  return adev->umc.channel_offs*ch_inst + UMC_6_INST_DIST*umc_inst;  in get_umc_6_reg_offset()
  146  uint32_t umc_inst = 0;  in umc_v6_1_clear_error_count()  local
  155  LOOP_UMC_INST_AND_CH(umc_inst, ch_inst) {  in umc_v6_1_clear_error_count()
  157  umc_inst,  in umc_v6_1_clear_error_count()
  258  uint32_t umc_inst = 0;  in umc_v6_1_query_ras_error_count()  local
  271  LOOP_UMC_INST_AND_CH(umc_inst, ch_inst) {  in umc_v6_1_query_ras_error_count()
  273  umc_inst,  in umc_v6_1_query_ras_error_count()
  298  uint32_t umc_inst)  in umc_v6_1_query_error_address()
  303  uint32_t channel_index = adev->umc.channel_idx_tbl[umc_inst * adev->umc.channel_inst_num + ch_inst];  in umc_v6_1_query_error_address()
   86  get_umc_6_reg_offset(struct amdgpu_device *adev, uint32_t umc_inst, uint32_t ch_inst)  argument
  294  umc_v6_1_query_error_address(struct amdgpu_device *adev, struct ras_err_data *err_data, uint32_t umc_reg_offset, uint32_t ch_inst, uint32_t umc_inst)  argument
  373  uint32_t umc_inst = 0;  umc_v6_1_query_ras_error_address()  local
  446  uint32_t umc_inst = 0;  umc_v6_1_err_cnt_init()  local
  [all...]