
Searched refs:umc (Results 1 - 25 of 32) sorted by relevance


/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_umc.c
36 if (!adev->umc.ras_if) { in amdgpu_umc_ras_late_init()
37 adev->umc.ras_if = in amdgpu_umc_ras_late_init()
39 if (!adev->umc.ras_if) in amdgpu_umc_ras_late_init()
41 adev->umc.ras_if->block = AMDGPU_RAS_BLOCK__UMC; in amdgpu_umc_ras_late_init()
42 adev->umc.ras_if->type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE; in amdgpu_umc_ras_late_init()
43 adev->umc.ras_if->sub_block_index = 0; in amdgpu_umc_ras_late_init()
44 strcpy(adev->umc.ras_if->name, "umc"); in amdgpu_umc_ras_late_init()
46 ih_info.head = fs_info.head = *adev->umc.ras_if; in amdgpu_umc_ras_late_init()
48 r = amdgpu_ras_late_init(adev, adev->umc in amdgpu_umc_ras_late_init()
[all...]
gmc_v9_0.c
48 #include "umc/umc_6_0_sh_mask.h"
1133 adev->umc.funcs = &umc_v6_0_funcs; in gmc_v9_0_set_umc_funcs()
1136 adev->umc.max_ras_err_cnt_per_query = UMC_V6_1_TOTAL_CHANNEL_NUM; in gmc_v9_0_set_umc_funcs()
1137 adev->umc.channel_inst_num = UMC_V6_1_CHANNEL_INSTANCE_NUM; in gmc_v9_0_set_umc_funcs()
1138 adev->umc.umc_inst_num = UMC_V6_1_UMC_INSTANCE_NUM; in gmc_v9_0_set_umc_funcs()
1139 adev->umc.channel_offs = UMC_V6_1_PER_CHANNEL_OFFSET_VG20; in gmc_v9_0_set_umc_funcs()
1140 adev->umc.channel_idx_tbl = &umc_v6_1_channel_idx_tbl[0][0]; in gmc_v9_0_set_umc_funcs()
1141 adev->umc.funcs = &umc_v6_1_funcs; in gmc_v9_0_set_umc_funcs()
1144 adev->umc.max_ras_err_cnt_per_query = UMC_V6_1_TOTAL_CHANNEL_NUM; in gmc_v9_0_set_umc_funcs()
1145 adev->umc in gmc_v9_0_set_umc_funcs()
[all...]
amdgpu_umc.h
39 #define LOOP_UMC_INST(umc_inst) for ((umc_inst) = 0; (umc_inst) < adev->umc.umc_inst_num; (umc_inst)++)
40 #define LOOP_UMC_CH_INST(ch_inst) for ((ch_inst) = 0; (ch_inst) < adev->umc.channel_inst_num; (ch_inst)++)
56 /* number of umc channel instance with memory map register access */
58 /* number of umc instance with memory map register access */
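The two LOOP_UMC_* macros above drive the per-channel walks that show up in the umc_v6_1.c and umc_v8_7.c hits below. As orientation only, a minimal sketch of that pattern (the walk_umc_channels() helper name is made up for illustration; the channel_idx_tbl lookup is copied from the query_error_address matches, and the code assumes the amdgpu_umc.h definitions shown in this result):

	static void walk_umc_channels(struct amdgpu_device *adev)
	{
		uint32_t umc_inst, ch_inst, channel_index;

		LOOP_UMC_INST(umc_inst) {
			LOOP_UMC_CH_INST(ch_inst) {
				/* flatten (umc_inst, ch_inst) into the per-ASIC channel map */
				channel_index = adev->umc.channel_idx_tbl[
					umc_inst * adev->umc.channel_inst_num + ch_inst];
				/* per-channel work would use channel_index here,
				 * e.g. querying or clearing error status */
			}
		}
	}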
gmc_v10_0.c
620 adev->umc.max_ras_err_cnt_per_query = UMC_V8_7_TOTAL_CHANNEL_NUM; in gmc_v10_0_set_umc_funcs()
621 adev->umc.channel_inst_num = UMC_V8_7_CHANNEL_INSTANCE_NUM; in gmc_v10_0_set_umc_funcs()
622 adev->umc.umc_inst_num = UMC_V8_7_UMC_INSTANCE_NUM; in gmc_v10_0_set_umc_funcs()
623 adev->umc.channel_offs = UMC_V8_7_PER_CHANNEL_OFFSET_SIENNA; in gmc_v10_0_set_umc_funcs()
624 adev->umc.channel_idx_tbl = &umc_v8_7_channel_idx_tbl[0][0]; in gmc_v10_0_set_umc_funcs()
625 adev->umc.funcs = &umc_v8_7_funcs; in gmc_v10_0_set_umc_funcs()
998 if (adev->umc.funcs && adev->umc.funcs->init_registers) in gmc_v10_0_hw_init()
999 adev->umc.funcs->init_registers(adev); in gmc_v10_0_hw_init()
umc_v8_7.c
29 #include "umc/umc_8_7_0_offset.h"
30 #include "umc/umc_8_7_0_sh_mask.h"
46 return adev->umc.channel_offs*ch_inst + UMC_8_INST_DIST*umc_inst; in get_umc_8_reg_offset()
204 uint32_t channel_index = adev->umc.channel_idx_tbl[umc_inst * adev->umc.channel_inst_num + ch_inst]; in umc_v8_7_query_error_address()
217 /* clear umc status */ in umc_v8_7_query_error_address()
235 /* translate umc channel address to soc pa, 3 parts are included */ in umc_v8_7_query_error_address()
256 /* clear umc status */ in umc_v8_7_query_error_address()
amdgpu_gmc.c
316 if (adev->umc.funcs && adev->umc.funcs->ras_late_init) { in amdgpu_gmc_ras_late_init()
317 r = adev->umc.funcs->ras_late_init(adev); in amdgpu_gmc_ras_late_init()
umc_v6_1.c
29 #include "umc/umc_6_1_1_offset.h"
30 #include "umc/umc_6_1_1_sh_mask.h"
31 #include "umc/umc_6_1_2_offset.h"
90 return adev->umc.channel_offs*ch_inst + UMC_6_INST_DIST*umc_inst; in get_umc_6_reg_offset()
303 uint32_t channel_index = adev->umc.channel_idx_tbl[umc_inst * adev->umc.channel_inst_num + ch_inst]; in umc_v6_1_query_error_address()
325 /* clear umc status */ in umc_v6_1_query_error_address()
343 /* translate umc channel address to soc pa, 3 parts are included */ in umc_v6_1_query_error_address()
364 /* clear umc status */ in umc_v6_1_query_error_address()
amdgpu_ras.c
48 "umc",
264 * block: umc, sdma, gfx, .........
276 * echo inject umc ue 0x0 0x0 0x0 > /sys/kernel/debug/dri/0/ras/ras_ctrl
277 * echo inject umc ce 0 0 0 > /sys/kernel/debug/dri/0/ras/ras_ctrl
278 * echo disable umc > /sys/kernel/debug/dri/0/ras/ras_ctrl
331 /* umc ce/ue error injection for a bad page is not allowed */ in amdgpu_ras_debugfs_ctrl_write()
770 if (adev->umc.funcs->query_ras_error_count) in amdgpu_ras_error_query()
771 adev->umc.funcs->query_ras_error_count(adev, &err_data); in amdgpu_ras_error_query()
772 /* umc query_ras_error_address is also responsible for clearing in amdgpu_ras_error_query()
775 if (adev->umc in amdgpu_ras_error_query()
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
amdgpu_umc.c
52 kcalloc(adev->umc.max_ras_err_cnt_per_query, in amdgpu_umc_page_retirement_mca()
56 "Failed to alloc memory for umc error record in MCA notifier!\n"); in amdgpu_umc_page_retirement_mca()
91 if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops && in amdgpu_umc_do_page_retirement()
92 adev->umc.ras->ras_block.hw_ops->query_ras_error_count) in amdgpu_umc_do_page_retirement()
93 adev->umc.ras->ras_block.hw_ops->query_ras_error_count(adev, ras_error_status); in amdgpu_umc_do_page_retirement()
95 if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops && in amdgpu_umc_do_page_retirement()
96 adev->umc.ras->ras_block.hw_ops->query_ras_error_address && in amdgpu_umc_do_page_retirement()
97 adev->umc in amdgpu_umc_do_page_retirement()
[all...]
umc_v8_10.c
27 #include "umc/umc_8_10_0_offset.h"
28 #include "umc/umc_8_10_0_sh_mask.h"
75 return adev->umc.channel_offs * ch_inst + UMC_8_INST_DIST * umc_inst + in get_umc_v8_10_reg_offset()
216 adev->umc.channel_idx_tbl[node_inst * adev->umc.umc_inst_num * in umc_v8_10_convert_error_address()
217 adev->umc.channel_inst_num + in umc_v8_10_convert_error_address()
218 umc_inst * adev->umc.channel_inst_num + in umc_v8_10_convert_error_address()
234 dev_err(adev->dev, "Failed to map pa from umc na.\n"); in umc_v8_10_convert_error_address()
263 /* clear umc status */ in umc_v8_10_query_error_address()
281 /* clear umc statu in umc_v8_10_query_error_address()
[all...]
umc_v6_7.c
28 #include "umc/umc_6_7_0_offset.h"
29 #include "umc/umc_6_7_0_sh_mask.h"
50 uint32_t index = umc_inst * adev->umc.channel_inst_num + ch_inst; in get_umc_v6_7_reg_offset()
52 /* adjust umc and channel index offset, in get_umc_v6_7_reg_offset()
53 * the register address is not linear on each umc instace */ in get_umc_v6_7_reg_offset()
57 return adev->umc.channel_offs * ch_inst + UMC_V6_7_INST_DIST * umc_inst; in get_umc_v6_7_reg_offset()
106 eccinfo_table_idx = umc_inst * adev->umc.channel_inst_num + ch_inst; in umc_v6_7_ecc_info_query_correctable_error_count()
119 adev->umc.channel_idx_tbl[umc_inst * adev->umc.channel_inst_num + ch_inst]; in umc_v6_7_ecc_info_query_correctable_error_count()
123 /* translate umc channe in umc_v6_7_ecc_info_query_correctable_error_count()
[all...]
amdgpu_umc.h
40 #define LOOP_UMC_INST(umc_inst) for ((umc_inst) = 0; (umc_inst) < adev->umc.umc_inst_num; (umc_inst)++)
41 #define LOOP_UMC_CH_INST(ch_inst) for ((ch_inst) = 0; (ch_inst) < adev->umc.channel_inst_num; (ch_inst)++)
45 for_each_set_bit((node_inst), &(adev->umc.active_mask), adev->umc.node_inst_num)
73 /* number of umc channel instance with memory map register access */
75 /* number of umc instance with memory map register access */
78 /* Total number of umc node instance including harvest one */
92 /* active mask for umc node instance */
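Relative to the 5.10 header above, the 6.6 version adds a node dimension: only nodes set in active_mask are walked (harvested nodes are skipped), as the for_each_set_bit() iterator at line 45 shows. A hedged sketch combining it with the channel loops (the helper name is illustrative, and it assumes the 6.6 amdgpu_umc.h definitions in this result):

	static void walk_active_umc_nodes(struct amdgpu_device *adev)
	{
		uint32_t node_inst, umc_inst, ch_inst;

		/* skip UMC node instances that are not present in active_mask */
		for_each_set_bit(node_inst, &adev->umc.active_mask, adev->umc.node_inst_num)
			LOOP_UMC_INST(umc_inst)
				LOOP_UMC_CH_INST(ch_inst) {
					/* per (node, umc, channel) work, e.g. the flattened
					 * channel_idx_tbl lookup seen in umc_v8_10.c */
				}
	}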
gmc_v9_0.c
46 #include "umc/umc_6_0_sh_mask.h"
1459 adev->umc.funcs = &umc_v6_0_funcs; in gmc_v9_0_set_umc_funcs()
1462 adev->umc.max_ras_err_cnt_per_query = UMC_V6_1_TOTAL_CHANNEL_NUM; in gmc_v9_0_set_umc_funcs()
1463 adev->umc.channel_inst_num = UMC_V6_1_CHANNEL_INSTANCE_NUM; in gmc_v9_0_set_umc_funcs()
1464 adev->umc.umc_inst_num = UMC_V6_1_UMC_INSTANCE_NUM; in gmc_v9_0_set_umc_funcs()
1465 adev->umc.channel_offs = UMC_V6_1_PER_CHANNEL_OFFSET_VG20; in gmc_v9_0_set_umc_funcs()
1466 adev->umc.retire_unit = 1; in gmc_v9_0_set_umc_funcs()
1467 adev->umc.channel_idx_tbl = &umc_v6_1_channel_idx_tbl[0][0]; in gmc_v9_0_set_umc_funcs()
1468 adev->umc.ras = &umc_v6_1_ras; in gmc_v9_0_set_umc_funcs()
1471 adev->umc in gmc_v9_0_set_umc_funcs()
[all...]
gmc_v11_0.c
592 adev->umc.channel_inst_num = UMC_V8_10_CHANNEL_INSTANCE_NUM; in gmc_v11_0_set_umc_funcs()
593 adev->umc.umc_inst_num = UMC_V8_10_UMC_INSTANCE_NUM; in gmc_v11_0_set_umc_funcs()
594 adev->umc.max_ras_err_cnt_per_query = UMC_V8_10_TOTAL_CHANNEL_NUM(adev); in gmc_v11_0_set_umc_funcs()
595 adev->umc.channel_offs = UMC_V8_10_PER_CHANNEL_OFFSET; in gmc_v11_0_set_umc_funcs()
596 adev->umc.retire_unit = UMC_V8_10_NA_COL_2BITS_POWER_OF_2_NUM; in gmc_v11_0_set_umc_funcs()
597 if (adev->umc.node_inst_num == 4) in gmc_v11_0_set_umc_funcs()
598 adev->umc.channel_idx_tbl = &umc_v8_10_channel_idx_tbl_ext0[0][0][0]; in gmc_v11_0_set_umc_funcs()
600 adev->umc.channel_idx_tbl = &umc_v8_10_channel_idx_tbl[0][0][0]; in gmc_v11_0_set_umc_funcs()
601 adev->umc.ras = &umc_v8_10_ras; in gmc_v11_0_set_umc_funcs()
948 if (adev->umc in gmc_v11_0_hw_init()
[all...]
gmc_v10_0.c
685 adev->umc.max_ras_err_cnt_per_query = UMC_V8_7_TOTAL_CHANNEL_NUM; in gmc_v10_0_set_umc_funcs()
686 adev->umc.channel_inst_num = UMC_V8_7_CHANNEL_INSTANCE_NUM; in gmc_v10_0_set_umc_funcs()
687 adev->umc.umc_inst_num = UMC_V8_7_UMC_INSTANCE_NUM; in gmc_v10_0_set_umc_funcs()
688 adev->umc.channel_offs = UMC_V8_7_PER_CHANNEL_OFFSET_SIENNA; in gmc_v10_0_set_umc_funcs()
689 adev->umc.retire_unit = 1; in gmc_v10_0_set_umc_funcs()
690 adev->umc.channel_idx_tbl = &umc_v8_7_channel_idx_tbl[0][0]; in gmc_v10_0_set_umc_funcs()
691 adev->umc.ras = &umc_v8_7_ras; in gmc_v10_0_set_umc_funcs()
1110 if (adev->umc.funcs && adev->umc.funcs->init_registers) in gmc_v10_0_hw_init()
1111 adev->umc in gmc_v10_0_hw_init()
[all...]
umc_v8_7.c
30 #include "umc/umc_8_7_0_offset.h"
31 #include "umc/umc_8_7_0_sh_mask.h"
47 return adev->umc.channel_offs*ch_inst + UMC_8_INST_DIST*umc_inst; in get_umc_v8_7_reg_offset()
58 eccinfo_table_idx = umc_inst * adev->umc.channel_inst_num + ch_inst; in umc_v8_7_ecc_info_query_correctable_error_count()
77 eccinfo_table_idx = umc_inst * adev->umc.channel_inst_num + ch_inst; in umc_v8_7_ecc_info_querry_uncorrectable_error_count()
119 adev->umc.channel_idx_tbl[umc_inst * adev->umc.channel_inst_num + ch_inst]; in umc_v8_7_convert_error_address()
121 /* translate umc channel address to soc pa, 3 parts are included */ in umc_v8_7_convert_error_address()
139 eccinfo_table_idx = umc_inst * adev->umc.channel_inst_num + ch_inst; in umc_v8_7_ecc_info_query_error_address()
345 /* clear umc statu in umc_v8_7_query_error_address()
[all...]
umc_v6_1.c
30 #include "umc/umc_6_1_1_offset.h"
31 #include "umc/umc_6_1_1_sh_mask.h"
32 #include "umc/umc_6_1_2_offset.h"
91 return adev->umc.channel_offs*ch_inst + UMC_6_INST_DIST*umc_inst; in get_umc_6_reg_offset()
303 uint32_t channel_index = adev->umc.channel_idx_tbl[umc_inst * adev->umc.channel_inst_num + ch_inst]; in umc_v6_1_query_error_address()
325 /* clear umc status */ in umc_v6_1_query_error_address()
340 /* translate umc channel address to soc pa, 3 parts are included */ in umc_v6_1_query_error_address()
349 /* clear umc status */ in umc_v6_1_query_error_address()
amdgpu_ras.c
58 "umc",
423 * The block is one of: umc, sdma, gfx, etc.
438 * echo inject umc ue 0x0 0x0 0x0 > /sys/kernel/debug/dri/0/ras/ras_ctrl
439 * echo inject umc ce 0 0 0 3 > /sys/kernel/debug/dri/0/ras/ras_ctrl
440 * echo disable umc > /sys/kernel/debug/dri/0/ras/ras_ctrl
448 * /sys/class/drm/card[0/1/2...]/device/ras/[gfx|sdma|umc|...]_err_count
503 /* umc ce/ue error injection for a bad page is not allowed */ in amdgpu_ras_debugfs_ctrl_write()
996 if (adev->umc.ras && adev->umc.ras->ras_block.hw_ops && in amdgpu_ras_get_ecc_info()
997 adev->umc in amdgpu_ras_get_ecc_info()
[all...]
amdgpu_ras_eeprom.c
426 if (adev->umc.ras && in amdgpu_ras_eeprom_reset_table()
427 adev->umc.ras->set_eeprom_table_version) in amdgpu_ras_eeprom_reset_table()
428 adev->umc.ras->set_eeprom_table_version(hdr); in amdgpu_ras_eeprom_reset_table()
/kernel/linux/linux-6.6/drivers/edac/
amd64_edac.c
1116 static int umc_normaddr_to_sysaddr(u64 norm_addr, u16 nid, u8 umc, u64 *sys_addr) in umc_normaddr_to_sysaddr() argument
1136 ctx.inst_id = umc; in umc_normaddr_to_sysaddr()
1139 if (df_indirect_read_instance(nid, 0, 0x1B4, umc, &ctx.tmp)) in umc_normaddr_to_sysaddr()
1153 if (df_indirect_read_instance(nid, 0, 0x110 + (8 * base), umc, &ctx.tmp)) in umc_normaddr_to_sysaddr()
1176 if (df_indirect_read_instance(nid, 0, 0x114 + (8 * base), umc, &ctx.tmp)) in umc_normaddr_to_sysaddr()
1229 * umc/channel# as instance id of the coherent slave in umc_normaddr_to_sysaddr()
1232 if (df_indirect_read_instance(nid, 0, 0x50, umc, &ctx.tmp)) in umc_normaddr_to_sysaddr()
1348 if (!(pvt->umc[i].sdp_ctrl & UMC_SDP_INIT)) in umc_determine_edac_cap()
1354 if (pvt->umc[i].umc_cfg & BIT(12)) in umc_determine_edac_cap()
1533 static int umc_addr_mask_to_cs_size(struct amd64_pvt *pvt, u8 umc, in umc_addr_mask_to_cs_size() argument
1607 struct amd64_umc *umc; umc_dump_misc_regs() local
1704 int umc; umc_prep_chip_selects() local
1720 int cs, umc; umc_read_base_mask() local
1811 struct amd64_umc *umc; umc_determine_memory_type() local
3170 struct amd64_umc *umc; umc_read_mc_regs() local
3330 u8 umc, cs; umc_init_csrows() local
3623 struct amd64_umc *umc; umc_ecc_enabled() local
3783 gpu_addr_mask_to_cs_size(struct amd64_pvt *pvt, u8 umc, unsigned int cs_mode, int csrow_nr) gpu_addr_mask_to_cs_size() argument
3807 struct amd64_umc *umc; gpu_dump_misc_regs() local
3840 u8 umc, cs; gpu_init_csrows() local
3883 gpu_get_umc_base(u8 umc, u8 channel) gpu_get_umc_base() argument
3908 struct amd64_umc *umc; gpu_read_mc_regs() local
3926 int umc, cs; gpu_read_base_mask() local
3951 int umc; gpu_prep_chip_selects() local
[all...]
/kernel/linux/linux-5.10/drivers/edac/
amd64_edac.c
217 if (pvt->umc) { in __set_scrub_rate()
259 if (pvt->umc) { in get_scrub_rate()
723 if (pvt->umc) { in determine_edac_cap()
727 if (!(pvt->umc[i].sdp_ctrl & UMC_SDP_INIT)) in determine_edac_cap()
733 if (pvt->umc[i].umc_cfg & BIT(12)) in determine_edac_cap()
849 struct amd64_umc *umc; in __dump_misc_regs_df() local
854 umc = &pvt->umc[i]; in __dump_misc_regs_df()
856 edac_dbg(1, "UMC%d DIMM cfg: 0x%x\n", i, umc->dimm_cfg); in __dump_misc_regs_df()
857 edac_dbg(1, "UMC%d UMC cfg: 0x%x\n", i, umc in __dump_misc_regs_df()
950 int umc; prep_chip_selects() local
971 int cs, umc; read_umc_base_mask() local
1593 f17_addr_mask_to_cs_size(struct amd64_pvt *pvt, u8 umc, unsigned int cs_mode, int csrow_nr) f17_addr_mask_to_cs_size() argument
2796 struct amd64_umc *umc; __read_mc_regs_df() local
2958 u8 umc, cs; init_csrows_df() local
3241 struct amd64_umc *umc; ecc_enabled() local
[all...]
/kernel/linux/linux-5.10/arch/x86/kernel/cpu/mce/
amd.c
92 [SMCA_UMC] = { "umc", "Unified Memory Controller" },
678 int umc_normaddr_to_sysaddr(u64 norm_addr, u16 nid, u8 umc, u64 *sys_addr) in umc_normaddr_to_sysaddr() argument
695 if (amd_df_indirect_read(nid, 0, 0x1B4, umc, &tmp)) in umc_normaddr_to_sysaddr()
709 if (amd_df_indirect_read(nid, 0, 0x110 + (8 * base), umc, &tmp)) in umc_normaddr_to_sysaddr()
732 if (amd_df_indirect_read(nid, 0, 0x114 + (8 * base), umc, &tmp)) in umc_normaddr_to_sysaddr()
785 * umc/channel# as instance id of the coherent slave in umc_normaddr_to_sysaddr()
788 if (amd_df_indirect_read(nid, 0, 0x50, umc, &tmp)) in umc_normaddr_to_sysaddr()
805 if (amd_df_indirect_read(nid, 1, 0x208, umc, &tmp)) in umc_normaddr_to_sysaddr()
844 if (amd_df_indirect_read(nid, 0, 0x104, umc, &tmp)) in umc_normaddr_to_sysaddr()
/kernel/linux/linux-5.10/arch/x86/include/asm/
mce.h
371 int umc_normaddr_to_sysaddr(u64 norm_addr, u16 nid, u8 umc, u64 *sys_addr);
380 umc_normaddr_to_sysaddr(u64 norm_addr, u16 nid, u8 umc, u64 *sys_addr) { return -EINVAL; }; in umc_normaddr_to_sysaddr() argument
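The 5.10 tree exports this translator from the MCE core (with an -EINVAL stub when the decoder is not built in), while in 6.6 the same helper lives as a static function inside amd64_edac.c, as the EDAC hit above shows. A minimal caller sketch, assuming only the signature declared here (the wrapper name and fallback value are illustrative):

	#include <asm/mce.h>

	/* Translate a UMC-normalized DRAM address from an MCA record into a
	 * system physical address; returns ~0ULL when translation fails,
	 * e.g. when only the -EINVAL stub is compiled in. */
	static u64 decode_umc_norm_addr(u64 norm_addr, u16 node_id, u8 umc_inst)
	{
		u64 sys_addr;

		if (umc_normaddr_to_sysaddr(norm_addr, node_id, umc_inst, &sys_addr))
			return ~0ULL;

		return sys_addr;
	}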
/kernel/linux/linux-5.10/arch/x86/kernel/cpu/
Makefile
44 obj-$(CONFIG_CPU_SUP_UMC_32) += umc.o
/kernel/linux/linux-6.6/arch/x86/kernel/cpu/
Makefile
42 obj-$(CONFIG_CPU_SUP_UMC_32) += umc.o

Completed in 29 milliseconds
