Lines matching defs:dct in drivers/edac/amd64_edac.c

102 static void f15h_select_dct(struct amd64_pvt *pvt, u8 dct)
108 reg |= dct;
126 static inline int amd64_read_dct_pci_cfg(struct amd64_pvt *pvt, u8 dct,
131 if (dct || offset >= 0x100)
136 if (dct) {
154 dct = (dct && pvt->model == 0x30) ? 3 : dct;
155 f15h_select_dct(pvt, dct);
159 if (dct)
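The fragments above show the F15h quirk: both DRAM controllers share one set of config-space offsets, so f15h_select_dct() first programs the selector bits (reg |= dct) to steer the window, and model 0x30 parts expose the second controller at selector 3. A minimal userspace sketch of that select-then-read pattern, with a fake array standing in for the PCI config accessors and an assumed selector-register offset:

#include <stdint.h>
#include <stdio.h>

#define DCT_CFG_SEL 0x10c               /* selector register offset (assumed) */

static uint32_t cfg_space[0x200];       /* fake config space, byte-offset indexed */

static uint32_t read_cfg(unsigned int off)            { return cfg_space[off]; }
static void write_cfg(unsigned int off, uint32_t val) { cfg_space[off] = val; }

/* Route the shared register window at DCT @dct (cf. f15h_select_dct). */
static void select_dct(uint8_t dct, uint8_t model)
{
        uint32_t reg = read_cfg(DCT_CFG_SEL);

        reg &= (model == 0x30) ? ~3u : ~1u;     /* clear the selector bits */
        reg |= dct;
        write_cfg(DCT_CFG_SEL, reg);
}

static uint32_t read_dct_reg(uint8_t dct, uint8_t model, unsigned int offset)
{
        /* Model 0x30 exposes the second DCT at selector 3, not 1. */
        dct = (dct && model == 0x30) ? 3 : dct;
        select_dct(dct, model);
        return read_cfg(offset);
}

int main(void)
{
        write_cfg(0x110, 0xdeadbeef);
        printf("0x%x\n", (unsigned int)read_dct_reg(1, 0x30, 0x110));
        return 0;
}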
369 * compute the CS base address of the @csrow on the DRAM controller @dct.
372 static void get_cs_base_and_mask(struct amd64_pvt *pvt, int csrow, u8 dct,
379 csbase = pvt->csels[dct].csbases[csrow];
380 csmask = pvt->csels[dct].csmasks[csrow];
391 csbase = pvt->csels[dct].csbases[csrow];
392 csmask = pvt->csels[dct].csmasks[csrow >> 1];
407 csbase = pvt->csels[dct].csbases[csrow];
408 csmask = pvt->csels[dct].csmasks[csrow >> 1];
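The csrow >> 1 indexing in the later branches is the tell: each chip select keeps its own base register, but in those branches two chip selects share one mask register. A tiny self-contained sketch of that index mapping:

#include <stdio.h>

int main(void)
{
        /* Eight chip selects, four shared masks: csrow N uses mask N >> 1. */
        for (int csrow = 0; csrow < 8; csrow++)
                printf("csrow %d -> csbases[%d], csmasks[%d]\n",
                       csrow, csrow, csrow >> 1);
        return 0;
}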
428 #define for_each_chip_select(i, dct, pvt) \
429 for (i = 0; i < pvt->csels[dct].b_cnt; i++)
431 #define chip_select_base(i, dct, pvt) \
432 pvt->csels[dct].csbases[i]
434 #define for_each_chip_select_mask(i, dct, pvt) \
435 for (i = 0; i < pvt->csels[dct].m_cnt; i++)
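These iterator macros expect a struct amd64_pvt *pvt in scope and a per-DCT csels[] table carrying base/mask arrays plus their counts. A self-contained sketch of the layout they assume and a walk over one DCT; the struct fields are simplified stand-ins, only the names follow the listing:

#include <stdint.h>
#include <stdio.h>

struct chip_select {
        uint32_t csbases[8];
        uint8_t  b_cnt;
        uint32_t csmasks[4];
        uint8_t  m_cnt;
};

struct pvt {
        struct chip_select csels[2];    /* one table per DCT */
};

#define for_each_chip_select(i, dct, pvt) \
        for (i = 0; i < (pvt)->csels[dct].b_cnt; i++)

#define chip_select_base(i, dct, pvt) \
        ((pvt)->csels[dct].csbases[i])

int main(void)
{
        struct pvt p = {
                .csels[0] = { .csbases = { 0x100000, 0x200000 }, .b_cnt = 2 },
        };
        int i;

        for_each_chip_select(i, 0, &p)
                printf("cs %d base 0x%x\n", i,
                       (unsigned int)chip_select_base(i, 0, &p));
        return 0;
}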
2111 static int k8_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct,
2114 u32 dclr = dct ? pvt->dclr1 : pvt->dclr0;
2215 static int f10_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct,
2218 u32 dclr = dct ? pvt->dclr1 : pvt->dclr0;
2231 static int f15_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct,
2240 static int f15_m60h_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct,
2244 u32 dcsm = pvt->csels[dct].csmasks[cs_mask_nr];
2273 static int f16_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct,
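Every dbam_to_chip_select variant maps a DBAM cs_mode field to a chip-select size, and the dclr read on lines 2114/2218 exists to double that size when the DCT runs a 128-bit interface. A rough sketch of the shape, with a made-up size table standing in for the per-family DDR2/DDR3 tables and an assumed bit position for the width flag:

#include <stdio.h>

#define WIDTH_128 (1u << 11)    /* 128-bit interface flag in DCLR (bit assumed) */

/* Hypothetical cs_mode -> size-in-MB table (64-bit width). */
static const int cs_size_mb[] = { 32, 64, 128, 256, 512, 1024, 2048 };

static int dbam_to_cs_mb(unsigned int dclr, unsigned int cs_mode)
{
        if (cs_mode >= sizeof(cs_size_mb) / sizeof(cs_size_mb[0]))
                return -1;

        /* A 128-bit interface gangs two 64-bit halves: double the size. */
        return (dclr & WIDTH_128) ? cs_size_mb[cs_mode] * 2
                                  : cs_size_mb[cs_mode];
}

int main(void)
{
        printf("%d MB\n", dbam_to_cs_mb(WIDTH_128, 3));  /* 512 MB */
        return 0;
}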
2445 static int f10_process_possible_spare(struct amd64_pvt *pvt, u8 dct, int csrow)
2449 if (online_spare_swap_done(pvt, dct) &&
2450 csrow == online_spare_bad_dramcs(pvt, dct)) {
2452 for_each_chip_select(tmp_cs, dct, pvt) {
2453 if (chip_select_base(tmp_cs, dct, pvt) & 0x2) {
2470 static int f1x_lookup_addr_in_dct(u64 in_addr, u8 nid, u8 dct)
2484 edac_dbg(1, "input addr: 0x%llx, DCT: %d\n", in_addr, dct);
2486 for_each_chip_select(csrow, dct, pvt) {
2487 if (!csrow_enabled(csrow, dct, pvt))
2490 get_cs_base_and_mask(pvt, csrow, dct, &cs_base, &cs_mask);
2505 cs_found = f10_process_possible_spare(pvt, dct, csrow);
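The lookup at line 2470 walks the enabled chip selects of one DCT and matches the input address against each base, with mask bits treated as don't-cares; a hit may still be redirected by the online-spare check at line 2505. A standalone sketch of that compare loop, with hypothetical table contents:

#include <stdint.h>
#include <stdio.h>

struct cs { uint64_t base, mask; int enabled; };

/* Return the matching chip select or -1 (cf. f1x_lookup_addr_in_dct). */
static int lookup_addr(const struct cs *tbl, int n, uint64_t in_addr)
{
        for (int csrow = 0; csrow < n; csrow++) {
                if (!tbl[csrow].enabled)
                        continue;
                /* Bits set in the mask are don't-cares for the compare. */
                if ((in_addr & ~tbl[csrow].mask) ==
                    (tbl[csrow].base & ~tbl[csrow].mask))
                        return csrow;
        }
        return -1;
}

int main(void)
{
        struct cs tbl[] = {
                { .base = 0x00000000, .mask = 0x0fffffff, .enabled = 1 },
                { .base = 0x10000000, .mask = 0x0fffffff, .enabled = 1 },
        };

        printf("csrow %d\n", lookup_addr(tbl, 2, 0x12345678));  /* prints 1 */
        return 0;
}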
2674 /* Verify the number of DCTs that participate in channel interleaving. */
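The code under this comment derives the participant count from a per-DCT enable bitmask (the kernel uses hweight8()) and rejects counts it cannot decode. A popcount-only sketch of that derivation; the exact validity condition is left out here:

#include <stdio.h>

/* Count interleave participants from a per-DCT enable bitmask. */
static int num_dcts_intlv(unsigned char intlv_en)
{
        return __builtin_popcount(intlv_en);    /* kernel: hweight8() */
}

int main(void)
{
        /* DCTs 0 and 1 enabled -> two controllers interleave the channel. */
        printf("%d\n", num_dcts_intlv(0x3));
        return 0;
}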
3289 static u32 dct_get_csrow_nr_pages(struct amd64_pvt *pvt, u8 dct, int csrow_nr)
3291 u32 dbam = dct ? pvt->dbam1 : pvt->dbam0;
3297 nr_pages = pvt->ops->dbam_to_cs(pvt, dct, cs_mode, csrow_nr);
3301 csrow_nr, dct, cs_mode);
3307 static u32 umc_get_csrow_nr_pages(struct amd64_pvt *pvt, u8 dct, int csrow_nr_orig)
3312 cs_mode = umc_get_cs_mode(csrow_nr >> 1, dct, pvt);
3314 nr_pages = umc_addr_mask_to_cs_size(pvt, dct, cs_mode, csrow_nr);
3318 csrow_nr_orig, dct, cs_mode);
3822 static u32 gpu_get_csrow_nr_pages(struct amd64_pvt *pvt, u8 dct, int csrow_nr)
3827 nr_pages = gpu_addr_mask_to_cs_size(pvt, dct, cs_mode, csrow_nr);
3830 edac_dbg(0, "csrow: %d, channel: %d\n", csrow_nr, dct);
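All three *_get_csrow_nr_pages() helpers finish the same way: the family-specific callback returns the chip-select size in megabytes, which then becomes a page count via a shift by (20 - PAGE_SHIFT). A quick check of that arithmetic, assuming 4 KiB pages:

#include <stdio.h>

#define PAGE_SHIFT 12   /* 4 KiB pages, as on x86 */

int main(void)
{
        unsigned int cs_size_mb = 2048;         /* as returned by dbam_to_cs */
        unsigned int nr_pages = cs_size_mb << (20 - PAGE_SHIFT);

        /* 2048 MB = 2^31 bytes = 2^19 pages of 4 KiB = 524288. */
        printf("%u pages\n", nr_pages);
        return 0;
}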
4224 int cs = 0, dct = 0;
4226 for (dct = 0; dct < pvt->max_mcs; dct++) {
4227 for_each_chip_select(cs, dct, pvt)
4228 cs_enabled |= csrow_enabled(cs, dct, pvt);
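This last fragment reduces every chip select on every controller to a single "is anything populated" flag; in current trees the surrounding function appears to be instance_has_memory(). A self-contained sketch of the same reduction over a stubbed enable table:

#include <stdbool.h>
#include <stdio.h>

#define MAX_MCS 2
#define NUM_CS  8

/* Stub enable table, indexed [dct][cs]; one populated chip select. */
static const bool cs_on[MAX_MCS][NUM_CS] = { [1][4] = true };

/* True if any chip select on any controller is enabled. */
static bool instance_has_memory(void)
{
        bool cs_enabled = false;

        for (int dct = 0; dct < MAX_MCS; dct++)
                for (int cs = 0; cs < NUM_CS; cs++)
                        cs_enabled |= cs_on[dct][cs];

        return cs_enabled;
}

int main(void)
{
        printf("%s\n", instance_has_memory() ? "has memory" : "empty");
        return 0;
}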