// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 */

#include "i915_drv.h"
#include "intel_dram.h"
#include "intel_sideband.h"

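/*
 * Per-DIMM info: size is the total DIMM size in GB, width is the DRAM
 * device width in bits (x8/x16/x32) and ranks is the number of ranks.
 */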
struct dram_dimm_info {
	u8 size, width, ranks;
};

struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;
	u8 ranks;
	bool is_16gb_dimm;
};

#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type

static const char *intel_dram_type_str(enum intel_dram_type type)
{
	static const char * const str[] = {
		DRAM_TYPE_STR(UNKNOWN),
		DRAM_TYPE_STR(DDR3),
		DRAM_TYPE_STR(DDR4),
		DRAM_TYPE_STR(LPDDR3),
		DRAM_TYPE_STR(LPDDR4),
	};

	if (type >= ARRAY_SIZE(str))
		type = INTEL_DRAM_UNKNOWN;

	return str[type];
}

#undef DRAM_TYPE_STR

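/*
 * A rank spans the 64 bit data bus, so the number of DRAM devices on a
 * DIMM is ranks * 64 / device width. Guard against a zero width readout.
 */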
static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
{
	return dimm->ranks * 64 / (dimm->width ?: 1);
}

/* Returns total GB for the whole DIMM */
static int skl_get_dimm_size(u16 val)
{
	return val & SKL_DRAM_SIZE_MASK;
}

static int skl_get_dimm_width(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	switch (val & SKL_DRAM_WIDTH_MASK) {
	case SKL_DRAM_WIDTH_X8:
	case SKL_DRAM_WIDTH_X16:
	case SKL_DRAM_WIDTH_X32:
		val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int skl_get_dimm_ranks(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;

	return val + 1;
}

/* Returns total GB for the whole DIMM */
static int cnl_get_dimm_size(u16 val)
{
	return (val & CNL_DRAM_SIZE_MASK) / 2;
}

static int cnl_get_dimm_width(u16 val)
{
	if (cnl_get_dimm_size(val) == 0)
		return 0;

	switch (val & CNL_DRAM_WIDTH_MASK) {
	case CNL_DRAM_WIDTH_X8:
	case CNL_DRAM_WIDTH_X16:
	case CNL_DRAM_WIDTH_X32:
		val = (val & CNL_DRAM_WIDTH_MASK) >> CNL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int cnl_get_dimm_ranks(u16 val)
{
	if (cnl_get_dimm_size(val) == 0)
		return 0;

	val = (val & CNL_DRAM_RANK_MASK) >> CNL_DRAM_RANK_SHIFT;

	return val + 1;
}

static bool
skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
{
	/* Convert total GB to Gb per DRAM device */
	return 8 * dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
}

static void
skl_dram_get_dimm_info(struct drm_i915_private *i915,
		       struct dram_dimm_info *dimm,
		       int channel, char dimm_name, u16 val)
{
	if (INTEL_GEN(i915) >= 10) {
		dimm->size = cnl_get_dimm_size(val);
		dimm->width = cnl_get_dimm_width(val);
		dimm->ranks = cnl_get_dimm_ranks(val);
	} else {
		dimm->size = skl_get_dimm_size(val);
		dimm->width = skl_get_dimm_width(val);
		dimm->ranks = skl_get_dimm_ranks(val);
	}

	drm_dbg_kms(&i915->drm,
		    "CH%u DIMM %c size: %u GB, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    yesno(skl_is_16gb_dimm(dimm)));
}

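/*
 * The low 16 bits of the MAD DIMM register describe DIMM L, the high
 * 16 bits describe DIMM S.
 */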
static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	skl_dram_get_dimm_info(i915, &ch->dimm_l,
			       channel, 'L', val & 0xffff);
	skl_dram_get_dimm_info(i915, &ch->dimm_s,
			       channel, 'S', val >> 16);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

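	/*
	 * Treat the channel as dual rank if either DIMM is dual rank, or if
	 * both DIMM slots are populated with single rank DIMMs.
	 */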
	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
		    channel, ch->ranks, yesno(ch->is_16gb_dimm));

	return 0;
}

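/*
 * The two channels are considered symmetric if they are identical and, when
 * DIMM S is present, DIMM L and DIMM S within a channel are identical too.
 */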
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}

static int
skl_dram_get_channels_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(&i915->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	/*
	 * If either channel is single rank, the worst case behaviour is the
	 * same as for single rank memory, so treat the memory as single rank.
	 */
	if (ch0.ranks == 1 || ch1.ranks == 1)
		dram_info->ranks = 1;
	else
		dram_info->ranks = max(ch0.ranks, ch1.ranks);

	if (dram_info->ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	dram_info->is_16gb_dimm = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
		    yesno(dram_info->symmetric_memory));

	return 0;
}

static enum intel_dram_type
skl_get_dram_type(struct drm_i915_private *i915)
{
	u32 val;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);

	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
	case SKL_DRAM_DDR_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case SKL_DRAM_DDR_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case SKL_DRAM_DDR_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case SKL_DRAM_DDR_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static int
skl_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	u32 mem_freq_khz, val;
	int ret;

	dram_info->type = skl_get_dram_type(i915);
	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	ret = skl_dram_get_channels_info(i915);
	if (ret)
		return ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MC_BIOS_DATA_0_0_0_MCHBAR_PCU);
	mem_freq_khz = DIV_ROUND_UP((val & SKL_REQ_DATA_MASK) *
				    SKL_MEMORY_FREQ_MULTIPLIER_HZ, 1000);

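	/* Each channel is 8 bytes (64 bits) wide */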
	dram_info->bandwidth_kbps = dram_info->num_channels *
		mem_freq_khz * 8;

	if (dram_info->bandwidth_kbps == 0) {
		drm_info(&i915->drm,
			 "Couldn't get system memory bandwidth\n");
		return -EINVAL;
	}

	dram_info->valid = true;
	return 0;
}

/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int bxt_get_dimm_width(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;

	return 8 << val;
}

static int bxt_get_dimm_ranks(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	switch (val & BXT_DRAM_RANK_MASK) {
	case BXT_DRAM_RANK_SINGLE:
		return 1;
	case BXT_DRAM_RANK_DUAL:
		return 2;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * GB to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm) / 8;
}

static int bxt_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	u32 dram_channels;
	u32 mem_freq_khz, val;
	u8 num_active_channels;
	int i;

	val = intel_uncore_read(&i915->uncore, BXT_P_CR_MC_BIOS_REQ_0_0_0);
	mem_freq_khz = DIV_ROUND_UP((val & BXT_REQ_DATA_MASK) *
				    BXT_MEMORY_FREQ_MULTIPLIER_HZ, 1000);

	dram_channels = val & BXT_DRAM_CHANNEL_ACTIVE_MASK;
	num_active_channels = hweight32(dram_channels);

	/* Each active bit represents a 4-byte wide channel */
	dram_info->bandwidth_kbps = (mem_freq_khz * num_active_channels * 4);

	if (dram_info->bandwidth_kbps == 0) {
		drm_info(&i915->drm,
			 "Couldn't get system memory bandwidth\n");
		return -EINVAL;
	}

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each DIMM.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
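		/* Treat an all-ones readback as an unpopulated channel */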
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(&i915->drm,
			    "CH%u DIMM size: %u GB, width: X%u, ranks: %u, type: %s\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks,
			    intel_dram_type_str(type));

		/*
		 * If any channel is single rank, the worst case behaviour is
		 * the same as for single rank memory, so treat the memory as
		 * single rank.
		 */
		if (dram_info->ranks == 0)
			dram_info->ranks = dimm.ranks;
		else if (dimm.ranks == 1)
			dram_info->ranks = 1;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || dram_info->ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	dram_info->valid = true;

	return 0;
}

static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv)
{
	struct dram_info *dram_info = &dev_priv->dram_info;
	u32 val = 0;
	int ret;

	ret = sandybridge_pcode_read(dev_priv,
				     ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
				     ICL_PCODE_MEM_SS_READ_GLOBAL_INFO,
				     &val, NULL);
	if (ret)
		return ret;

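	/*
	 * Bits 3:0 of the pcode response encode the DDR type; the
	 * encoding differs between gen12 and earlier platforms.
	 */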
	if (IS_GEN(dev_priv, 12)) {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -1;
		}
	} else {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -1;
		}
	}

	dram_info->num_channels = (val & 0xf0) >> 4;
	dram_info->num_qgv_points = (val & 0xf00) >> 8;

	return 0;
}

static int gen11_get_dram_info(struct drm_i915_private *i915)
{
	int ret = skl_get_dram_info(i915);

	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(i915);
}

static int gen12_get_dram_info(struct drm_i915_private *i915)
{
	/* Always needed for GEN12+ */
	i915->dram_info.is_16gb_dimm = true;

	return icl_pcode_read_mem_global_info(i915);
}

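/*
 * Fill out i915->dram_info (DRAM type, channel count, ranks, bandwidth)
 * using the platform appropriate method.
 */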
void intel_dram_detect(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	int ret;

	/*
	 * Assume 16Gb DIMMs are present until proven otherwise.
	 * This is only used for the level 0 watermark latency
	 * w/a which does not apply to bxt/glk.
	 */
	dram_info->is_16gb_dimm = !IS_GEN9_LP(i915);

	if (INTEL_GEN(i915) < 9 || !HAS_DISPLAY(i915))
		return;

	if (INTEL_GEN(i915) >= 12)
		ret = gen12_get_dram_info(i915);
	else if (INTEL_GEN(i915) >= 11)
		ret = gen11_get_dram_info(i915);
	else if (IS_GEN9_LP(i915))
		ret = bxt_get_dram_info(i915);
	else
		ret = skl_get_dram_info(i915);
	if (ret)
		return;

	drm_dbg_kms(&i915->drm, "DRAM bandwidth: %u kBps, channels: %u\n",
		    dram_info->bandwidth_kbps, dram_info->num_channels);

	drm_dbg_kms(&i915->drm, "DRAM ranks: %u, 16Gb DIMMs: %s\n",
		    dram_info->ranks, yesno(dram_info->is_16gb_dimm));
}

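/* eDRAM size in MB is banks * ways * sets as encoded in the cap register */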
static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
	static const u8 sets[4] = { 1, 1, 2, 2 };

	return EDRAM_NUM_BANKS(cap) *
		ways[EDRAM_WAYS_IDX(cap)] *
		sets[EDRAM_SETS_IDX(cap)];
}

void intel_dram_edram_detect(struct drm_i915_private *i915)
{
	u32 edram_cap = 0;

	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || INTEL_GEN(i915) >= 9))
		return;

	edram_cap = __raw_uncore_read32(&i915->uncore, HSW_EDRAM_CAP);

	/* NB: We can't write IDICR yet because we don't have gt funcs set up */

	if (!(edram_cap & EDRAM_ENABLED))
		return;

	/*
	 * The capability bits needed for the size calculation are not present
	 * pre gen9, so always return 128MB.
	 */
	if (INTEL_GEN(i915) < 9)
		i915->edram_size_mb = 128;
	else
		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}