/*
 * Copyright © 2008 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Keith Packard <keithp@keithp.com>
 *
 */

#include <linux/export.h>
#include <linux/i2c.h>
#include <linux/notifier.h>
#include <linux/slab.h>
#include <linux/string_helpers.h>
#include <linux/timekeeping.h>
#include <linux/types.h>

#include <asm/byteorder.h>

#include <drm/display/drm_dp_helper.h>
#include <drm/display/drm_dsc_helper.h>
#include <drm/display/drm_hdmi_helper.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc.h>
#include <drm/drm_edid.h>
#include <drm/drm_probe_helper.h>

#include "g4x_dp.h"
#include "i915_drv.h"
#include "i915_irq.h"
#include "i915_reg.h"
#include "intel_atomic.h"
#include "intel_audio.h"
#include "intel_backlight.h"
#include "intel_combo_phy_regs.h"
#include "intel_connector.h"
#include "intel_crtc.h"
#include "intel_cx0_phy.h"
#include "intel_ddi.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_aux.h"
#include "intel_dp_hdcp.h"
#include "intel_dp_link_training.h"
#include "intel_dp_mst.h"
#include "intel_dpio_phy.h"
#include "intel_dpll.h"
#include "intel_fifo_underrun.h"
#include "intel_hdcp.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_hotplug_irq.h"
#include "intel_lspcon.h"
#include "intel_lvds.h"
#include "intel_panel.h"
#include "intel_pch_display.h"
#include "intel_pps.h"
#include "intel_psr.h"
#include "intel_tc.h"
#include "intel_vdsc.h"
#include "intel_vrr.h"
#include "intel_crtc_state_dump.h"

/* DP DSC throughput values used for slice count calculations, in KPixels/s */
#define DP_DSC_PEAK_PIXEL_RATE			2720000
#define DP_DSC_MAX_ENC_THROUGHPUT_0		340000
#define DP_DSC_MAX_ENC_THROUGHPUT_1		400000

/* DP DSC FEC Overhead factor = 1/(0.972261) */
#define DP_DSC_FEC_OVERHEAD_FACTOR		972261

/* Compliance test status bits */
#define INTEL_DP_RESOLUTION_SHIFT_MASK	0
#define INTEL_DP_RESOLUTION_PREFERRED	(1 << INTEL_DP_RESOLUTION_SHIFT_MASK)
#define INTEL_DP_RESOLUTION_STANDARD	(2 << INTEL_DP_RESOLUTION_SHIFT_MASK)
#define INTEL_DP_RESOLUTION_FAILSAFE	(3 << INTEL_DP_RESOLUTION_SHIFT_MASK)


/* Constants for DP DSC configurations */
static const u8 valid_dsc_bpp[] = {6, 8, 10, 12, 15};

/*
 * With a single pipe configuration, the HW is capable of supporting a
 * maximum of 4 slices per line.
 */
static const u8 valid_dsc_slicecount[] = {1, 2, 4};
105
/**
 * intel_dp_is_edp - is the given port attached to an eDP panel (either CPU or PCH)
 * @intel_dp: DP struct
 *
 * Returns true if a CPU or PCH DP output is attached to an eDP panel,
 * false otherwise.
 *
 * This function is not safe to use before the encoder type has been set.
 */
115bool intel_dp_is_edp(struct intel_dp *intel_dp)
116{
117	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
118
119	return dig_port->base.type == INTEL_OUTPUT_EDP;
120}
121
122static void intel_dp_unset_edid(struct intel_dp *intel_dp);
123
124/* Is link rate UHBR and thus 128b/132b? */
125bool intel_dp_is_uhbr(const struct intel_crtc_state *crtc_state)
126{
127	return crtc_state->port_clock >= 1000000;
128}
129
130static void intel_dp_set_default_sink_rates(struct intel_dp *intel_dp)
131{
132	intel_dp->sink_rates[0] = 162000;
133	intel_dp->num_sink_rates = 1;
134}
135
136/* update sink rates from dpcd */
137static void intel_dp_set_dpcd_sink_rates(struct intel_dp *intel_dp)
138{
139	static const int dp_rates[] = {
140		162000, 270000, 540000, 810000
141	};
142	int i, max_rate;
143	int max_lttpr_rate;
144
145	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_CAN_DO_MAX_LINK_RATE_3_24_GBPS)) {
146		/* Needed, e.g., for Apple MBP 2017, 15 inch eDP Retina panel */
147		static const int quirk_rates[] = { 162000, 270000, 324000 };
148
149		memcpy(intel_dp->sink_rates, quirk_rates, sizeof(quirk_rates));
150		intel_dp->num_sink_rates = ARRAY_SIZE(quirk_rates);
151
152		return;
153	}
154
155	/*
156	 * Sink rates for 8b/10b.
157	 */
158	max_rate = drm_dp_bw_code_to_link_rate(intel_dp->dpcd[DP_MAX_LINK_RATE]);
159	max_lttpr_rate = drm_dp_lttpr_max_link_rate(intel_dp->lttpr_common_caps);
160	if (max_lttpr_rate)
161		max_rate = min(max_rate, max_lttpr_rate);
162
163	for (i = 0; i < ARRAY_SIZE(dp_rates); i++) {
164		if (dp_rates[i] > max_rate)
165			break;
166		intel_dp->sink_rates[i] = dp_rates[i];
167	}
168
169	/*
170	 * Sink rates for 128b/132b. If set, sink should support all 8b/10b
171	 * rates and 10 Gbps.
172	 */
173	if (intel_dp->dpcd[DP_MAIN_LINK_CHANNEL_CODING] & DP_CAP_ANSI_128B132B) {
174		u8 uhbr_rates = 0;
175
176		BUILD_BUG_ON(ARRAY_SIZE(intel_dp->sink_rates) < ARRAY_SIZE(dp_rates) + 3);
177
178		drm_dp_dpcd_readb(&intel_dp->aux,
179				  DP_128B132B_SUPPORTED_LINK_RATES, &uhbr_rates);
180
181		if (drm_dp_lttpr_count(intel_dp->lttpr_common_caps)) {
182			/* We have a repeater */
183			if (intel_dp->lttpr_common_caps[0] >= 0x20 &&
184			    intel_dp->lttpr_common_caps[DP_MAIN_LINK_CHANNEL_CODING_PHY_REPEATER -
185							DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV] &
186			    DP_PHY_REPEATER_128B132B_SUPPORTED) {
187				/* Repeater supports 128b/132b, valid UHBR rates */
188				uhbr_rates &= intel_dp->lttpr_common_caps[DP_PHY_REPEATER_128B132B_RATES -
189									  DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
190			} else {
191				/* Does not support 128b/132b */
192				uhbr_rates = 0;
193			}
194		}
195
196		if (uhbr_rates & DP_UHBR10)
197			intel_dp->sink_rates[i++] = 1000000;
198		if (uhbr_rates & DP_UHBR13_5)
199			intel_dp->sink_rates[i++] = 1350000;
200		if (uhbr_rates & DP_UHBR20)
201			intel_dp->sink_rates[i++] = 2000000;
202	}
203
204	intel_dp->num_sink_rates = i;
205}
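
/*
 * For illustration (hypothetical sink): a sink reporting DP_MAX_LINK_RATE of
 * 0x1e (8.1 Gbps), DP_UHBR10 set in DP_128B132B_SUPPORTED_LINK_RATES and no
 * LTTPRs would end up with
 *
 *	sink_rates[] = { 162000, 270000, 540000, 810000, 1000000 };
 *	num_sink_rates = 5;
 *
 * i.e. all 8b/10b rates up to the advertised maximum, with the supported
 * UHBR rates appended at the end.
 */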
206
207static void intel_dp_set_sink_rates(struct intel_dp *intel_dp)
208{
209	struct intel_connector *connector = intel_dp->attached_connector;
210	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
211	struct intel_encoder *encoder = &intel_dig_port->base;
212
213	intel_dp_set_dpcd_sink_rates(intel_dp);
214
215	if (intel_dp->num_sink_rates)
216		return;
217
218	drm_err(&dp_to_i915(intel_dp)->drm,
219		"[CONNECTOR:%d:%s][ENCODER:%d:%s] Invalid DPCD with no link rates, using defaults\n",
220		connector->base.base.id, connector->base.name,
221		encoder->base.base.id, encoder->base.name);
222
223	intel_dp_set_default_sink_rates(intel_dp);
224}
225
226static void intel_dp_set_default_max_sink_lane_count(struct intel_dp *intel_dp)
227{
228	intel_dp->max_sink_lane_count = 1;
229}
230
231static void intel_dp_set_max_sink_lane_count(struct intel_dp *intel_dp)
232{
233	struct intel_connector *connector = intel_dp->attached_connector;
234	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
235	struct intel_encoder *encoder = &intel_dig_port->base;
236
237	intel_dp->max_sink_lane_count = drm_dp_max_lane_count(intel_dp->dpcd);
238
239	switch (intel_dp->max_sink_lane_count) {
240	case 1:
241	case 2:
242	case 4:
243		return;
244	}
245
246	drm_err(&dp_to_i915(intel_dp)->drm,
247		"[CONNECTOR:%d:%s][ENCODER:%d:%s] Invalid DPCD max lane count (%d), using default\n",
248		connector->base.base.id, connector->base.name,
249		encoder->base.base.id, encoder->base.name,
250		intel_dp->max_sink_lane_count);
251
252	intel_dp_set_default_max_sink_lane_count(intel_dp);
253}
254
255/* Get length of rates array potentially limited by max_rate. */
256static int intel_dp_rate_limit_len(const int *rates, int len, int max_rate)
257{
258	int i;
259
260	/* Limit results by potentially reduced max rate */
261	for (i = 0; i < len; i++) {
262		if (rates[len - i - 1] <= max_rate)
263			return len - i;
264	}
265
266	return 0;
267}
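
/*
 * Example (illustrative values): with rates[] = { 162000, 270000, 540000,
 * 810000 } and max_rate = 540000, the scan from the end stops at 540000 and
 * intel_dp_rate_limit_len() returns 3, i.e. the first three entries remain
 * usable.
 */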
268
269/* Get length of common rates array potentially limited by max_rate. */
270static int intel_dp_common_len_rate_limit(const struct intel_dp *intel_dp,
271					  int max_rate)
272{
273	return intel_dp_rate_limit_len(intel_dp->common_rates,
274				       intel_dp->num_common_rates, max_rate);
275}
276
277static int intel_dp_common_rate(struct intel_dp *intel_dp, int index)
278{
279	if (drm_WARN_ON(&dp_to_i915(intel_dp)->drm,
280			index < 0 || index >= intel_dp->num_common_rates))
281		return 162000;
282
283	return intel_dp->common_rates[index];
284}
285
286/* Theoretical max between source and sink */
287static int intel_dp_max_common_rate(struct intel_dp *intel_dp)
288{
289	return intel_dp_common_rate(intel_dp, intel_dp->num_common_rates - 1);
290}
291
292static int intel_dp_max_source_lane_count(struct intel_digital_port *dig_port)
293{
294	int vbt_max_lanes = intel_bios_dp_max_lane_count(dig_port->base.devdata);
295	int max_lanes = dig_port->max_lanes;
296
297	if (vbt_max_lanes)
298		max_lanes = min(max_lanes, vbt_max_lanes);
299
300	return max_lanes;
301}
302
303/* Theoretical max between source and sink */
304static int intel_dp_max_common_lane_count(struct intel_dp *intel_dp)
305{
306	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
307	int source_max = intel_dp_max_source_lane_count(dig_port);
308	int sink_max = intel_dp->max_sink_lane_count;
309	int fia_max = intel_tc_port_fia_max_lane_count(dig_port);
310	int lttpr_max = drm_dp_lttpr_max_lane_count(intel_dp->lttpr_common_caps);
311
312	if (lttpr_max)
313		sink_max = min(sink_max, lttpr_max);
314
315	return min3(source_max, sink_max, fia_max);
316}
317
318int intel_dp_max_lane_count(struct intel_dp *intel_dp)
319{
320	switch (intel_dp->max_link_lane_count) {
321	case 1:
322	case 2:
323	case 4:
324		return intel_dp->max_link_lane_count;
325	default:
326		MISSING_CASE(intel_dp->max_link_lane_count);
327		return 1;
328	}
329}
330
331/*
332 * The required data bandwidth for a mode with given pixel clock and bpp. This
333 * is the required net bandwidth independent of the data bandwidth efficiency.
334 */
335int
336intel_dp_link_required(int pixel_clock, int bpp)
337{
338	/* pixel_clock is in kHz, divide bpp by 8 for bit to Byte conversion */
339	return DIV_ROUND_UP(pixel_clock * bpp, 8);
340}
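
/*
 * For example (hypothetical mode): 1920x1080@60 has a pixel clock of roughly
 * 148500 kHz, so at 24 bpp the required net data bandwidth is
 * 148500 * 24 / 8 = 445500 kBps.
 */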
341
342/*
343 * Given a link rate and lanes, get the data bandwidth.
344 *
345 * Data bandwidth is the actual payload rate, which depends on the data
346 * bandwidth efficiency and the link rate.
347 *
348 * For 8b/10b channel encoding, SST and non-FEC, the data bandwidth efficiency
349 * is 80%. For example, for a 1.62 Gbps link, 1.62*10^9 bps * 0.80 * (1/8) =
350 * 162000 kBps. With 8-bit symbols, we have 162000 kHz symbol clock. Just by
351 * coincidence, the port clock in kHz matches the data bandwidth in kBps, and
352 * they equal the link bit rate in Gbps multiplied by 100000. (Note that this no
353 * longer holds for data bandwidth as soon as FEC or MST is taken into account!)
354 *
355 * For 128b/132b channel encoding, the data bandwidth efficiency is 96.71%. For
356 * example, for a 10 Gbps link, 10*10^9 bps * 0.9671 * (1/8) = 1208875
357 * kBps. With 32-bit symbols, we have 312500 kHz symbol clock. The value 1000000
358 * does not match the symbol clock, the port clock (not even if you think in
359 * terms of a byte clock), nor the data bandwidth. It only matches the link bit
360 * rate in units of 10000 bps.
361 */
362int
363intel_dp_max_data_rate(int max_link_rate, int max_lanes)
364{
365	if (max_link_rate >= 1000000) {
		/*
		 * UHBR rates always use 128b/132b channel encoding, and have
		 * 96.71% data bandwidth efficiency. Consider max_link_rate the
		 * link bit rate in units of 10000 bps.
		 */
371		int max_link_rate_kbps = max_link_rate * 10;
372
373		max_link_rate_kbps = DIV_ROUND_CLOSEST_ULL(mul_u32_u32(max_link_rate_kbps, 9671), 10000);
374		max_link_rate = max_link_rate_kbps / 8;
375	}
376
377	/*
378	 * Lower than UHBR rates always use 8b/10b channel encoding, and have
379	 * 80% data bandwidth efficiency for SST non-FEC. However, this turns
380	 * out to be a nop by coincidence, and can be skipped:
381	 *
382	 *	int max_link_rate_kbps = max_link_rate * 10;
383	 *	max_link_rate_kbps = DIV_ROUND_CLOSEST_ULL(max_link_rate_kbps * 8, 10);
384	 *	max_link_rate = max_link_rate_kbps / 8;
385	 */
386
387	return max_link_rate * max_lanes;
388}
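
/*
 * Worked examples (illustrative only):
 *
 * - HBR3 x4: 810000 is below the UHBR threshold, so the 8b/10b "nop" above
 *   applies and the result is simply 810000 * 4 = 3240000 kBps.
 *
 * - UHBR10 x4: 1000000 * 10 = 10000000 kbps, scaled by 9671/10000 to
 *   9671000 kbps, divided by 8 to 1208875 kBps per lane, giving
 *   1208875 * 4 = 4835500 kBps.
 */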
389
390bool intel_dp_can_bigjoiner(struct intel_dp *intel_dp)
391{
392	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
393	struct intel_encoder *encoder = &intel_dig_port->base;
394	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
395
396	return DISPLAY_VER(dev_priv) >= 12 ||
397		(DISPLAY_VER(dev_priv) == 11 &&
398		 encoder->port != PORT_A);
399}
400
401static int dg2_max_source_rate(struct intel_dp *intel_dp)
402{
403	return intel_dp_is_edp(intel_dp) ? 810000 : 1350000;
404}
405
406static int icl_max_source_rate(struct intel_dp *intel_dp)
407{
408	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
409	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
410	enum phy phy = intel_port_to_phy(dev_priv, dig_port->base.port);
411
412	if (intel_phy_is_combo(dev_priv, phy) && !intel_dp_is_edp(intel_dp))
413		return 540000;
414
415	return 810000;
416}
417
418static int ehl_max_source_rate(struct intel_dp *intel_dp)
419{
420	if (intel_dp_is_edp(intel_dp))
421		return 540000;
422
423	return 810000;
424}
425
426static int mtl_max_source_rate(struct intel_dp *intel_dp)
427{
428	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
429	struct drm_i915_private *i915 = to_i915(dig_port->base.base.dev);
430	enum phy phy = intel_port_to_phy(i915, dig_port->base.port);
431
432	if (intel_is_c10phy(i915, phy))
433		return 810000;
434
435	return 2000000;
436}
437
438static int vbt_max_link_rate(struct intel_dp *intel_dp)
439{
440	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
441	int max_rate;
442
443	max_rate = intel_bios_dp_max_link_rate(encoder->devdata);
444
445	if (intel_dp_is_edp(intel_dp)) {
446		struct intel_connector *connector = intel_dp->attached_connector;
447		int edp_max_rate = connector->panel.vbt.edp.max_link_rate;
448
449		if (max_rate && edp_max_rate)
450			max_rate = min(max_rate, edp_max_rate);
451		else if (edp_max_rate)
452			max_rate = edp_max_rate;
453	}
454
455	return max_rate;
456}
457
458static void
459intel_dp_set_source_rates(struct intel_dp *intel_dp)
460{
461	/* The values must be in increasing order */
462	static const int mtl_rates[] = {
463		162000, 216000, 243000, 270000, 324000, 432000, 540000, 675000,
464		810000,	1000000, 1350000, 2000000,
465	};
466	static const int icl_rates[] = {
467		162000, 216000, 270000, 324000, 432000, 540000, 648000, 810000,
468		1000000, 1350000,
469	};
470	static const int bxt_rates[] = {
471		162000, 216000, 243000, 270000, 324000, 432000, 540000
472	};
473	static const int skl_rates[] = {
474		162000, 216000, 270000, 324000, 432000, 540000
475	};
476	static const int hsw_rates[] = {
477		162000, 270000, 540000
478	};
479	static const int g4x_rates[] = {
480		162000, 270000
481	};
482	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
483	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
484	const int *source_rates;
485	int size, max_rate = 0, vbt_max_rate;
486
487	/* This should only be done once */
488	drm_WARN_ON(&dev_priv->drm,
489		    intel_dp->source_rates || intel_dp->num_source_rates);
490
491	if (DISPLAY_VER(dev_priv) >= 14) {
492		source_rates = mtl_rates;
493		size = ARRAY_SIZE(mtl_rates);
494		max_rate = mtl_max_source_rate(intel_dp);
495	} else if (DISPLAY_VER(dev_priv) >= 11) {
496		source_rates = icl_rates;
497		size = ARRAY_SIZE(icl_rates);
498		if (IS_DG2(dev_priv))
499			max_rate = dg2_max_source_rate(intel_dp);
500		else if (IS_ALDERLAKE_P(dev_priv) || IS_ALDERLAKE_S(dev_priv) ||
501			 IS_DG1(dev_priv) || IS_ROCKETLAKE(dev_priv))
502			max_rate = 810000;
503		else if (IS_JASPERLAKE(dev_priv) || IS_ELKHARTLAKE(dev_priv))
504			max_rate = ehl_max_source_rate(intel_dp);
505		else
506			max_rate = icl_max_source_rate(intel_dp);
507	} else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) {
508		source_rates = bxt_rates;
509		size = ARRAY_SIZE(bxt_rates);
510	} else if (DISPLAY_VER(dev_priv) == 9) {
511		source_rates = skl_rates;
512		size = ARRAY_SIZE(skl_rates);
513	} else if ((IS_HASWELL(dev_priv) && !IS_HASWELL_ULX(dev_priv)) ||
514		   IS_BROADWELL(dev_priv)) {
515		source_rates = hsw_rates;
516		size = ARRAY_SIZE(hsw_rates);
517	} else {
518		source_rates = g4x_rates;
519		size = ARRAY_SIZE(g4x_rates);
520	}
521
522	vbt_max_rate = vbt_max_link_rate(intel_dp);
523	if (max_rate && vbt_max_rate)
524		max_rate = min(max_rate, vbt_max_rate);
525	else if (vbt_max_rate)
526		max_rate = vbt_max_rate;
527
528	if (max_rate)
529		size = intel_dp_rate_limit_len(source_rates, size, max_rate);
530
531	intel_dp->source_rates = source_rates;
532	intel_dp->num_source_rates = size;
533}
534
535static int intersect_rates(const int *source_rates, int source_len,
536			   const int *sink_rates, int sink_len,
537			   int *common_rates)
538{
539	int i = 0, j = 0, k = 0;
540
541	while (i < source_len && j < sink_len) {
542		if (source_rates[i] == sink_rates[j]) {
543			if (WARN_ON(k >= DP_MAX_SUPPORTED_RATES))
544				return k;
545			common_rates[k] = source_rates[i];
546			++k;
547			++i;
548			++j;
549		} else if (source_rates[i] < sink_rates[j]) {
550			++i;
551		} else {
552			++j;
553		}
554	}
555	return k;
556}
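
/*
 * For instance (made-up arrays): intersecting the source rates
 * { 162000, 270000, 540000, 810000 } with the sink rates
 * { 162000, 270000, 324000, 540000 } yields the common rates
 * { 162000, 270000, 540000 } and a return value of 3; both inputs must be
 * sorted in ascending order for the merge to work.
 */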
557
558/* return index of rate in rates array, or -1 if not found */
559static int intel_dp_rate_index(const int *rates, int len, int rate)
560{
561	int i;
562
563	for (i = 0; i < len; i++)
564		if (rate == rates[i])
565			return i;
566
567	return -1;
568}
569
570static void intel_dp_set_common_rates(struct intel_dp *intel_dp)
571{
572	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
573
574	drm_WARN_ON(&i915->drm,
575		    !intel_dp->num_source_rates || !intel_dp->num_sink_rates);
576
577	intel_dp->num_common_rates = intersect_rates(intel_dp->source_rates,
578						     intel_dp->num_source_rates,
579						     intel_dp->sink_rates,
580						     intel_dp->num_sink_rates,
581						     intel_dp->common_rates);
582
583	/* Paranoia, there should always be something in common. */
584	if (drm_WARN_ON(&i915->drm, intel_dp->num_common_rates == 0)) {
585		intel_dp->common_rates[0] = 162000;
586		intel_dp->num_common_rates = 1;
587	}
588}
589
590static bool intel_dp_link_params_valid(struct intel_dp *intel_dp, int link_rate,
591				       u8 lane_count)
592{
593	/*
594	 * FIXME: we need to synchronize the current link parameters with
595	 * hardware readout. Currently fast link training doesn't work on
596	 * boot-up.
597	 */
598	if (link_rate == 0 ||
599	    link_rate > intel_dp->max_link_rate)
600		return false;
601
602	if (lane_count == 0 ||
603	    lane_count > intel_dp_max_lane_count(intel_dp))
604		return false;
605
606	return true;
607}
608
609static bool intel_dp_can_link_train_fallback_for_edp(struct intel_dp *intel_dp,
610						     int link_rate,
611						     u8 lane_count)
612{
613	/* FIXME figure out what we actually want here */
614	const struct drm_display_mode *fixed_mode =
615		intel_panel_preferred_fixed_mode(intel_dp->attached_connector);
616	int mode_rate, max_rate;
617
618	mode_rate = intel_dp_link_required(fixed_mode->clock, 18);
619	max_rate = intel_dp_max_data_rate(link_rate, lane_count);
620	if (mode_rate > max_rate)
621		return false;
622
623	return true;
624}
625
626int intel_dp_get_link_train_fallback_values(struct intel_dp *intel_dp,
627					    int link_rate, u8 lane_count)
628{
629	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
630	int index;
631
632	/*
633	 * TODO: Enable fallback on MST links once MST link compute can handle
634	 * the fallback params.
635	 */
636	if (intel_dp->is_mst) {
637		drm_err(&i915->drm, "Link Training Unsuccessful\n");
638		return -1;
639	}
640
641	if (intel_dp_is_edp(intel_dp) && !intel_dp->use_max_params) {
642		drm_dbg_kms(&i915->drm,
643			    "Retrying Link training for eDP with max parameters\n");
644		intel_dp->use_max_params = true;
645		return 0;
646	}
647
648	index = intel_dp_rate_index(intel_dp->common_rates,
649				    intel_dp->num_common_rates,
650				    link_rate);
651	if (index > 0) {
652		if (intel_dp_is_edp(intel_dp) &&
653		    !intel_dp_can_link_train_fallback_for_edp(intel_dp,
654							      intel_dp_common_rate(intel_dp, index - 1),
655							      lane_count)) {
656			drm_dbg_kms(&i915->drm,
657				    "Retrying Link training for eDP with same parameters\n");
658			return 0;
659		}
660		intel_dp->max_link_rate = intel_dp_common_rate(intel_dp, index - 1);
661		intel_dp->max_link_lane_count = lane_count;
662	} else if (lane_count > 1) {
663		if (intel_dp_is_edp(intel_dp) &&
664		    !intel_dp_can_link_train_fallback_for_edp(intel_dp,
665							      intel_dp_max_common_rate(intel_dp),
666							      lane_count >> 1)) {
667			drm_dbg_kms(&i915->drm,
668				    "Retrying Link training for eDP with same parameters\n");
669			return 0;
670		}
671		intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);
672		intel_dp->max_link_lane_count = lane_count >> 1;
673	} else {
674		drm_err(&i915->drm, "Link Training Unsuccessful\n");
675		return -1;
676	}
677
678	return 0;
679}
680
681u32 intel_dp_mode_to_fec_clock(u32 mode_clock)
682{
683	return div_u64(mul_u32_u32(mode_clock, 1000000U),
684		       DP_DSC_FEC_OVERHEAD_FACTOR);
685}
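
/*
 * E.g. (hypothetical mode clock): a 300000 kHz mode clock becomes
 * 300000 * 1000000 / 972261 ~= 308559 kHz, i.e. the effective clock used for
 * the bandwidth checks is about 2.9% higher to account for the FEC overhead.
 */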
686
687static int
688small_joiner_ram_size_bits(struct drm_i915_private *i915)
689{
690	if (DISPLAY_VER(i915) >= 13)
691		return 17280 * 8;
692	else if (DISPLAY_VER(i915) >= 11)
693		return 7680 * 8;
694	else
695		return 6144 * 8;
696}
697
698u32 intel_dp_dsc_nearest_valid_bpp(struct drm_i915_private *i915, u32 bpp, u32 pipe_bpp)
699{
700	u32 bits_per_pixel = bpp;
701	int i;
702
703	/* Error out if the max bpp is less than smallest allowed valid bpp */
704	if (bits_per_pixel < valid_dsc_bpp[0]) {
705		drm_dbg_kms(&i915->drm, "Unsupported BPP %u, min %u\n",
706			    bits_per_pixel, valid_dsc_bpp[0]);
707		return 0;
708	}
709
	/* From XE_LPD onwards we support compressed bpps from bpc up to uncompressed bpp - 1 */
711	if (DISPLAY_VER(i915) >= 13) {
712		bits_per_pixel = min(bits_per_pixel, pipe_bpp - 1);
713
		/*
		 * According to BSpec, 27 is the max DSC output bpp and
		 * 8 is the min DSC output bpp.
		 * While we can still clamp higher bpp values to 27 to save bandwidth,
		 * needing to compress down to a bpp < 8 means we can't do that,
		 * and it probably means we can't fit the required mode even with
		 * DSC enabled.
		 */
722		if (bits_per_pixel < 8) {
723			drm_dbg_kms(&i915->drm, "Unsupported BPP %u, min 8\n",
724				    bits_per_pixel);
725			return 0;
726		}
727		bits_per_pixel = min_t(u32, bits_per_pixel, 27);
728	} else {
729		/* Find the nearest match in the array of known BPPs from VESA */
730		for (i = 0; i < ARRAY_SIZE(valid_dsc_bpp) - 1; i++) {
731			if (bits_per_pixel < valid_dsc_bpp[i + 1])
732				break;
733		}
734		drm_dbg_kms(&i915->drm, "Set dsc bpp from %d to VESA %d\n",
735			    bits_per_pixel, valid_dsc_bpp[i]);
736
737		bits_per_pixel = valid_dsc_bpp[i];
738	}
739
740	return bits_per_pixel;
741}
742
743u16 intel_dp_dsc_get_output_bpp(struct drm_i915_private *i915,
744				u32 link_clock, u32 lane_count,
745				u32 mode_clock, u32 mode_hdisplay,
746				bool bigjoiner,
747				u32 pipe_bpp,
748				u32 timeslots)
749{
750	u32 bits_per_pixel, max_bpp_small_joiner_ram;
751
752	/*
753	 * Available Link Bandwidth(Kbits/sec) = (NumberOfLanes)*
754	 * (LinkSymbolClock)* 8 * (TimeSlots / 64)
755	 * for SST -> TimeSlots is 64(i.e all TimeSlots that are available)
756	 * for MST -> TimeSlots has to be calculated, based on mode requirements
757	 *
758	 * Due to FEC overhead, the available bw is reduced to 97.2261%.
759	 * To support the given mode:
760	 * Bandwidth required should be <= Available link Bandwidth * FEC Overhead
761	 * =>ModeClock * bits_per_pixel <= Available Link Bandwidth * FEC Overhead
762	 * =>bits_per_pixel <= Available link Bandwidth * FEC Overhead / ModeClock
763	 * =>bits_per_pixel <= (NumberOfLanes * LinkSymbolClock) * 8 (TimeSlots / 64) /
764	 *		       (ModeClock / FEC Overhead)
765	 * =>bits_per_pixel <= (NumberOfLanes * LinkSymbolClock * TimeSlots) /
766	 *		       (ModeClock / FEC Overhead * 8)
767	 */
768	bits_per_pixel = ((link_clock * lane_count) * timeslots) /
769			 (intel_dp_mode_to_fec_clock(mode_clock) * 8);
770
771	drm_dbg_kms(&i915->drm, "Max link bpp is %u for %u timeslots "
772				"total bw %u pixel clock %u\n",
773				bits_per_pixel, timeslots,
774				(link_clock * lane_count * 8),
775				intel_dp_mode_to_fec_clock(mode_clock));
776
777	/* Small Joiner Check: output bpp <= joiner RAM (bits) / Horiz. width */
778	max_bpp_small_joiner_ram = small_joiner_ram_size_bits(i915) /
779		mode_hdisplay;
780
781	if (bigjoiner)
782		max_bpp_small_joiner_ram *= 2;
783
784	/*
785	 * Greatest allowed DSC BPP = MIN (output BPP from available Link BW
786	 * check, output bpp from small joiner RAM check)
787	 */
788	bits_per_pixel = min(bits_per_pixel, max_bpp_small_joiner_ram);
789
790	if (bigjoiner) {
791		u32 max_bpp_bigjoiner =
792			i915->display.cdclk.max_cdclk_freq * 48 /
793			intel_dp_mode_to_fec_clock(mode_clock);
794
795		bits_per_pixel = min(bits_per_pixel, max_bpp_bigjoiner);
796	}
797
798	bits_per_pixel = intel_dp_dsc_nearest_valid_bpp(i915, bits_per_pixel, pipe_bpp);
799
	/*
	 * Compressed BPP is returned in U6.4 format, so multiply by 16.
	 * For Gen 11 the fractional part is 0.
	 */
804	return bits_per_pixel << 4;
805}
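
/*
 * Rough worked example (hypothetical numbers): HBR3 x4 (link_clock 810000,
 * lane_count 4), timeslots 64 (SST), mode_clock 600000 kHz (FEC-adjusted to
 * ~617118 kHz), mode_hdisplay 3840, pipe_bpp 30, no big joiner, on
 * DISPLAY_VER >= 13:
 *
 * - link bandwidth check: 810000 * 4 * 64 / (617118 * 8) ~= 42 bpp
 * - small joiner RAM check: 17280 * 8 / 3840 = 36 bpp
 * - min of the two, then clamped to pipe_bpp - 1 and the max DSC output
 *   bpp of 27 -> 27 bpp
 *
 * so the returned U6.4 value would be 27 << 4 = 432.
 */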
806
807u8 intel_dp_dsc_get_slice_count(struct intel_dp *intel_dp,
808				int mode_clock, int mode_hdisplay,
809				bool bigjoiner)
810{
811	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
812	u8 min_slice_count, i;
813	int max_slice_width;
814
815	if (mode_clock <= DP_DSC_PEAK_PIXEL_RATE)
816		min_slice_count = DIV_ROUND_UP(mode_clock,
817					       DP_DSC_MAX_ENC_THROUGHPUT_0);
818	else
819		min_slice_count = DIV_ROUND_UP(mode_clock,
820					       DP_DSC_MAX_ENC_THROUGHPUT_1);
821
	/*
	 * Due to some DSC engine BW limitations, we need to enable a second
	 * slice and VDSC engine whenever we approach close enough to the max CDCLK.
	 */
826	if (mode_clock >= ((i915->display.cdclk.max_cdclk_freq * 85) / 100))
827		min_slice_count = max_t(u8, min_slice_count, 2);
828
829	max_slice_width = drm_dp_dsc_sink_max_slice_width(intel_dp->dsc_dpcd);
830	if (max_slice_width < DP_DSC_MIN_SLICE_WIDTH_VALUE) {
831		drm_dbg_kms(&i915->drm,
832			    "Unsupported slice width %d by DP DSC Sink device\n",
833			    max_slice_width);
834		return 0;
835	}
836	/* Also take into account max slice width */
837	min_slice_count = max_t(u8, min_slice_count,
838				DIV_ROUND_UP(mode_hdisplay,
839					     max_slice_width));
840
841	/* Find the closest match to the valid slice count values */
842	for (i = 0; i < ARRAY_SIZE(valid_dsc_slicecount); i++) {
843		u8 test_slice_count = valid_dsc_slicecount[i] << bigjoiner;
844
845		if (test_slice_count >
846		    drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd, false))
847			break;
848
849		/* big joiner needs small joiner to be enabled */
850		if (bigjoiner && test_slice_count < 4)
851			continue;
852
853		if (min_slice_count <= test_slice_count)
854			return test_slice_count;
855	}
856
857	drm_dbg_kms(&i915->drm, "Unsupported Slice Count %d\n",
858		    min_slice_count);
859	return 0;
860}
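
/*
 * Example (assumed sink caps): a 3840x2160@60 mode (mode_clock ~594000 kHz)
 * is below DP_DSC_PEAK_PIXEL_RATE, so min_slice_count starts at
 * DIV_ROUND_UP(594000, 340000) = 2. With a sink max slice width of 2560 the
 * hdisplay check also gives DIV_ROUND_UP(3840, 2560) = 2, so the first valid
 * slice count >= 2 that the sink supports, i.e. 2, is returned (no big
 * joiner assumed).
 */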
861
862static bool source_can_output(struct intel_dp *intel_dp,
863			      enum intel_output_format format)
864{
865	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
866
867	switch (format) {
868	case INTEL_OUTPUT_FORMAT_RGB:
869		return true;
870
871	case INTEL_OUTPUT_FORMAT_YCBCR444:
		/*
		 * No YCbCr output support on gmch platforms.
		 * Also, ILK doesn't seem capable of DP YCbCr output.
		 * The displayed image is severely corrupted. SNB+ is fine.
		 */
877		return !HAS_GMCH(i915) && !IS_IRONLAKE(i915);
878
879	case INTEL_OUTPUT_FORMAT_YCBCR420:
880		/* Platform < Gen 11 cannot output YCbCr420 format */
881		return DISPLAY_VER(i915) >= 11;
882
883	default:
884		MISSING_CASE(format);
885		return false;
886	}
887}
888
889static bool
890dfp_can_convert_from_rgb(struct intel_dp *intel_dp,
891			 enum intel_output_format sink_format)
892{
893	if (!drm_dp_is_branch(intel_dp->dpcd))
894		return false;
895
896	if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR444)
897		return intel_dp->dfp.rgb_to_ycbcr;
898
899	if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR420)
900		return intel_dp->dfp.rgb_to_ycbcr &&
901			intel_dp->dfp.ycbcr_444_to_420;
902
903	return false;
904}
905
906static bool
907dfp_can_convert_from_ycbcr444(struct intel_dp *intel_dp,
908			      enum intel_output_format sink_format)
909{
910	if (!drm_dp_is_branch(intel_dp->dpcd))
911		return false;
912
913	if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR420)
914		return intel_dp->dfp.ycbcr_444_to_420;
915
916	return false;
917}
918
919static enum intel_output_format
920intel_dp_output_format(struct intel_connector *connector,
921		       enum intel_output_format sink_format)
922{
923	struct intel_dp *intel_dp = intel_attached_dp(connector);
924	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
925	enum intel_output_format output_format;
926
927	if (intel_dp->force_dsc_output_format)
928		return intel_dp->force_dsc_output_format;
929
930	if (sink_format == INTEL_OUTPUT_FORMAT_RGB ||
931	    dfp_can_convert_from_rgb(intel_dp, sink_format))
932		output_format = INTEL_OUTPUT_FORMAT_RGB;
933
934	else if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR444 ||
935		 dfp_can_convert_from_ycbcr444(intel_dp, sink_format))
936		output_format = INTEL_OUTPUT_FORMAT_YCBCR444;
937
938	else
939		output_format = INTEL_OUTPUT_FORMAT_YCBCR420;
940
941	drm_WARN_ON(&i915->drm, !source_can_output(intel_dp, output_format));
942
943	return output_format;
944}
945
946int intel_dp_min_bpp(enum intel_output_format output_format)
947{
948	if (output_format == INTEL_OUTPUT_FORMAT_RGB)
949		return 6 * 3;
950	else
951		return 8 * 3;
952}
953
954static int intel_dp_output_bpp(enum intel_output_format output_format, int bpp)
955{
	/*
	 * The bpp value was computed assuming RGB output. For YCbCr 4:2:0
	 * output the effective number of bits per pixel is half that of
	 * an RGB pixel.
	 */
961	if (output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
962		bpp /= 2;
963
964	return bpp;
965}
966
967static enum intel_output_format
968intel_dp_sink_format(struct intel_connector *connector,
969		     const struct drm_display_mode *mode)
970{
971	const struct drm_display_info *info = &connector->base.display_info;
972
973	if (drm_mode_is_420_only(info, mode))
974		return INTEL_OUTPUT_FORMAT_YCBCR420;
975
976	return INTEL_OUTPUT_FORMAT_RGB;
977}
978
979static int
980intel_dp_mode_min_output_bpp(struct intel_connector *connector,
981			     const struct drm_display_mode *mode)
982{
983	enum intel_output_format output_format, sink_format;
984
985	sink_format = intel_dp_sink_format(connector, mode);
986
987	output_format = intel_dp_output_format(connector, sink_format);
988
989	return intel_dp_output_bpp(output_format, intel_dp_min_bpp(output_format));
990}
991
992static bool intel_dp_hdisplay_bad(struct drm_i915_private *dev_priv,
993				  int hdisplay)
994{
995	/*
996	 * Older platforms don't like hdisplay==4096 with DP.
997	 *
998	 * On ILK/SNB/IVB the pipe seems to be somewhat running (scanline
999	 * and frame counter increment), but we don't get vblank interrupts,
1000	 * and the pipe underruns immediately. The link also doesn't seem
1001	 * to get trained properly.
1002	 *
1003	 * On CHV the vblank interrupts don't seem to disappear but
1004	 * otherwise the symptoms are similar.
1005	 *
1006	 * TODO: confirm the behaviour on HSW+
1007	 */
1008	return hdisplay == 4096 && !HAS_DDI(dev_priv);
1009}
1010
1011static int intel_dp_max_tmds_clock(struct intel_dp *intel_dp)
1012{
1013	struct intel_connector *connector = intel_dp->attached_connector;
1014	const struct drm_display_info *info = &connector->base.display_info;
1015	int max_tmds_clock = intel_dp->dfp.max_tmds_clock;
1016
1017	/* Only consider the sink's max TMDS clock if we know this is a HDMI DFP */
1018	if (max_tmds_clock && info->max_tmds_clock)
1019		max_tmds_clock = min(max_tmds_clock, info->max_tmds_clock);
1020
1021	return max_tmds_clock;
1022}
1023
1024static enum drm_mode_status
1025intel_dp_tmds_clock_valid(struct intel_dp *intel_dp,
1026			  int clock, int bpc,
1027			  enum intel_output_format sink_format,
1028			  bool respect_downstream_limits)
1029{
1030	int tmds_clock, min_tmds_clock, max_tmds_clock;
1031
1032	if (!respect_downstream_limits)
1033		return MODE_OK;
1034
1035	tmds_clock = intel_hdmi_tmds_clock(clock, bpc, sink_format);
1036
1037	min_tmds_clock = intel_dp->dfp.min_tmds_clock;
1038	max_tmds_clock = intel_dp_max_tmds_clock(intel_dp);
1039
1040	if (min_tmds_clock && tmds_clock < min_tmds_clock)
1041		return MODE_CLOCK_LOW;
1042
1043	if (max_tmds_clock && tmds_clock > max_tmds_clock)
1044		return MODE_CLOCK_HIGH;
1045
1046	return MODE_OK;
1047}
1048
1049static enum drm_mode_status
1050intel_dp_mode_valid_downstream(struct intel_connector *connector,
1051			       const struct drm_display_mode *mode,
1052			       int target_clock)
1053{
1054	struct intel_dp *intel_dp = intel_attached_dp(connector);
1055	const struct drm_display_info *info = &connector->base.display_info;
1056	enum drm_mode_status status;
1057	enum intel_output_format sink_format;
1058
1059	/* If PCON supports FRL MODE, check FRL bandwidth constraints */
1060	if (intel_dp->dfp.pcon_max_frl_bw) {
1061		int target_bw;
1062		int max_frl_bw;
1063		int bpp = intel_dp_mode_min_output_bpp(connector, mode);
1064
1065		target_bw = bpp * target_clock;
1066
1067		max_frl_bw = intel_dp->dfp.pcon_max_frl_bw;
1068
		/* convert bw from Gbps to Kbps */
1070		max_frl_bw = max_frl_bw * 1000000;
1071
1072		if (target_bw > max_frl_bw)
1073			return MODE_CLOCK_HIGH;
1074
1075		return MODE_OK;
1076	}
1077
1078	if (intel_dp->dfp.max_dotclock &&
1079	    target_clock > intel_dp->dfp.max_dotclock)
1080		return MODE_CLOCK_HIGH;
1081
1082	sink_format = intel_dp_sink_format(connector, mode);
1083
1084	/* Assume 8bpc for the DP++/HDMI/DVI TMDS clock check */
1085	status = intel_dp_tmds_clock_valid(intel_dp, target_clock,
1086					   8, sink_format, true);
1087
1088	if (status != MODE_OK) {
1089		if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
1090		    !connector->base.ycbcr_420_allowed ||
1091		    !drm_mode_is_420_also(info, mode))
1092			return status;
1093		sink_format = INTEL_OUTPUT_FORMAT_YCBCR420;
1094		status = intel_dp_tmds_clock_valid(intel_dp, target_clock,
1095						   8, sink_format, true);
1096		if (status != MODE_OK)
1097			return status;
1098	}
1099
1100	return MODE_OK;
1101}
1102
1103bool intel_dp_need_bigjoiner(struct intel_dp *intel_dp,
1104			     int hdisplay, int clock)
1105{
1106	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1107
1108	if (!intel_dp_can_bigjoiner(intel_dp))
1109		return false;
1110
1111	return clock > i915->max_dotclk_freq || hdisplay > 5120;
1112}
1113
1114static enum drm_mode_status
1115intel_dp_mode_valid(struct drm_connector *_connector,
1116		    struct drm_display_mode *mode)
1117{
1118	struct intel_connector *connector = to_intel_connector(_connector);
1119	struct intel_dp *intel_dp = intel_attached_dp(connector);
1120	struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
1121	const struct drm_display_mode *fixed_mode;
1122	int target_clock = mode->clock;
1123	int max_rate, mode_rate, max_lanes, max_link_clock;
1124	int max_dotclk = dev_priv->max_dotclk_freq;
1125	u16 dsc_max_output_bpp = 0;
1126	u8 dsc_slice_count = 0;
1127	enum drm_mode_status status;
1128	bool dsc = false, bigjoiner = false;
1129
1130	status = intel_cpu_transcoder_mode_valid(dev_priv, mode);
1131	if (status != MODE_OK)
1132		return status;
1133
1134	if (mode->flags & DRM_MODE_FLAG_DBLCLK)
1135		return MODE_H_ILLEGAL;
1136
1137	fixed_mode = intel_panel_fixed_mode(connector, mode);
1138	if (intel_dp_is_edp(intel_dp) && fixed_mode) {
1139		status = intel_panel_mode_valid(connector, mode);
1140		if (status != MODE_OK)
1141			return status;
1142
1143		target_clock = fixed_mode->clock;
1144	}
1145
1146	if (mode->clock < 10000)
1147		return MODE_CLOCK_LOW;
1148
1149	if (intel_dp_need_bigjoiner(intel_dp, mode->hdisplay, target_clock)) {
1150		bigjoiner = true;
1151		max_dotclk *= 2;
1152	}
1153	if (target_clock > max_dotclk)
1154		return MODE_CLOCK_HIGH;
1155
1156	if (intel_dp_hdisplay_bad(dev_priv, mode->hdisplay))
1157		return MODE_H_ILLEGAL;
1158
1159	max_link_clock = intel_dp_max_link_rate(intel_dp);
1160	max_lanes = intel_dp_max_lane_count(intel_dp);
1161
1162	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
1163	mode_rate = intel_dp_link_required(target_clock,
1164					   intel_dp_mode_min_output_bpp(connector, mode));
1165
1166	if (HAS_DSC(dev_priv) &&
1167	    drm_dp_sink_supports_dsc(intel_dp->dsc_dpcd)) {
		/*
		 * TBD: pass the connector BPC; for now use U8_MAX so that the
		 * max BPC on that platform is picked.
		 */
1172		int pipe_bpp = intel_dp_dsc_compute_bpp(intel_dp, U8_MAX);
1173
1174		/*
1175		 * Output bpp is stored in 6.4 format so right shift by 4 to get the
1176		 * integer value since we support only integer values of bpp.
1177		 */
1178		if (intel_dp_is_edp(intel_dp)) {
1179			dsc_max_output_bpp =
1180				drm_edp_dsc_sink_output_bpp(intel_dp->dsc_dpcd) >> 4;
1181			dsc_slice_count =
1182				drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd,
1183								true);
1184		} else if (drm_dp_sink_supports_fec(intel_dp->fec_capable)) {
1185			dsc_max_output_bpp =
1186				intel_dp_dsc_get_output_bpp(dev_priv,
1187							    max_link_clock,
1188							    max_lanes,
1189							    target_clock,
1190							    mode->hdisplay,
1191							    bigjoiner,
1192							    pipe_bpp, 64) >> 4;
1193			dsc_slice_count =
1194				intel_dp_dsc_get_slice_count(intel_dp,
1195							     target_clock,
1196							     mode->hdisplay,
1197							     bigjoiner);
1198		}
1199
1200		dsc = dsc_max_output_bpp && dsc_slice_count;
1201	}
1202
	/*
	 * A big joiner configuration needs DSC on TGL, which is not true for
	 * XE_LPD, where the uncompressed joiner is supported.
	 */
1207	if (DISPLAY_VER(dev_priv) < 13 && bigjoiner && !dsc)
1208		return MODE_CLOCK_HIGH;
1209
1210	if (mode_rate > max_rate && !dsc)
1211		return MODE_CLOCK_HIGH;
1212
1213	status = intel_dp_mode_valid_downstream(connector, mode, target_clock);
1214	if (status != MODE_OK)
1215		return status;
1216
1217	return intel_mode_valid_max_plane_size(dev_priv, mode, bigjoiner);
1218}
1219
1220bool intel_dp_source_supports_tps3(struct drm_i915_private *i915)
1221{
1222	return DISPLAY_VER(i915) >= 9 || IS_BROADWELL(i915) || IS_HASWELL(i915);
1223}
1224
1225bool intel_dp_source_supports_tps4(struct drm_i915_private *i915)
1226{
1227	return DISPLAY_VER(i915) >= 10;
1228}
1229
1230static void snprintf_int_array(char *str, size_t len,
1231			       const int *array, int nelem)
1232{
1233	int i;
1234
1235	str[0] = '\0';
1236
1237	for (i = 0; i < nelem; i++) {
1238		int r = snprintf(str, len, "%s%d", i ? ", " : "", array[i]);
1239		if (r >= len)
1240			return;
1241		str += r;
1242		len -= r;
1243	}
1244}
1245
1246static void intel_dp_print_rates(struct intel_dp *intel_dp)
1247{
1248	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1249	char str[128]; /* FIXME: too big for stack? */
1250
1251	if (!drm_debug_enabled(DRM_UT_KMS))
1252		return;
1253
1254	snprintf_int_array(str, sizeof(str),
1255			   intel_dp->source_rates, intel_dp->num_source_rates);
1256	drm_dbg_kms(&i915->drm, "source rates: %s\n", str);
1257
1258	snprintf_int_array(str, sizeof(str),
1259			   intel_dp->sink_rates, intel_dp->num_sink_rates);
1260	drm_dbg_kms(&i915->drm, "sink rates: %s\n", str);
1261
1262	snprintf_int_array(str, sizeof(str),
1263			   intel_dp->common_rates, intel_dp->num_common_rates);
1264	drm_dbg_kms(&i915->drm, "common rates: %s\n", str);
1265}
1266
1267int
1268intel_dp_max_link_rate(struct intel_dp *intel_dp)
1269{
1270	int len;
1271
1272	len = intel_dp_common_len_rate_limit(intel_dp, intel_dp->max_link_rate);
1273
1274	return intel_dp_common_rate(intel_dp, len - 1);
1275}
1276
1277int intel_dp_rate_select(struct intel_dp *intel_dp, int rate)
1278{
1279	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1280	int i = intel_dp_rate_index(intel_dp->sink_rates,
1281				    intel_dp->num_sink_rates, rate);
1282
1283	if (drm_WARN_ON(&i915->drm, i < 0))
1284		i = 0;
1285
1286	return i;
1287}
1288
1289void intel_dp_compute_rate(struct intel_dp *intel_dp, int port_clock,
1290			   u8 *link_bw, u8 *rate_select)
1291{
1292	/* eDP 1.4 rate select method. */
1293	if (intel_dp->use_rate_select) {
1294		*link_bw = 0;
1295		*rate_select =
1296			intel_dp_rate_select(intel_dp, port_clock);
1297	} else {
1298		*link_bw = drm_dp_link_rate_to_bw_code(port_clock);
1299		*rate_select = 0;
1300	}
1301}
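
/*
 * E.g. for a DP sink without rate select (use_rate_select == false) and a
 * port_clock of 540000, this yields link_bw == DP_LINK_BW_5_4 (0x14) and
 * rate_select == 0; for an eDP 1.4 panel using rate select, link_bw is 0 and
 * rate_select is the index of 540000 in sink_rates[].
 */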
1302
1303bool intel_dp_has_hdmi_sink(struct intel_dp *intel_dp)
1304{
1305	struct intel_connector *connector = intel_dp->attached_connector;
1306
1307	return connector->base.display_info.is_hdmi;
1308}
1309
1310static bool intel_dp_source_supports_fec(struct intel_dp *intel_dp,
1311					 const struct intel_crtc_state *pipe_config)
1312{
1313	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1314
1315	/* On TGL, FEC is supported on all Pipes */
1316	if (DISPLAY_VER(dev_priv) >= 12)
1317		return true;
1318
1319	if (DISPLAY_VER(dev_priv) == 11 && pipe_config->cpu_transcoder != TRANSCODER_A)
1320		return true;
1321
1322	return false;
1323}
1324
1325static bool intel_dp_supports_fec(struct intel_dp *intel_dp,
1326				  const struct intel_crtc_state *pipe_config)
1327{
1328	return intel_dp_source_supports_fec(intel_dp, pipe_config) &&
1329		drm_dp_sink_supports_fec(intel_dp->fec_capable);
1330}
1331
1332static bool intel_dp_supports_dsc(struct intel_dp *intel_dp,
1333				  const struct intel_crtc_state *crtc_state)
1334{
1335	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP) && !crtc_state->fec_enable)
1336		return false;
1337
1338	return intel_dsc_source_support(crtc_state) &&
1339		drm_dp_sink_supports_dsc(intel_dp->dsc_dpcd);
1340}
1341
1342static int intel_dp_hdmi_compute_bpc(struct intel_dp *intel_dp,
1343				     const struct intel_crtc_state *crtc_state,
1344				     int bpc, bool respect_downstream_limits)
1345{
1346	int clock = crtc_state->hw.adjusted_mode.crtc_clock;
1347
1348	/*
1349	 * Current bpc could already be below 8bpc due to
1350	 * FDI bandwidth constraints or other limits.
1351	 * HDMI minimum is 8bpc however.
1352	 */
1353	bpc = max(bpc, 8);
1354
1355	/*
1356	 * We will never exceed downstream TMDS clock limits while
1357	 * attempting deep color. If the user insists on forcing an
1358	 * out of spec mode they will have to be satisfied with 8bpc.
1359	 */
1360	if (!respect_downstream_limits)
1361		bpc = 8;
1362
1363	for (; bpc >= 8; bpc -= 2) {
1364		if (intel_hdmi_bpc_possible(crtc_state, bpc,
1365					    intel_dp_has_hdmi_sink(intel_dp)) &&
1366		    intel_dp_tmds_clock_valid(intel_dp, clock, bpc, crtc_state->sink_format,
1367					      respect_downstream_limits) == MODE_OK)
1368			return bpc;
1369	}
1370
1371	return -EINVAL;
1372}
1373
1374static int intel_dp_max_bpp(struct intel_dp *intel_dp,
1375			    const struct intel_crtc_state *crtc_state,
1376			    bool respect_downstream_limits)
1377{
1378	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1379	struct intel_connector *intel_connector = intel_dp->attached_connector;
1380	int bpp, bpc;
1381
1382	bpc = crtc_state->pipe_bpp / 3;
1383
1384	if (intel_dp->dfp.max_bpc)
1385		bpc = min_t(int, bpc, intel_dp->dfp.max_bpc);
1386
1387	if (intel_dp->dfp.min_tmds_clock) {
1388		int max_hdmi_bpc;
1389
1390		max_hdmi_bpc = intel_dp_hdmi_compute_bpc(intel_dp, crtc_state, bpc,
1391							 respect_downstream_limits);
1392		if (max_hdmi_bpc < 0)
1393			return 0;
1394
1395		bpc = min(bpc, max_hdmi_bpc);
1396	}
1397
1398	bpp = bpc * 3;
1399	if (intel_dp_is_edp(intel_dp)) {
		/* Get bpp from VBT only for panels that don't have bpp in EDID */
1401		if (intel_connector->base.display_info.bpc == 0 &&
1402		    intel_connector->panel.vbt.edp.bpp &&
1403		    intel_connector->panel.vbt.edp.bpp < bpp) {
1404			drm_dbg_kms(&dev_priv->drm,
1405				    "clamping bpp for eDP panel to BIOS-provided %i\n",
1406				    intel_connector->panel.vbt.edp.bpp);
1407			bpp = intel_connector->panel.vbt.edp.bpp;
1408		}
1409	}
1410
1411	return bpp;
1412}
1413
1414/* Adjust link config limits based on compliance test requests. */
1415void
1416intel_dp_adjust_compliance_config(struct intel_dp *intel_dp,
1417				  struct intel_crtc_state *pipe_config,
1418				  struct link_config_limits *limits)
1419{
1420	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1421
1422	/* For DP Compliance we override the computed bpp for the pipe */
1423	if (intel_dp->compliance.test_data.bpc != 0) {
1424		int bpp = 3 * intel_dp->compliance.test_data.bpc;
1425
1426		limits->min_bpp = limits->max_bpp = bpp;
1427		pipe_config->dither_force_disable = bpp == 6 * 3;
1428
1429		drm_dbg_kms(&i915->drm, "Setting pipe_bpp to %d\n", bpp);
1430	}
1431
1432	/* Use values requested by Compliance Test Request */
1433	if (intel_dp->compliance.test_type == DP_TEST_LINK_TRAINING) {
1434		int index;
1435
1436		/* Validate the compliance test data since max values
1437		 * might have changed due to link train fallback.
1438		 */
1439		if (intel_dp_link_params_valid(intel_dp, intel_dp->compliance.test_link_rate,
1440					       intel_dp->compliance.test_lane_count)) {
1441			index = intel_dp_rate_index(intel_dp->common_rates,
1442						    intel_dp->num_common_rates,
1443						    intel_dp->compliance.test_link_rate);
1444			if (index >= 0)
1445				limits->min_rate = limits->max_rate =
1446					intel_dp->compliance.test_link_rate;
1447			limits->min_lane_count = limits->max_lane_count =
1448				intel_dp->compliance.test_lane_count;
1449		}
1450	}
1451}
1452
1453static bool has_seamless_m_n(struct intel_connector *connector)
1454{
1455	struct drm_i915_private *i915 = to_i915(connector->base.dev);
1456
1457	/*
1458	 * Seamless M/N reprogramming only implemented
1459	 * for BDW+ double buffered M/N registers so far.
1460	 */
1461	return HAS_DOUBLE_BUFFERED_M_N(i915) &&
1462		intel_panel_drrs_type(connector) == DRRS_TYPE_SEAMLESS;
1463}
1464
1465static int intel_dp_mode_clock(const struct intel_crtc_state *crtc_state,
1466			       const struct drm_connector_state *conn_state)
1467{
1468	struct intel_connector *connector = to_intel_connector(conn_state->connector);
1469	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1470
1471	/* FIXME a bit of a mess wrt clock vs. crtc_clock */
1472	if (has_seamless_m_n(connector))
1473		return intel_panel_highest_mode(connector, adjusted_mode)->clock;
1474	else
1475		return adjusted_mode->crtc_clock;
1476}
1477
1478/* Optimize link config in order: max bpp, min clock, min lanes */
1479static int
1480intel_dp_compute_link_config_wide(struct intel_dp *intel_dp,
1481				  struct intel_crtc_state *pipe_config,
1482				  const struct drm_connector_state *conn_state,
1483				  const struct link_config_limits *limits)
1484{
1485	int bpp, i, lane_count, clock = intel_dp_mode_clock(pipe_config, conn_state);
1486	int mode_rate, link_rate, link_avail;
1487
1488	for (bpp = limits->max_bpp; bpp >= limits->min_bpp; bpp -= 2 * 3) {
1489		int output_bpp = intel_dp_output_bpp(pipe_config->output_format, bpp);
1490
1491		mode_rate = intel_dp_link_required(clock, output_bpp);
1492
1493		for (i = 0; i < intel_dp->num_common_rates; i++) {
1494			link_rate = intel_dp_common_rate(intel_dp, i);
1495			if (link_rate < limits->min_rate ||
1496			    link_rate > limits->max_rate)
1497				continue;
1498
1499			for (lane_count = limits->min_lane_count;
1500			     lane_count <= limits->max_lane_count;
1501			     lane_count <<= 1) {
1502				link_avail = intel_dp_max_data_rate(link_rate,
1503								    lane_count);
1504
1505				if (mode_rate <= link_avail) {
1506					pipe_config->lane_count = lane_count;
1507					pipe_config->pipe_bpp = bpp;
1508					pipe_config->port_clock = link_rate;
1509
1510					return 0;
1511				}
1512			}
1513		}
1514	}
1515
1516	return -EINVAL;
1517}
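
/*
 * Illustration of the search order (made-up limits): for a 148500 kHz mode
 * with RGB output, max_bpp 30 and common rates { 162000, 270000, 540000 } at
 * up to 4 lanes, the first bpp tried is 30 (mode_rate = 148500 * 30 / 8 =
 * 556875 kBps). RBR x1 and x2 are too slow, but RBR x4 provides
 * 162000 * 4 = 648000 kBps, so the result is bpp 30, port_clock 162000 (the
 * lowest workable rate), lane_count 4: maximizing bpp wins over minimizing
 * the link rate, which in turn wins over minimizing the lane count.
 */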
1518
1519int intel_dp_dsc_compute_bpp(struct intel_dp *intel_dp, u8 max_req_bpc)
1520{
1521	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1522	int i, num_bpc;
1523	u8 dsc_bpc[3] = {0};
1524	u8 dsc_max_bpc;
1525
1526	/* Max DSC Input BPC for ICL is 10 and for TGL+ is 12 */
1527	if (DISPLAY_VER(i915) >= 12)
1528		dsc_max_bpc = min_t(u8, 12, max_req_bpc);
1529	else
1530		dsc_max_bpc = min_t(u8, 10, max_req_bpc);
1531
1532	num_bpc = drm_dp_dsc_sink_supported_input_bpcs(intel_dp->dsc_dpcd,
1533						       dsc_bpc);
1534	for (i = 0; i < num_bpc; i++) {
1535		if (dsc_max_bpc >= dsc_bpc[i])
1536			return dsc_bpc[i] * 3;
1537	}
1538
1539	return 0;
1540}
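
/*
 * For example (assuming the DPCD helper reports the supported input bpcs in
 * descending order, e.g. { 12, 10, 8 }): with max_req_bpc == 10 on TGL+,
 * dsc_max_bpc is 10, the 12 bpc entry is skipped and 10 * 3 = 30 is returned
 * as the pipe bpp.
 */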
1541
1542static int intel_dp_source_dsc_version_minor(struct intel_dp *intel_dp)
1543{
1544	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
1545
1546	return DISPLAY_VER(i915) >= 14 ? 2 : 1;
1547}
1548
1549static int intel_dp_sink_dsc_version_minor(struct intel_dp *intel_dp)
1550{
1551	return (intel_dp->dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] & DP_DSC_MINOR_MASK) >>
1552		DP_DSC_MINOR_SHIFT;
1553}
1554
1555static int intel_dp_get_slice_height(int vactive)
1556{
1557	int slice_height;
1558
	/*
	 * The VDSC 1.2a spec, Section 3.8 "Options for Slices", implies that 108
	 * lines is an optimal slice height, but any size can be used as long as
	 * the vertical active is an integer multiple of the slice height and the
	 * maximum vertical slice count requirement is met.
	 */
1565	for (slice_height = 108; slice_height <= vactive; slice_height += 2)
1566		if (vactive % slice_height == 0)
1567			return slice_height;
1568
	/*
	 * Highly unlikely we reach here, as most resolutions will find an
	 * appropriate slice_height in the loop above. Return a slice_height
	 * of 2 as a fallback, since it works with all resolutions.
	 */
1574	return 2;
1575}
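
/*
 * E.g. for a 3840x2160 mode, vactive 2160 is evenly divisible by 108
 * (2160 / 108 = 20), so the loop returns a slice_height of 108 on its first
 * iteration.
 */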
1576
1577static int intel_dp_dsc_compute_params(struct intel_encoder *encoder,
1578				       struct intel_crtc_state *crtc_state)
1579{
1580	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1581	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1582	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1583	u8 line_buf_depth;
1584	int ret;
1585
1586	/*
1587	 * RC_MODEL_SIZE is currently a constant across all configurations.
1588	 *
1589	 * FIXME: Look into using sink defined DPCD DP_DSC_RC_BUF_BLK_SIZE and
1590	 * DP_DSC_RC_BUF_SIZE for this.
1591	 */
1592	vdsc_cfg->rc_model_size = DSC_RC_MODEL_SIZE_CONST;
1593	vdsc_cfg->pic_height = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1594
1595	vdsc_cfg->slice_height = intel_dp_get_slice_height(vdsc_cfg->pic_height);
1596
1597	ret = intel_dsc_compute_params(crtc_state);
1598	if (ret)
1599		return ret;
1600
1601	vdsc_cfg->dsc_version_major =
1602		(intel_dp->dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] &
1603		 DP_DSC_MAJOR_MASK) >> DP_DSC_MAJOR_SHIFT;
1604	vdsc_cfg->dsc_version_minor =
1605		min(intel_dp_source_dsc_version_minor(intel_dp),
1606		    intel_dp_sink_dsc_version_minor(intel_dp));
1607	if (vdsc_cfg->convert_rgb)
1608		vdsc_cfg->convert_rgb =
1609			intel_dp->dsc_dpcd[DP_DSC_DEC_COLOR_FORMAT_CAP - DP_DSC_SUPPORT] &
1610			DP_DSC_RGB;
1611
1612	line_buf_depth = drm_dp_dsc_sink_line_buf_depth(intel_dp->dsc_dpcd);
1613	if (!line_buf_depth) {
1614		drm_dbg_kms(&i915->drm,
1615			    "DSC Sink Line Buffer Depth invalid\n");
1616		return -EINVAL;
1617	}
1618
1619	if (vdsc_cfg->dsc_version_minor == 2)
1620		vdsc_cfg->line_buf_depth = (line_buf_depth == DSC_1_2_MAX_LINEBUF_DEPTH_BITS) ?
1621			DSC_1_2_MAX_LINEBUF_DEPTH_VAL : line_buf_depth;
1622	else
1623		vdsc_cfg->line_buf_depth = (line_buf_depth > DSC_1_1_MAX_LINEBUF_DEPTH_BITS) ?
1624			DSC_1_1_MAX_LINEBUF_DEPTH_BITS : line_buf_depth;
1625
1626	vdsc_cfg->block_pred_enable =
1627		intel_dp->dsc_dpcd[DP_DSC_BLK_PREDICTION_SUPPORT - DP_DSC_SUPPORT] &
1628		DP_DSC_BLK_PREDICTION_IS_SUPPORTED;
1629
1630	return drm_dsc_compute_rc_parameters(vdsc_cfg);
1631}
1632
1633static bool intel_dp_dsc_supports_format(struct intel_dp *intel_dp,
1634					 enum intel_output_format output_format)
1635{
1636	u8 sink_dsc_format;
1637
1638	switch (output_format) {
1639	case INTEL_OUTPUT_FORMAT_RGB:
1640		sink_dsc_format = DP_DSC_RGB;
1641		break;
1642	case INTEL_OUTPUT_FORMAT_YCBCR444:
1643		sink_dsc_format = DP_DSC_YCbCr444;
1644		break;
1645	case INTEL_OUTPUT_FORMAT_YCBCR420:
1646		if (min(intel_dp_source_dsc_version_minor(intel_dp),
1647			intel_dp_sink_dsc_version_minor(intel_dp)) < 2)
1648			return false;
1649		sink_dsc_format = DP_DSC_YCbCr420_Native;
1650		break;
1651	default:
1652		return false;
1653	}
1654
1655	return drm_dp_dsc_sink_supports_format(intel_dp->dsc_dpcd, sink_dsc_format);
1656}
1657
1658int intel_dp_dsc_compute_config(struct intel_dp *intel_dp,
1659				struct intel_crtc_state *pipe_config,
1660				struct drm_connector_state *conn_state,
1661				struct link_config_limits *limits,
1662				int timeslots,
1663				bool compute_pipe_bpp)
1664{
1665	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1666	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
1667	const struct drm_display_mode *adjusted_mode =
1668		&pipe_config->hw.adjusted_mode;
1669	int pipe_bpp;
1670	int ret;
1671
1672	pipe_config->fec_enable = !intel_dp_is_edp(intel_dp) &&
1673		intel_dp_supports_fec(intel_dp, pipe_config);
1674
1675	if (!intel_dp_supports_dsc(intel_dp, pipe_config))
1676		return -EINVAL;
1677
1678	if (!intel_dp_dsc_supports_format(intel_dp, pipe_config->output_format))
1679		return -EINVAL;
1680
1681	if (compute_pipe_bpp)
1682		pipe_bpp = intel_dp_dsc_compute_bpp(intel_dp, conn_state->max_requested_bpc);
1683	else
1684		pipe_bpp = pipe_config->pipe_bpp;
1685
1686	if (intel_dp->force_dsc_bpc) {
1687		pipe_bpp = intel_dp->force_dsc_bpc * 3;
1688		drm_dbg_kms(&dev_priv->drm, "Input DSC BPP forced to %d", pipe_bpp);
1689	}
1690
1691	/* Min Input BPC for ICL+ is 8 */
1692	if (pipe_bpp < 8 * 3) {
1693		drm_dbg_kms(&dev_priv->drm,
1694			    "No DSC support for less than 8bpc\n");
1695		return -EINVAL;
1696	}
1697
1698	/*
1699	 * For now enable DSC for max bpp, max link rate, max lane count.
1700	 * Optimize this later for the minimum possible link rate/lane count
1701	 * with DSC enabled for the requested mode.
1702	 */
1703	pipe_config->pipe_bpp = pipe_bpp;
1704	pipe_config->port_clock = limits->max_rate;
1705	pipe_config->lane_count = limits->max_lane_count;
1706
1707	if (intel_dp_is_edp(intel_dp)) {
1708		pipe_config->dsc.compressed_bpp =
1709			min_t(u16, drm_edp_dsc_sink_output_bpp(intel_dp->dsc_dpcd) >> 4,
1710			      pipe_config->pipe_bpp);
1711		pipe_config->dsc.slice_count =
1712			drm_dp_dsc_sink_max_slice_count(intel_dp->dsc_dpcd,
1713							true);
1714		if (!pipe_config->dsc.slice_count) {
1715			drm_dbg_kms(&dev_priv->drm, "Unsupported Slice Count %d\n",
1716				    pipe_config->dsc.slice_count);
1717			return -EINVAL;
1718		}
1719	} else {
1720		u16 dsc_max_output_bpp = 0;
1721		u8 dsc_dp_slice_count;
1722
1723		if (compute_pipe_bpp) {
1724			dsc_max_output_bpp =
1725				intel_dp_dsc_get_output_bpp(dev_priv,
1726							    pipe_config->port_clock,
1727							    pipe_config->lane_count,
1728							    adjusted_mode->crtc_clock,
1729							    adjusted_mode->crtc_hdisplay,
1730							    pipe_config->bigjoiner_pipes,
1731							    pipe_bpp,
1732							    timeslots);
			/*
			 * According to DSC 1.2a Section 4.1.1 Table 4.1, the maximum
			 * supported PPS bpp value is 63.9375, and since bpp must be
			 * programmed as double the target bpp, this restricts our
			 * target bpp to at most 31.9375.
			 */
1739			if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
1740				dsc_max_output_bpp = min_t(u16, dsc_max_output_bpp, 31 << 4);
1741
1742			if (!dsc_max_output_bpp) {
1743				drm_dbg_kms(&dev_priv->drm,
1744					    "Compressed BPP not supported\n");
1745				return -EINVAL;
1746			}
1747		}
1748		dsc_dp_slice_count =
1749			intel_dp_dsc_get_slice_count(intel_dp,
1750						     adjusted_mode->crtc_clock,
1751						     adjusted_mode->crtc_hdisplay,
1752						     pipe_config->bigjoiner_pipes);
1753		if (!dsc_dp_slice_count) {
1754			drm_dbg_kms(&dev_priv->drm,
1755				    "Compressed Slice Count not supported\n");
1756			return -EINVAL;
1757		}
1758
1759		/*
1760		 * compute_pipe_bpp is set to false for the DP MST DSC case,
1761		 * where compressed_bpp is calculated at the same time the
1762		 * VCPI timeslots are allocated, because the overall bpp
1763		 * calculation procedure is a bit different for MST.
1764		 */
1765		if (compute_pipe_bpp) {
1766			pipe_config->dsc.compressed_bpp = min_t(u16,
1767								dsc_max_output_bpp >> 4,
1768								pipe_config->pipe_bpp);
1769		}
1770		pipe_config->dsc.slice_count = dsc_dp_slice_count;
1771		drm_dbg_kms(&dev_priv->drm, "DSC: compressed bpp %d slice count %d\n",
1772			    pipe_config->dsc.compressed_bpp,
1773			    pipe_config->dsc.slice_count);
1774	}
1775	/*
1776	 * VDSC engine operates at 1 Pixel per clock, so if peak pixel rate
1777	 * is greater than the maximum Cdclock and if slice count is even
1778	 * then we need to use 2 VDSC instances.
1779	 */
1780	if (pipe_config->bigjoiner_pipes || pipe_config->dsc.slice_count > 1)
1781		pipe_config->dsc.dsc_split = true;
1782
1783	ret = intel_dp_dsc_compute_params(&dig_port->base, pipe_config);
1784	if (ret < 0) {
1785		drm_dbg_kms(&dev_priv->drm,
1786			    "Cannot compute valid DSC parameters for Input Bpp = %d "
1787			    "Compressed BPP = %d\n",
1788			    pipe_config->pipe_bpp,
1789			    pipe_config->dsc.compressed_bpp);
1790		return ret;
1791	}
1792
1793	pipe_config->dsc.compression_enable = true;
1794	drm_dbg_kms(&dev_priv->drm, "DP DSC computed with Input Bpp = %d "
1795		    "Compressed Bpp = %d Slice Count = %d\n",
1796		    pipe_config->pipe_bpp,
1797		    pipe_config->dsc.compressed_bpp,
1798		    pipe_config->dsc.slice_count);
1799
1800	return 0;
1801}
1802
1803static int
1804intel_dp_compute_link_config(struct intel_encoder *encoder,
1805			     struct intel_crtc_state *pipe_config,
1806			     struct drm_connector_state *conn_state,
1807			     bool respect_downstream_limits)
1808{
1809	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1810	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
1811	const struct drm_display_mode *adjusted_mode =
1812		&pipe_config->hw.adjusted_mode;
1813	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1814	struct link_config_limits limits;
1815	bool joiner_needs_dsc = false;
1816	int ret;
1817
1818	limits.min_rate = intel_dp_common_rate(intel_dp, 0);
1819	limits.max_rate = intel_dp_max_link_rate(intel_dp);
1820
1821	limits.min_lane_count = 1;
1822	limits.max_lane_count = intel_dp_max_lane_count(intel_dp);
1823
1824	limits.min_bpp = intel_dp_min_bpp(pipe_config->output_format);
1825	limits.max_bpp = intel_dp_max_bpp(intel_dp, pipe_config, respect_downstream_limits);
1826
1827	if (intel_dp->use_max_params) {
1828		/*
1829		 * Use the maximum clock and number of lanes the eDP panel
1830		 * advertises being capable of in case the initial fast
1831		 * optimal params failed us. The panels are generally
1832		 * designed to support only a single clock and lane
1833		 * configuration, and typically on older panels these
1834		 * values correspond to the native resolution of the panel.
1835		 */
1836		limits.min_lane_count = limits.max_lane_count;
1837		limits.min_rate = limits.max_rate;
1838	}
1839
1840	intel_dp_adjust_compliance_config(intel_dp, pipe_config, &limits);
1841
1842	drm_dbg_kms(&i915->drm, "DP link computation with max lane count %i "
1843		    "max rate %d max bpp %d pixel clock %iKHz\n",
1844		    limits.max_lane_count, limits.max_rate,
1845		    limits.max_bpp, adjusted_mode->crtc_clock);
1846
1847	if (intel_dp_need_bigjoiner(intel_dp, adjusted_mode->crtc_hdisplay,
1848				    adjusted_mode->crtc_clock))
1849		pipe_config->bigjoiner_pipes = GENMASK(crtc->pipe + 1, crtc->pipe);
1850
1851	/*
1852	 * Pipe joiner needs compression on display 12 and earlier due to
1853	 * bandwidth limitations. From DG2 onwards the pipe joiner can be
1854	 * enabled without compression.
1855	 */
1856	joiner_needs_dsc = DISPLAY_VER(i915) < 13 && pipe_config->bigjoiner_pipes;
1857
1858	/*
1859	 * Optimize for slow and wide for everything, because there are some
1860	 * eDP 1.3 and 1.4 panels that don't work well with fast and narrow.
1861	 */
1862	ret = intel_dp_compute_link_config_wide(intel_dp, pipe_config, conn_state, &limits);
1863
1864	if (ret || joiner_needs_dsc || intel_dp->force_dsc_en) {
1865		drm_dbg_kms(&i915->drm, "Try DSC (fallback=%s, joiner=%s, force=%s)\n",
1866			    str_yes_no(ret), str_yes_no(joiner_needs_dsc),
1867			    str_yes_no(intel_dp->force_dsc_en));
1868		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
1869						  conn_state, &limits, 64, true);
1870		if (ret < 0)
1871			return ret;
1872	}
1873
1874	if (pipe_config->dsc.compression_enable) {
1875		drm_dbg_kms(&i915->drm,
1876			    "DP lane count %d clock %d Input bpp %d Compressed bpp %d\n",
1877			    pipe_config->lane_count, pipe_config->port_clock,
1878			    pipe_config->pipe_bpp,
1879			    pipe_config->dsc.compressed_bpp);
1880
1881		drm_dbg_kms(&i915->drm,
1882			    "DP link rate required %i available %i\n",
1883			    intel_dp_link_required(adjusted_mode->crtc_clock,
1884						   pipe_config->dsc.compressed_bpp),
1885			    intel_dp_max_data_rate(pipe_config->port_clock,
1886						   pipe_config->lane_count));
1887	} else {
1888		drm_dbg_kms(&i915->drm, "DP lane count %d clock %d bpp %d\n",
1889			    pipe_config->lane_count, pipe_config->port_clock,
1890			    pipe_config->pipe_bpp);
1891
1892		drm_dbg_kms(&i915->drm,
1893			    "DP link rate required %i available %i\n",
1894			    intel_dp_link_required(adjusted_mode->crtc_clock,
1895						   pipe_config->pipe_bpp),
1896			    intel_dp_max_data_rate(pipe_config->port_clock,
1897						   pipe_config->lane_count));
1898	}
1899	return 0;
1900}
1901
1902bool intel_dp_limited_color_range(const struct intel_crtc_state *crtc_state,
1903				  const struct drm_connector_state *conn_state)
1904{
1905	const struct intel_digital_connector_state *intel_conn_state =
1906		to_intel_digital_connector_state(conn_state);
1907	const struct drm_display_mode *adjusted_mode =
1908		&crtc_state->hw.adjusted_mode;
1909
1910	/*
1911	 * Our YCbCr output is always limited range.
1912	 * crtc_state->limited_color_range only applies to RGB,
1913	 * and it must never be set for YCbCr or we risk setting
1914	 * some conflicting bits in TRANSCONF which will mess up
1915	 * the colors on the monitor.
1916	 */
1917	if (crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB)
1918		return false;
1919
1920	if (intel_conn_state->broadcast_rgb == INTEL_BROADCAST_RGB_AUTO) {
1921		/*
1922		 * See:
1923		 * CEA-861-E - 5.1 Default Encoding Parameters
1924		 * VESA DisplayPort Ver.1.2a - 5.1.1.1 Video Colorimetry
1925		 */
1926		return crtc_state->pipe_bpp != 18 &&
1927			drm_default_rgb_quant_range(adjusted_mode) ==
1928			HDMI_QUANTIZATION_RANGE_LIMITED;
1929	} else {
1930		return intel_conn_state->broadcast_rgb ==
1931			INTEL_BROADCAST_RGB_LIMITED;
1932	}
1933}
1934
1935static bool intel_dp_port_has_audio(struct drm_i915_private *dev_priv,
1936				    enum port port)
1937{
1938	if (IS_G4X(dev_priv))
1939		return false;
1940	if (DISPLAY_VER(dev_priv) < 12 && port == PORT_A)
1941		return false;
1942
1943	return true;
1944}
1945
1946static void intel_dp_compute_vsc_colorimetry(const struct intel_crtc_state *crtc_state,
1947					     const struct drm_connector_state *conn_state,
1948					     struct drm_dp_vsc_sdp *vsc)
1949{
1950	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
1951	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1952
1953	/*
1954	 * Prepare VSC Header for SU as per DP 1.4 spec, Table 2-118
1955	 * VSC SDP supporting 3D stereo, PSR2, and Pixel Encoding/
1956	 * Colorimetry Format indication.
1957	 */
1958	vsc->revision = 0x5;
1959	vsc->length = 0x13;
1960
1961	/* DP 1.4a spec, Table 2-120 */
1962	switch (crtc_state->output_format) {
1963	case INTEL_OUTPUT_FORMAT_YCBCR444:
1964		vsc->pixelformat = DP_PIXELFORMAT_YUV444;
1965		break;
1966	case INTEL_OUTPUT_FORMAT_YCBCR420:
1967		vsc->pixelformat = DP_PIXELFORMAT_YUV420;
1968		break;
1969	case INTEL_OUTPUT_FORMAT_RGB:
1970	default:
1971		vsc->pixelformat = DP_PIXELFORMAT_RGB;
1972	}
1973
1974	switch (conn_state->colorspace) {
1975	case DRM_MODE_COLORIMETRY_BT709_YCC:
1976		vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
1977		break;
1978	case DRM_MODE_COLORIMETRY_XVYCC_601:
1979		vsc->colorimetry = DP_COLORIMETRY_XVYCC_601;
1980		break;
1981	case DRM_MODE_COLORIMETRY_XVYCC_709:
1982		vsc->colorimetry = DP_COLORIMETRY_XVYCC_709;
1983		break;
1984	case DRM_MODE_COLORIMETRY_SYCC_601:
1985		vsc->colorimetry = DP_COLORIMETRY_SYCC_601;
1986		break;
1987	case DRM_MODE_COLORIMETRY_OPYCC_601:
1988		vsc->colorimetry = DP_COLORIMETRY_OPYCC_601;
1989		break;
1990	case DRM_MODE_COLORIMETRY_BT2020_CYCC:
1991		vsc->colorimetry = DP_COLORIMETRY_BT2020_CYCC;
1992		break;
1993	case DRM_MODE_COLORIMETRY_BT2020_RGB:
1994		vsc->colorimetry = DP_COLORIMETRY_BT2020_RGB;
1995		break;
1996	case DRM_MODE_COLORIMETRY_BT2020_YCC:
1997		vsc->colorimetry = DP_COLORIMETRY_BT2020_YCC;
1998		break;
1999	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_D65:
2000	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_THEATER:
2001		vsc->colorimetry = DP_COLORIMETRY_DCI_P3_RGB;
2002		break;
2003	default:
2004		/*
2005		 * RGB->YCBCR color conversion uses the BT.709
2006		 * color space.
2007		 */
2008		if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
2009			vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
2010		else
2011			vsc->colorimetry = DP_COLORIMETRY_DEFAULT;
2012		break;
2013	}
2014
2015	vsc->bpc = crtc_state->pipe_bpp / 3;
2016
2017	/* only RGB pixelformat supports 6 bpc */
2018	drm_WARN_ON(&dev_priv->drm,
2019		    vsc->bpc == 6 && vsc->pixelformat != DP_PIXELFORMAT_RGB);
2020
2021	/* all YCbCr are always limited range */
2022	vsc->dynamic_range = DP_DYNAMIC_RANGE_CTA;
2023	vsc->content_type = DP_CONTENT_TYPE_NOT_DEFINED;
2024}
2025
2026static void intel_dp_compute_vsc_sdp(struct intel_dp *intel_dp,
2027				     struct intel_crtc_state *crtc_state,
2028				     const struct drm_connector_state *conn_state)
2029{
2030	struct drm_dp_vsc_sdp *vsc = &crtc_state->infoframes.vsc;
2031
2032	/* When a crtc state has PSR, VSC SDP will be handled by PSR routine */
2033	if (crtc_state->has_psr)
2034		return;
2035
2036	if (!intel_dp_needs_vsc_sdp(crtc_state, conn_state))
2037		return;
2038
2039	crtc_state->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
2040	vsc->sdp_type = DP_SDP_VSC;
2041	intel_dp_compute_vsc_colorimetry(crtc_state, conn_state,
2042					 &crtc_state->infoframes.vsc);
2043}
2044
2045void intel_dp_compute_psr_vsc_sdp(struct intel_dp *intel_dp,
2046				  const struct intel_crtc_state *crtc_state,
2047				  const struct drm_connector_state *conn_state,
2048				  struct drm_dp_vsc_sdp *vsc)
2049{
2050	vsc->sdp_type = DP_SDP_VSC;
2051
2052	if (crtc_state->has_psr2) {
2053		if (intel_dp->psr.colorimetry_support &&
2054		    intel_dp_needs_vsc_sdp(crtc_state, conn_state)) {
2055			/* [PSR2, +Colorimetry] */
2056			intel_dp_compute_vsc_colorimetry(crtc_state, conn_state,
2057							 vsc);
2058		} else {
2059			/*
2060			 * [PSR2, -Colorimetry]
2061			 * Prepare VSC Header for SU as per eDP 1.4 spec, Table 6-11
2062			 * 3D stereo + PSR/PSR2 + Y-coordinate.
2063			 */
2064			vsc->revision = 0x4;
2065			vsc->length = 0xe;
2066		}
2067	} else {
2068		/*
2069		 * [PSR1]
2070		 * Prepare VSC Header for SU as per DP 1.4 spec, Table 2-118
2071		 * VSC SDP supporting 3D stereo + PSR (applies to eDP v1.3 or
2072		 * higher).
2073		 */
2074		vsc->revision = 0x2;
2075		vsc->length = 0x8;
2076	}
2077}
2078
2079static void
2080intel_dp_compute_hdr_metadata_infoframe_sdp(struct intel_dp *intel_dp,
2081					    struct intel_crtc_state *crtc_state,
2082					    const struct drm_connector_state *conn_state)
2083{
2084	int ret;
2085	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
2086	struct hdmi_drm_infoframe *drm_infoframe = &crtc_state->infoframes.drm.drm;
2087
2088	if (!conn_state->hdr_output_metadata)
2089		return;
2090
2091	ret = drm_hdmi_infoframe_set_hdr_metadata(drm_infoframe, conn_state);
2092
2093	if (ret) {
2094		drm_dbg_kms(&dev_priv->drm, "couldn't set HDR metadata in infoframe\n");
2095		return;
2096	}
2097
2098	crtc_state->infoframes.enable |=
2099		intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GAMUT_METADATA);
2100}
2101
2102static bool cpu_transcoder_has_drrs(struct drm_i915_private *i915,
2103				    enum transcoder cpu_transcoder)
2104{
2105	if (HAS_DOUBLE_BUFFERED_M_N(i915))
2106		return true;
2107
2108	return intel_cpu_transcoder_has_m2_n2(i915, cpu_transcoder);
2109}
2110
2111static bool can_enable_drrs(struct intel_connector *connector,
2112			    const struct intel_crtc_state *pipe_config,
2113			    const struct drm_display_mode *downclock_mode)
2114{
2115	struct drm_i915_private *i915 = to_i915(connector->base.dev);
2116
2117	if (pipe_config->vrr.enable)
2118		return false;
2119
2120	/*
2121	 * DRRS and PSR can't be enabled together, so give preference to PSR
2122	 * as it allows more power savings by completely shutting down the
2123	 * display. To guarantee this, intel_drrs_compute_config() must be
2124	 * called after intel_psr_compute_config().
2125	 */
2126	if (pipe_config->has_psr)
2127		return false;
2128
2129	/* FIXME missing FDI M2/N2 etc. */
2130	if (pipe_config->has_pch_encoder)
2131		return false;
2132
2133	if (!cpu_transcoder_has_drrs(i915, pipe_config->cpu_transcoder))
2134		return false;
2135
2136	return downclock_mode &&
2137		intel_panel_drrs_type(connector) == DRRS_TYPE_SEAMLESS;
2138}
2139
2140static void
2141intel_dp_drrs_compute_config(struct intel_connector *connector,
2142			     struct intel_crtc_state *pipe_config,
2143			     int output_bpp)
2144{
2145	struct drm_i915_private *i915 = to_i915(connector->base.dev);
2146	const struct drm_display_mode *downclock_mode =
2147		intel_panel_downclock_mode(connector, &pipe_config->hw.adjusted_mode);
2148	int pixel_clock;
2149
2150	if (has_seamless_m_n(connector))
2151		pipe_config->seamless_m_n = true;
2152
2153	if (!can_enable_drrs(connector, pipe_config, downclock_mode)) {
2154		if (intel_cpu_transcoder_has_m2_n2(i915, pipe_config->cpu_transcoder))
2155			intel_zero_m_n(&pipe_config->dp_m2_n2);
2156		return;
2157	}
2158
2159	if (IS_IRONLAKE(i915) || IS_SANDYBRIDGE(i915) || IS_IVYBRIDGE(i915))
2160		pipe_config->msa_timing_delay = connector->panel.vbt.edp.drrs_msa_timing_delay;
2161
2162	pipe_config->has_drrs = true;
2163
2164	pixel_clock = downclock_mode->clock;
2165	if (pipe_config->splitter.enable)
2166		pixel_clock /= pipe_config->splitter.link_count;
2167
2168	intel_link_compute_m_n(output_bpp, pipe_config->lane_count, pixel_clock,
2169			       pipe_config->port_clock, &pipe_config->dp_m2_n2,
2170			       pipe_config->fec_enable);
2171
2172	/* FIXME: abstract this better */
2173	if (pipe_config->splitter.enable)
2174		pipe_config->dp_m2_n2.data_m *= pipe_config->splitter.link_count;
2175}
2176
2177static bool intel_dp_has_audio(struct intel_encoder *encoder,
2178			       const struct drm_connector_state *conn_state)
2179{
2180	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2181	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2182	struct intel_connector *connector = intel_dp->attached_connector;
2183	const struct intel_digital_connector_state *intel_conn_state =
2184		to_intel_digital_connector_state(conn_state);
2185
2186	if (!intel_dp_port_has_audio(i915, encoder->port))
2187		return false;
2188
2189	if (intel_conn_state->force_audio == HDMI_AUDIO_AUTO)
2190		return connector->base.display_info.has_audio;
2191	else
2192		return intel_conn_state->force_audio == HDMI_AUDIO_ON;
2193}
2194
2195static int
2196intel_dp_compute_output_format(struct intel_encoder *encoder,
2197			       struct intel_crtc_state *crtc_state,
2198			       struct drm_connector_state *conn_state,
2199			       bool respect_downstream_limits)
2200{
2201	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2202	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2203	struct intel_connector *connector = intel_dp->attached_connector;
2204	const struct drm_display_info *info = &connector->base.display_info;
2205	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
2206	bool ycbcr_420_only;
2207	int ret;
2208
2209	ycbcr_420_only = drm_mode_is_420_only(info, adjusted_mode);
2210
2211	if (ycbcr_420_only && !connector->base.ycbcr_420_allowed) {
2212		drm_dbg_kms(&i915->drm,
2213			    "YCbCr 4:2:0 mode but YCbCr 4:2:0 output not possible. Falling back to RGB.\n");
2214		crtc_state->sink_format = INTEL_OUTPUT_FORMAT_RGB;
2215	} else {
2216		crtc_state->sink_format = intel_dp_sink_format(connector, adjusted_mode);
2217	}
2218
2219	crtc_state->output_format = intel_dp_output_format(connector, crtc_state->sink_format);
2220
2221	ret = intel_dp_compute_link_config(encoder, crtc_state, conn_state,
2222					   respect_downstream_limits);
2223	if (ret) {
2224		if (crtc_state->sink_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
2225		    !connector->base.ycbcr_420_allowed ||
2226		    !drm_mode_is_420_also(info, adjusted_mode))
2227			return ret;
2228
2229		crtc_state->sink_format = INTEL_OUTPUT_FORMAT_YCBCR420;
2230		crtc_state->output_format = intel_dp_output_format(connector,
2231								   crtc_state->sink_format);
2232		ret = intel_dp_compute_link_config(encoder, crtc_state, conn_state,
2233						   respect_downstream_limits);
2234	}
2235
2236	return ret;
2237}
2238
2239static void
2240intel_dp_audio_compute_config(struct intel_encoder *encoder,
2241			      struct intel_crtc_state *pipe_config,
2242			      struct drm_connector_state *conn_state)
2243{
2244	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2245	struct drm_connector *connector = conn_state->connector;
2246
2247	pipe_config->sdp_split_enable =
2248		intel_dp_has_audio(encoder, conn_state) &&
2249		intel_dp_is_uhbr(pipe_config);
2250
2251	drm_dbg_kms(&i915->drm, "[CONNECTOR:%d:%s] SDP split enable: %s\n",
2252		    connector->base.id, connector->name,
2253		    str_yes_no(pipe_config->sdp_split_enable));
2254}
2255
2256int
2257intel_dp_compute_config(struct intel_encoder *encoder,
2258			struct intel_crtc_state *pipe_config,
2259			struct drm_connector_state *conn_state)
2260{
2261	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2262	struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
2263	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2264	const struct drm_display_mode *fixed_mode;
2265	struct intel_connector *connector = intel_dp->attached_connector;
2266	int ret = 0, output_bpp;
2267
2268	if (HAS_PCH_SPLIT(dev_priv) && !HAS_DDI(dev_priv) && encoder->port != PORT_A)
2269		pipe_config->has_pch_encoder = true;
2270
2271	pipe_config->has_audio =
2272		intel_dp_has_audio(encoder, conn_state) &&
2273		intel_audio_compute_config(encoder, pipe_config, conn_state);
2274
2275	fixed_mode = intel_panel_fixed_mode(connector, adjusted_mode);
2276	if (intel_dp_is_edp(intel_dp) && fixed_mode) {
2277		ret = intel_panel_compute_config(connector, adjusted_mode);
2278		if (ret)
2279			return ret;
2280	}
2281
2282	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
2283		return -EINVAL;
2284
2285	if (!connector->base.interlace_allowed &&
2286	    adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
2287		return -EINVAL;
2288
2289	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
2290		return -EINVAL;
2291
2292	if (intel_dp_hdisplay_bad(dev_priv, adjusted_mode->crtc_hdisplay))
2293		return -EINVAL;
2294
2295	/*
2296	 * Try to respect downstream TMDS clock limits first, if
2297	 * that fails assume the user might know something we don't.
2298	 */
2299	ret = intel_dp_compute_output_format(encoder, pipe_config, conn_state, true);
2300	if (ret)
2301		ret = intel_dp_compute_output_format(encoder, pipe_config, conn_state, false);
2302	if (ret)
2303		return ret;
2304
2305	if ((intel_dp_is_edp(intel_dp) && fixed_mode) ||
2306	    pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
2307		ret = intel_panel_fitting(pipe_config, conn_state);
2308		if (ret)
2309			return ret;
2310	}
2311
2312	pipe_config->limited_color_range =
2313		intel_dp_limited_color_range(pipe_config, conn_state);
2314
2315	pipe_config->enhanced_framing =
2316		drm_dp_enhanced_frame_cap(intel_dp->dpcd);
2317
2318	if (pipe_config->dsc.compression_enable)
2319		output_bpp = pipe_config->dsc.compressed_bpp;
2320	else
2321		output_bpp = intel_dp_output_bpp(pipe_config->output_format,
2322						 pipe_config->pipe_bpp);
2323
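	/*
	 * eDP MSO: each of the n links drives a 1/n wide segment of the
	 * panel (plus the pixel overlap), so scale the transcoder timings
	 * down to a single segment here. intel_edp_mso_mode_fixup() does
	 * the inverse when generating the connector mode.
	 */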
2324	if (intel_dp->mso_link_count) {
2325		int n = intel_dp->mso_link_count;
2326		int overlap = intel_dp->mso_pixel_overlap;
2327
2328		pipe_config->splitter.enable = true;
2329		pipe_config->splitter.link_count = n;
2330		pipe_config->splitter.pixel_overlap = overlap;
2331
2332		drm_dbg_kms(&dev_priv->drm, "MSO link count %d, pixel overlap %d\n",
2333			    n, overlap);
2334
2335		adjusted_mode->crtc_hdisplay = adjusted_mode->crtc_hdisplay / n + overlap;
2336		adjusted_mode->crtc_hblank_start = adjusted_mode->crtc_hblank_start / n + overlap;
2337		adjusted_mode->crtc_hblank_end = adjusted_mode->crtc_hblank_end / n + overlap;
2338		adjusted_mode->crtc_hsync_start = adjusted_mode->crtc_hsync_start / n + overlap;
2339		adjusted_mode->crtc_hsync_end = adjusted_mode->crtc_hsync_end / n + overlap;
2340		adjusted_mode->crtc_htotal = adjusted_mode->crtc_htotal / n + overlap;
2341		adjusted_mode->crtc_clock /= n;
2342	}
2343
2344	intel_dp_audio_compute_config(encoder, pipe_config, conn_state);
2345
2346	intel_link_compute_m_n(output_bpp,
2347			       pipe_config->lane_count,
2348			       adjusted_mode->crtc_clock,
2349			       pipe_config->port_clock,
2350			       &pipe_config->dp_m_n,
2351			       pipe_config->fec_enable);
2352
2353	/* FIXME: abstract this better */
2354	if (pipe_config->splitter.enable)
2355		pipe_config->dp_m_n.data_m *= pipe_config->splitter.link_count;
2356
2357	if (!HAS_DDI(dev_priv))
2358		g4x_dp_set_clock(encoder, pipe_config);
2359
2360	intel_vrr_compute_config(pipe_config, conn_state);
2361	intel_psr_compute_config(intel_dp, pipe_config, conn_state);
2362	intel_dp_drrs_compute_config(connector, pipe_config, output_bpp);
2363	intel_dp_compute_vsc_sdp(intel_dp, pipe_config, conn_state);
2364	intel_dp_compute_hdr_metadata_infoframe_sdp(intel_dp, pipe_config, conn_state);
2365
2366	return 0;
2367}
2368
2369void intel_dp_set_link_params(struct intel_dp *intel_dp,
2370			      int link_rate, int lane_count)
2371{
2372	memset(intel_dp->train_set, 0, sizeof(intel_dp->train_set));
2373	intel_dp->link_trained = false;
2374	intel_dp->link_rate = link_rate;
2375	intel_dp->lane_count = lane_count;
2376}
2377
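/* Reset the cached max link params to the source/sink common maxima. */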
2378static void intel_dp_reset_max_link_params(struct intel_dp *intel_dp)
2379{
2380	intel_dp->max_link_lane_count = intel_dp_max_common_lane_count(intel_dp);
2381	intel_dp->max_link_rate = intel_dp_max_common_rate(intel_dp);
2382}
2383
2384/* Enable backlight PWM and backlight PP control. */
2385void intel_edp_backlight_on(const struct intel_crtc_state *crtc_state,
2386			    const struct drm_connector_state *conn_state)
2387{
2388	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(conn_state->best_encoder));
2389	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2390
2391	if (!intel_dp_is_edp(intel_dp))
2392		return;
2393
2394	drm_dbg_kms(&i915->drm, "\n");
2395
2396	intel_backlight_enable(crtc_state, conn_state);
2397	intel_pps_backlight_on(intel_dp);
2398}
2399
2400/* Disable backlight PP control and backlight PWM. */
2401void intel_edp_backlight_off(const struct drm_connector_state *old_conn_state)
2402{
2403	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(old_conn_state->best_encoder));
2404	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2405
2406	if (!intel_dp_is_edp(intel_dp))
2407		return;
2408
2409	drm_dbg_kms(&i915->drm, "\n");
2410
2411	intel_pps_backlight_off(intel_dp);
2412	intel_backlight_disable(old_conn_state);
2413}
2414
2415static bool downstream_hpd_needs_d0(struct intel_dp *intel_dp)
2416{
2417	/*
2418	 * DPCD 1.2+ should support BRANCH_DEVICE_CTRL, and thus
2419	 * be capable of signalling downstream hpd with a long pulse.
2420	 * Whether or not that means D3 is safe to use is not clear,
2421	 * but let's assume so until proven otherwise.
2422	 *
2423	 * FIXME should really check all downstream ports...
2424	 */
2425	return intel_dp->dpcd[DP_DPCD_REV] == 0x11 &&
2426		drm_dp_is_branch(intel_dp->dpcd) &&
2427		intel_dp->downstream_ports[0] & DP_DS_PORT_HPD;
2428}
2429
2430void intel_dp_sink_set_decompression_state(struct intel_dp *intel_dp,
2431					   const struct intel_crtc_state *crtc_state,
2432					   bool enable)
2433{
2434	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2435	int ret;
2436
2437	if (!crtc_state->dsc.compression_enable)
2438		return;
2439
2440	ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_DSC_ENABLE,
2441				 enable ? DP_DECOMPRESSION_EN : 0);
2442	if (ret < 0)
2443		drm_dbg_kms(&i915->drm,
2444			    "Failed to %s sink decompression state\n",
2445			    str_enable_disable(enable));
2446}
2447
2448static void
2449intel_edp_init_source_oui(struct intel_dp *intel_dp, bool careful)
2450{
2451	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2452	u8 oui[] = { 0x00, 0xaa, 0x01 };
2453	u8 buf[3] = { 0 };
2454
2455	/*
2456	 * During driver init, we want to be careful and avoid changing the source OUI if it's
2457	 * already set to what we want, so as to avoid clearing any state by accident
2458	 */
2459	if (careful) {
2460		if (drm_dp_dpcd_read(&intel_dp->aux, DP_SOURCE_OUI, buf, sizeof(buf)) < 0)
2461			drm_err(&i915->drm, "Failed to read source OUI\n");
2462
2463		if (memcmp(oui, buf, sizeof(oui)) == 0)
2464			return;
2465	}
2466
2467	if (drm_dp_dpcd_write(&intel_dp->aux, DP_SOURCE_OUI, oui, sizeof(oui)) < 0)
2468		drm_err(&i915->drm, "Failed to write source OUI\n");
2469
2470	intel_dp->last_oui_write = jiffies;
2471}
2472
2473void intel_dp_wait_source_oui(struct intel_dp *intel_dp)
2474{
2475	struct intel_connector *connector = intel_dp->attached_connector;
2476	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2477
2478	drm_dbg_kms(&i915->drm, "[CONNECTOR:%d:%s] Performing OUI wait (%u ms)\n",
2479		    connector->base.base.id, connector->base.name,
2480		    connector->panel.vbt.backlight.hdr_dpcd_refresh_timeout);
2481
2482	wait_remaining_ms_from_jiffies(intel_dp->last_oui_write,
2483				       connector->panel.vbt.backlight.hdr_dpcd_refresh_timeout);
2484}
2485
2486/* If the device supports it, try to set the power state appropriately */
2487void intel_dp_set_power(struct intel_dp *intel_dp, u8 mode)
2488{
2489	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
2490	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2491	int ret, i;
2492
2493	/* Should have a valid DPCD by this point */
2494	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
2495		return;
2496
2497	if (mode != DP_SET_POWER_D0) {
2498		if (downstream_hpd_needs_d0(intel_dp))
2499			return;
2500
2501		ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
2502	} else {
2503		struct intel_lspcon *lspcon = dp_to_lspcon(intel_dp);
2504
2505		lspcon_resume(dp_to_dig_port(intel_dp));
2506
2507		/* Write the source OUI as early as possible */
2508		if (intel_dp_is_edp(intel_dp))
2509			intel_edp_init_source_oui(intel_dp, false);
2510
2511		/*
2512		 * When turning on, we need to retry a few times, sleeping 1 ms
2513		 * between attempts, to give the sink time to wake up.
2514		 */
2515		for (i = 0; i < 3; i++) {
2516			ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
2517			if (ret == 1)
2518				break;
2519			msleep(1);
2520		}
2521
2522		if (ret == 1 && lspcon->active)
2523			lspcon_wait_pcon_mode(lspcon);
2524	}
2525
2526	if (ret != 1)
2527		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] Set power to %s failed\n",
2528			    encoder->base.base.id, encoder->base.name,
2529			    mode == DP_SET_POWER_D0 ? "D0" : "D3");
2530}
2531
2532static bool
2533intel_dp_get_dpcd(struct intel_dp *intel_dp);
2534
2535/**
2536 * intel_dp_sync_state - sync the encoder state during init/resume
2537 * @encoder: intel encoder to sync
2538 * @crtc_state: state for the CRTC connected to the encoder
2539 *
2540 * Sync any state stored in the encoder wrt. HW state during driver init
2541 * and system resume.
2542 */
2543void intel_dp_sync_state(struct intel_encoder *encoder,
2544			 const struct intel_crtc_state *crtc_state)
2545{
2546	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2547
2548	if (!crtc_state)
2549		return;
2550
2551	/*
2552	 * Don't clobber DPCD if it's been already read out during output
2553	 * setup (eDP) or detect.
2554	 */
2555	if (intel_dp->dpcd[DP_DPCD_REV] == 0)
2556		intel_dp_get_dpcd(intel_dp);
2557
2558	intel_dp_reset_max_link_params(intel_dp);
2559}
2560
2561bool intel_dp_initial_fastset_check(struct intel_encoder *encoder,
2562				    struct intel_crtc_state *crtc_state)
2563{
2564	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2565	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2566	bool fastset = true;
2567
2568	/*
2569	 * If BIOS has set an unsupported or non-standard link rate for some
2570	 * reason force an encoder recompute and full modeset.
2571	 */
2572	if (intel_dp_rate_index(intel_dp->source_rates, intel_dp->num_source_rates,
2573				crtc_state->port_clock) < 0) {
2574		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] Forcing full modeset due to unsupported link rate\n",
2575			    encoder->base.base.id, encoder->base.name);
2576		crtc_state->uapi.connectors_changed = true;
2577		fastset = false;
2578	}
2579
2580	/*
2581	 * FIXME hack to force full modeset when DSC is being used.
2582	 *
2583	 * As long as we do not have full state readout and config comparison
2584	 * of crtc_state->dsc, we have no way to ensure reliable fastset.
2585	 * Remove once we have readout for DSC.
2586	 */
2587	if (crtc_state->dsc.compression_enable) {
2588		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] Forcing full modeset due to DSC being enabled\n",
2589			    encoder->base.base.id, encoder->base.name);
2590		crtc_state->uapi.mode_changed = true;
2591		fastset = false;
2592	}
2593
2594	if (CAN_PSR(intel_dp)) {
2595		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] Forcing full modeset to compute PSR state\n",
2596			    encoder->base.base.id, encoder->base.name);
2597		crtc_state->uapi.mode_changed = true;
2598		fastset = false;
2599	}
2600
2601	return fastset;
2602}
2603
2604static void intel_dp_get_pcon_dsc_cap(struct intel_dp *intel_dp)
2605{
2606	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2607
2608	/* Clear the cached register set to avoid using stale values */
2609
2610	memset(intel_dp->pcon_dsc_dpcd, 0, sizeof(intel_dp->pcon_dsc_dpcd));
2611
2612	if (drm_dp_dpcd_read(&intel_dp->aux, DP_PCON_DSC_ENCODER,
2613			     intel_dp->pcon_dsc_dpcd,
2614			     sizeof(intel_dp->pcon_dsc_dpcd)) < 0)
2615		drm_err(&i915->drm, "Failed to read DPCD register 0x%x\n",
2616			DP_PCON_DSC_ENCODER);
2617
2618	drm_dbg_kms(&i915->drm, "PCON ENCODER DSC DPCD: %*ph\n",
2619		    (int)sizeof(intel_dp->pcon_dsc_dpcd), intel_dp->pcon_dsc_dpcd);
2620}
2621
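/* Return the highest FRL bandwidth, in Gbps, present in the given bandwidth mask. */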
2622static int intel_dp_pcon_get_frl_mask(u8 frl_bw_mask)
2623{
2624	int bw_gbps[] = {9, 18, 24, 32, 40, 48};
2625	int i;
2626
2627	for (i = ARRAY_SIZE(bw_gbps) - 1; i >= 0; i--) {
2628		if (frl_bw_mask & (1 << i))
2629			return bw_gbps[i];
2630	}
2631	return 0;
2632}
2633
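/* Map a maximum FRL bandwidth in Gbps to the corresponding DPCD bandwidth mask. */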
2634static int intel_dp_pcon_set_frl_mask(int max_frl)
2635{
2636	switch (max_frl) {
2637	case 48:
2638		return DP_PCON_FRL_BW_MASK_48GBPS;
2639	case 40:
2640		return DP_PCON_FRL_BW_MASK_40GBPS;
2641	case 32:
2642		return DP_PCON_FRL_BW_MASK_32GBPS;
2643	case 24:
2644		return DP_PCON_FRL_BW_MASK_24GBPS;
2645	case 18:
2646		return DP_PCON_FRL_BW_MASK_18GBPS;
2647	case 9:
2648		return DP_PCON_FRL_BW_MASK_9GBPS;
2649	}
2650
2651	return 0;
2652}
2653
2654static int intel_dp_hdmi_sink_max_frl(struct intel_dp *intel_dp)
2655{
2656	struct intel_connector *intel_connector = intel_dp->attached_connector;
2657	struct drm_connector *connector = &intel_connector->base;
2658	int max_frl_rate;
2659	int max_lanes, rate_per_lane;
2660	int max_dsc_lanes, dsc_rate_per_lane;
2661
2662	max_lanes = connector->display_info.hdmi.max_lanes;
2663	rate_per_lane = connector->display_info.hdmi.max_frl_rate_per_lane;
2664	max_frl_rate = max_lanes * rate_per_lane;
2665
2666	if (connector->display_info.hdmi.dsc_cap.v_1p2) {
2667		max_dsc_lanes = connector->display_info.hdmi.dsc_cap.max_lanes;
2668		dsc_rate_per_lane = connector->display_info.hdmi.dsc_cap.max_frl_rate_per_lane;
2669		if (max_dsc_lanes && dsc_rate_per_lane)
2670			max_frl_rate = min(max_frl_rate, max_dsc_lanes * dsc_rate_per_lane);
2671	}
2672
2673	return max_frl_rate;
2674}
2675
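/*
 * The PCON is considered FRL trained when its HDMI link is active in FRL
 * mode at (at least) the requested bandwidth mask.
 */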
2676static bool
2677intel_dp_pcon_is_frl_trained(struct intel_dp *intel_dp,
2678			     u8 max_frl_bw_mask, u8 *frl_trained_mask)
2679{
2680	if (drm_dp_pcon_hdmi_link_active(&intel_dp->aux) &&
2681	    drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, frl_trained_mask) == DP_PCON_HDMI_MODE_FRL &&
2682	    *frl_trained_mask >= max_frl_bw_mask)
2683		return true;
2684
2685	return false;
2686}
2687
2688static int intel_dp_pcon_start_frl_training(struct intel_dp *intel_dp)
2689{
2690#define TIMEOUT_FRL_READY_MS 500
2691#define TIMEOUT_HDMI_LINK_ACTIVE_MS 1000
2692
2693	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2694	int max_frl_bw, max_pcon_frl_bw, max_edid_frl_bw, ret;
2695	u8 max_frl_bw_mask = 0, frl_trained_mask;
2696	bool is_active;
2697
2698	max_pcon_frl_bw = intel_dp->dfp.pcon_max_frl_bw;
2699	drm_dbg(&i915->drm, "PCON max rate = %d Gbps\n", max_pcon_frl_bw);
2700
2701	max_edid_frl_bw = intel_dp_hdmi_sink_max_frl(intel_dp);
2702	drm_dbg(&i915->drm, "Sink max rate from EDID = %d Gbps\n", max_edid_frl_bw);
2703
2704	max_frl_bw = min(max_edid_frl_bw, max_pcon_frl_bw);
2705
2706	if (max_frl_bw <= 0)
2707		return -EINVAL;
2708
2709	max_frl_bw_mask = intel_dp_pcon_set_frl_mask(max_frl_bw);
2710	drm_dbg(&i915->drm, "MAX_FRL_BW_MASK = %u\n", max_frl_bw_mask);
2711
2712	if (intel_dp_pcon_is_frl_trained(intel_dp, max_frl_bw_mask, &frl_trained_mask))
2713		goto frl_trained;
2714
2715	ret = drm_dp_pcon_frl_prepare(&intel_dp->aux, false);
2716	if (ret < 0)
2717		return ret;
2718	/* Wait for PCON to be FRL Ready */
2719	wait_for(is_active = drm_dp_pcon_is_frl_ready(&intel_dp->aux) == true, TIMEOUT_FRL_READY_MS);
2720
2721	if (!is_active)
2722		return -ETIMEDOUT;
2723
2724	ret = drm_dp_pcon_frl_configure_1(&intel_dp->aux, max_frl_bw,
2725					  DP_PCON_ENABLE_SEQUENTIAL_LINK);
2726	if (ret < 0)
2727		return ret;
2728	ret = drm_dp_pcon_frl_configure_2(&intel_dp->aux, max_frl_bw_mask,
2729					  DP_PCON_FRL_LINK_TRAIN_NORMAL);
2730	if (ret < 0)
2731		return ret;
2732	ret = drm_dp_pcon_frl_enable(&intel_dp->aux);
2733	if (ret < 0)
2734		return ret;
2735	/*
2736	 * Wait for FRL training to complete by checking whether the
2737	 * HDMI link is up and active.
2738	 */
2739	wait_for(is_active =
2740		 intel_dp_pcon_is_frl_trained(intel_dp, max_frl_bw_mask, &frl_trained_mask),
2741		 TIMEOUT_HDMI_LINK_ACTIVE_MS);
2742
2743	if (!is_active)
2744		return -ETIMEDOUT;
2745
2746frl_trained:
2747	drm_dbg(&i915->drm, "FRL_TRAINED_MASK = %u\n", frl_trained_mask);
2748	intel_dp->frl.trained_rate_gbps = intel_dp_pcon_get_frl_mask(frl_trained_mask);
2749	intel_dp->frl.is_trained = true;
2750	drm_dbg(&i915->drm, "FRL trained with : %d Gbps\n", intel_dp->frl.trained_rate_gbps);
2751
2752	return 0;
2753}
2754
2755static bool intel_dp_is_hdmi_2_1_sink(struct intel_dp *intel_dp)
2756{
2757	if (drm_dp_is_branch(intel_dp->dpcd) &&
2758	    intel_dp_has_hdmi_sink(intel_dp) &&
2759	    intel_dp_hdmi_sink_max_frl(intel_dp) > 0)
2760		return true;
2761
2762	return false;
2763}
2764
2765static
2766int intel_dp_pcon_set_tmds_mode(struct intel_dp *intel_dp)
2767{
2768	int ret;
2769	u8 buf = 0;
2770
2771	/* Set PCON source control mode */
2772	buf |= DP_PCON_ENABLE_SOURCE_CTL_MODE;
2773
2774	ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf);
2775	if (ret < 0)
2776		return ret;
2777
2778	/* Set HDMI LINK ENABLE */
2779	buf |= DP_PCON_ENABLE_HDMI_LINK;
2780	ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf);
2781	if (ret < 0)
2782		return ret;
2783
2784	return 0;
2785}
2786
2787void intel_dp_check_frl_training(struct intel_dp *intel_dp)
2788{
2789	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
2790
2791	/*
2792	 * Always go for FRL training if:
2793	 * -PCON supports SRC_CTL_MODE (VESA DP2.0-HDMI2.1 PCON Spec Draft-1 Sec-7)
2794	 * -sink is HDMI2.1
2795	 */
2796	if (!(intel_dp->downstream_ports[2] & DP_PCON_SOURCE_CTL_MODE) ||
2797	    !intel_dp_is_hdmi_2_1_sink(intel_dp) ||
2798	    intel_dp->frl.is_trained)
2799		return;
2800
2801	if (intel_dp_pcon_start_frl_training(intel_dp) < 0) {
2802		int ret, mode;
2803
2804		drm_dbg(&dev_priv->drm, "Couldn't set FRL mode, continuing with TMDS mode\n");
2805		ret = intel_dp_pcon_set_tmds_mode(intel_dp);
2806		mode = drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, NULL);
2807
2808		if (ret < 0 || mode != DP_PCON_HDMI_MODE_TMDS)
2809			drm_dbg(&dev_priv->drm, "Issue with PCON, cannot set TMDS mode\n");
2810	} else {
2811		drm_dbg(&dev_priv->drm, "FRL training Completed\n");
2812	}
2813}
2814
2815static int
2816intel_dp_pcon_dsc_enc_slice_height(const struct intel_crtc_state *crtc_state)
2817{
2818	int vactive = crtc_state->hw.adjusted_mode.vdisplay;
2819
2820	return intel_hdmi_dsc_get_slice_height(vactive);
2821}
2822
2823static int
2824intel_dp_pcon_dsc_enc_slices(struct intel_dp *intel_dp,
2825			     const struct intel_crtc_state *crtc_state)
2826{
2827	struct intel_connector *intel_connector = intel_dp->attached_connector;
2828	struct drm_connector *connector = &intel_connector->base;
2829	int hdmi_throughput = connector->display_info.hdmi.dsc_cap.clk_per_slice;
2830	int hdmi_max_slices = connector->display_info.hdmi.dsc_cap.max_slices;
2831	int pcon_max_slices = drm_dp_pcon_dsc_max_slices(intel_dp->pcon_dsc_dpcd);
2832	int pcon_max_slice_width = drm_dp_pcon_dsc_max_slice_width(intel_dp->pcon_dsc_dpcd);
2833
2834	return intel_hdmi_dsc_get_num_slices(crtc_state, pcon_max_slices,
2835					     pcon_max_slice_width,
2836					     hdmi_max_slices, hdmi_throughput);
2837}
2838
2839static int
2840intel_dp_pcon_dsc_enc_bpp(struct intel_dp *intel_dp,
2841			  const struct intel_crtc_state *crtc_state,
2842			  int num_slices, int slice_width)
2843{
2844	struct intel_connector *intel_connector = intel_dp->attached_connector;
2845	struct drm_connector *connector = &intel_connector->base;
2846	int output_format = crtc_state->output_format;
2847	bool hdmi_all_bpp = connector->display_info.hdmi.dsc_cap.all_bpp;
2848	int pcon_fractional_bpp = drm_dp_pcon_dsc_bpp_incr(intel_dp->pcon_dsc_dpcd);
2849	int hdmi_max_chunk_bytes =
2850		connector->display_info.hdmi.dsc_cap.total_chunk_kbytes * 1024;
2851
2852	return intel_hdmi_dsc_get_bpp(pcon_fractional_bpp, slice_width,
2853				      num_slices, output_format, hdmi_all_bpp,
2854				      hdmi_max_chunk_bytes);
2855}
2856
2857void
2858intel_dp_pcon_dsc_configure(struct intel_dp *intel_dp,
2859			    const struct intel_crtc_state *crtc_state)
2860{
2861	u8 pps_param[6];
2862	int slice_height;
2863	int slice_width;
2864	int num_slices;
2865	int bits_per_pixel;
2866	int ret;
2867	struct intel_connector *intel_connector = intel_dp->attached_connector;
2868	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2869	struct drm_connector *connector;
2870	bool hdmi_is_dsc_1_2;
2871
2872	if (!intel_dp_is_hdmi_2_1_sink(intel_dp))
2873		return;
2874
2875	if (!intel_connector)
2876		return;
2877	connector = &intel_connector->base;
2878	hdmi_is_dsc_1_2 = connector->display_info.hdmi.dsc_cap.v_1p2;
2879
2880	if (!drm_dp_pcon_enc_is_dsc_1_2(intel_dp->pcon_dsc_dpcd) ||
2881	    !hdmi_is_dsc_1_2)
2882		return;
2883
2884	slice_height = intel_dp_pcon_dsc_enc_slice_height(crtc_state);
2885	if (!slice_height)
2886		return;
2887
2888	num_slices = intel_dp_pcon_dsc_enc_slices(intel_dp, crtc_state);
2889	if (!num_slices)
2890		return;
2891
2892	slice_width = DIV_ROUND_UP(crtc_state->hw.adjusted_mode.hdisplay,
2893				   num_slices);
2894
2895	bits_per_pixel = intel_dp_pcon_dsc_enc_bpp(intel_dp, crtc_state,
2896						   num_slices, slice_width);
2897	if (!bits_per_pixel)
2898		return;
2899
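	/*
	 * Pack the PPS override parameters: slice height and slice width as
	 * little-endian 16-bit values, followed by the 10-bit bits_per_pixel.
	 */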
2900	pps_param[0] = slice_height & 0xFF;
2901	pps_param[1] = slice_height >> 8;
2902	pps_param[2] = slice_width & 0xFF;
2903	pps_param[3] = slice_width >> 8;
2904	pps_param[4] = bits_per_pixel & 0xFF;
2905	pps_param[5] = (bits_per_pixel >> 8) & 0x3;
2906
2907	ret = drm_dp_pcon_pps_override_param(&intel_dp->aux, pps_param);
2908	if (ret < 0)
2909		drm_dbg_kms(&i915->drm, "Failed to set pcon DSC\n");
2910}
2911
2912void intel_dp_configure_protocol_converter(struct intel_dp *intel_dp,
2913					   const struct intel_crtc_state *crtc_state)
2914{
2915	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2916	bool ycbcr444_to_420 = false;
2917	bool rgb_to_ycbcr = false;
2918	u8 tmp;
2919
2920	if (intel_dp->dpcd[DP_DPCD_REV] < 0x13)
2921		return;
2922
2923	if (!drm_dp_is_branch(intel_dp->dpcd))
2924		return;
2925
2926	tmp = intel_dp_has_hdmi_sink(intel_dp) ? DP_HDMI_DVI_OUTPUT_CONFIG : 0;
2927
2928	if (drm_dp_dpcd_writeb(&intel_dp->aux,
2929			       DP_PROTOCOL_CONVERTER_CONTROL_0, tmp) != 1)
2930		drm_dbg_kms(&i915->drm, "Failed to %s protocol converter HDMI mode\n",
2931			    str_enable_disable(intel_dp_has_hdmi_sink(intel_dp)));
2932
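	/*
	 * Work out which format conversions the protocol converter has to
	 * perform, based on the format we output vs. the format the sink
	 * ultimately expects.
	 */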
2933	if (crtc_state->sink_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
2934		switch (crtc_state->output_format) {
2935		case INTEL_OUTPUT_FORMAT_YCBCR420:
2936			break;
2937		case INTEL_OUTPUT_FORMAT_YCBCR444:
2938			ycbcr444_to_420 = true;
2939			break;
2940		case INTEL_OUTPUT_FORMAT_RGB:
2941			rgb_to_ycbcr = true;
2942			ycbcr444_to_420 = true;
2943			break;
2944		default:
2945			MISSING_CASE(crtc_state->output_format);
2946			break;
2947		}
2948	} else if (crtc_state->sink_format == INTEL_OUTPUT_FORMAT_YCBCR444) {
2949		switch (crtc_state->output_format) {
2950		case INTEL_OUTPUT_FORMAT_YCBCR444:
2951			break;
2952		case INTEL_OUTPUT_FORMAT_RGB:
2953			rgb_to_ycbcr = true;
2954			break;
2955		default:
2956			MISSING_CASE(crtc_state->output_format);
2957			break;
2958		}
2959	}
2960
2961	tmp = ycbcr444_to_420 ? DP_CONVERSION_TO_YCBCR420_ENABLE : 0;
2962
2963	if (drm_dp_dpcd_writeb(&intel_dp->aux,
2964			       DP_PROTOCOL_CONVERTER_CONTROL_1, tmp) != 1)
2965		drm_dbg_kms(&i915->drm,
2966			    "Failed to %s protocol converter YCbCr 4:2:0 conversion mode\n",
2967			    str_enable_disable(ycbcr444_to_420));
2968
2969	tmp = rgb_to_ycbcr ? DP_CONVERSION_BT709_RGB_YCBCR_ENABLE : 0;
2970
2971	if (drm_dp_pcon_convert_rgb_to_ycbcr(&intel_dp->aux, tmp) < 0)
2972		drm_dbg_kms(&i915->drm,
2973			    "Failed to %s protocol converter RGB->YCbCr conversion mode\n",
2974			    str_enable_disable(tmp));
2975}
2976
2977bool intel_dp_get_colorimetry_status(struct intel_dp *intel_dp)
2978{
2979	u8 dprx = 0;
2980
2981	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_DPRX_FEATURE_ENUMERATION_LIST,
2982			      &dprx) != 1)
2983		return false;
2984	return dprx & DP_VSC_SDP_EXT_FOR_COLORIMETRY_SUPPORTED;
2985}
2986
2987static void intel_dp_get_dsc_sink_cap(struct intel_dp *intel_dp)
2988{
2989	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
2990
2991	/*
2992	 * Clear the cached register set to avoid using stale values
2993	 * for the sinks that do not support DSC.
2994	 */
2995	memset(intel_dp->dsc_dpcd, 0, sizeof(intel_dp->dsc_dpcd));
2996
2997	/* Clear fec_capable to avoid using stale values */
2998	intel_dp->fec_capable = 0;
2999
3000	/* Cache the DSC DPCD if eDP or DP rev >= 1.4 */
3001	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x14 ||
3002	    intel_dp->edp_dpcd[0] >= DP_EDP_14) {
3003		if (drm_dp_dpcd_read(&intel_dp->aux, DP_DSC_SUPPORT,
3004				     intel_dp->dsc_dpcd,
3005				     sizeof(intel_dp->dsc_dpcd)) < 0)
3006			drm_err(&i915->drm,
3007				"Failed to read DPCD register 0x%x\n",
3008				DP_DSC_SUPPORT);
3009
3010		drm_dbg_kms(&i915->drm, "DSC DPCD: %*ph\n",
3011			    (int)sizeof(intel_dp->dsc_dpcd),
3012			    intel_dp->dsc_dpcd);
3013
3014		/* FEC is supported only on DP 1.4 */
3015		if (!intel_dp_is_edp(intel_dp) &&
3016		    drm_dp_dpcd_readb(&intel_dp->aux, DP_FEC_CAPABILITY,
3017				      &intel_dp->fec_capable) < 0)
3018			drm_err(&i915->drm,
3019				"Failed to read FEC DPCD register\n");
3020
3021		drm_dbg_kms(&i915->drm, "FEC CAPABILITY: %x\n",
3022			    intel_dp->fec_capable);
3023	}
3024}
3025
3026static void intel_edp_mso_mode_fixup(struct intel_connector *connector,
3027				     struct drm_display_mode *mode)
3028{
3029	struct intel_dp *intel_dp = intel_attached_dp(connector);
3030	struct drm_i915_private *i915 = to_i915(connector->base.dev);
3031	int n = intel_dp->mso_link_count;
3032	int overlap = intel_dp->mso_pixel_overlap;
3033
3034	if (!mode || !n)
3035		return;
3036
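	/*
	 * The EDID mode describes a single MSO segment (including the pixel
	 * overlap); reconstruct the full panel mode from it.
	 */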
3037	mode->hdisplay = (mode->hdisplay - overlap) * n;
3038	mode->hsync_start = (mode->hsync_start - overlap) * n;
3039	mode->hsync_end = (mode->hsync_end - overlap) * n;
3040	mode->htotal = (mode->htotal - overlap) * n;
3041	mode->clock *= n;
3042
3043	drm_mode_set_name(mode);
3044
3045	drm_dbg_kms(&i915->drm,
3046		    "[CONNECTOR:%d:%s] using generated MSO mode: " DRM_MODE_FMT "\n",
3047		    connector->base.base.id, connector->base.name,
3048		    DRM_MODE_ARG(mode));
3049}
3050
3051void intel_edp_fixup_vbt_bpp(struct intel_encoder *encoder, int pipe_bpp)
3052{
3053	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3054	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3055	struct intel_connector *connector = intel_dp->attached_connector;
3056
3057	if (connector->panel.vbt.edp.bpp && pipe_bpp > connector->panel.vbt.edp.bpp) {
3058		/*
3059		 * This is a big fat ugly hack.
3060		 *
3061		 * Some machines in UEFI boot mode provide us a VBT that has 18
3062		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
3063		 * unknown we fail to light up. Yet the same BIOS boots up with
3064		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
3065		 * max, not what it tells us to use.
3066		 *
3067		 * Note: This will still be broken if the eDP panel is not lit
3068		 * up by the BIOS, and thus we can't get the mode at module
3069		 * load.
3070		 */
3071		drm_dbg_kms(&dev_priv->drm,
3072			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
3073			    pipe_bpp, connector->panel.vbt.edp.bpp);
3074		connector->panel.vbt.edp.bpp = pipe_bpp;
3075	}
3076}
3077
3078static void intel_edp_mso_init(struct intel_dp *intel_dp)
3079{
3080	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3081	struct intel_connector *connector = intel_dp->attached_connector;
3082	struct drm_display_info *info = &connector->base.display_info;
3083	u8 mso;
3084
3085	if (intel_dp->edp_dpcd[0] < DP_EDP_14)
3086		return;
3087
3088	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_EDP_MSO_LINK_CAPABILITIES, &mso) != 1) {
3089		drm_err(&i915->drm, "Failed to read MSO cap\n");
3090		return;
3091	}
3092
3093	/* Valid configurations are SST or MSO 2x1, 2x2, 4x1 */
3094	mso &= DP_EDP_MSO_NUMBER_OF_LINKS_MASK;
3095	if (mso % 2 || mso > drm_dp_max_lane_count(intel_dp->dpcd)) {
3096		drm_err(&i915->drm, "Invalid MSO link count cap %u\n", mso);
3097		mso = 0;
3098	}
3099
3100	if (mso) {
3101		drm_dbg_kms(&i915->drm, "Sink MSO %ux%u configuration, pixel overlap %u\n",
3102			    mso, drm_dp_max_lane_count(intel_dp->dpcd) / mso,
3103			    info->mso_pixel_overlap);
3104		if (!HAS_MSO(i915)) {
3105			drm_err(&i915->drm, "No source MSO support, disabling\n");
3106			mso = 0;
3107		}
3108	}
3109
3110	intel_dp->mso_link_count = mso;
3111	intel_dp->mso_pixel_overlap = mso ? info->mso_pixel_overlap : 0;
3112}
3113
3114static bool
3115intel_edp_init_dpcd(struct intel_dp *intel_dp)
3116{
3117	struct drm_i915_private *dev_priv =
3118		to_i915(dp_to_dig_port(intel_dp)->base.base.dev);
3119
3120	/* this function is meant to be called only once */
3121	drm_WARN_ON(&dev_priv->drm, intel_dp->dpcd[DP_DPCD_REV] != 0);
3122
3123	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd) != 0)
3124		return false;
3125
3126	drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
3127			 drm_dp_is_branch(intel_dp->dpcd));
3128
3129	/*
3130	 * Read the eDP display control registers.
3131	 *
3132	 * Do this independent of DP_DPCD_DISPLAY_CONTROL_CAPABLE bit in
3133	 * DP_EDP_CONFIGURATION_CAP, because some buggy displays do not have it
3134	 * set, but require eDP 1.4+ detection (e.g. for supported link rates
3135	 * method). The display control registers should read zero if they're
3136	 * not supported anyway.
3137	 */
3138	if (drm_dp_dpcd_read(&intel_dp->aux, DP_EDP_DPCD_REV,
3139			     intel_dp->edp_dpcd, sizeof(intel_dp->edp_dpcd)) ==
3140			     sizeof(intel_dp->edp_dpcd)) {
3141		drm_dbg_kms(&dev_priv->drm, "eDP DPCD: %*ph\n",
3142			    (int)sizeof(intel_dp->edp_dpcd),
3143			    intel_dp->edp_dpcd);
3144
3145		intel_dp->use_max_params = intel_dp->edp_dpcd[0] < DP_EDP_14;
3146	}
3147
3148	/*
3149	 * This has to be called after intel_dp->edp_dpcd is filled, PSR checks
3150	 * for SET_POWER_CAPABLE bit in intel_dp->edp_dpcd[1]
3151	 */
3152	intel_psr_init_dpcd(intel_dp);
3153
3154	/* Clear the default sink rates */
3155	intel_dp->num_sink_rates = 0;
3156
3157	/* Read the eDP 1.4+ supported link rates. */
3158	if (intel_dp->edp_dpcd[0] >= DP_EDP_14) {
3159		__le16 sink_rates[DP_MAX_SUPPORTED_RATES];
3160		int i;
3161
3162		drm_dp_dpcd_read(&intel_dp->aux, DP_SUPPORTED_LINK_RATES,
3163				sink_rates, sizeof(sink_rates));
3164
3165		for (i = 0; i < ARRAY_SIZE(sink_rates); i++) {
3166			int val = le16_to_cpu(sink_rates[i]);
3167
3168			if (val == 0)
3169				break;
3170
3171			/* Value read multiplied by 200kHz gives the per-lane
3172			 * link rate in kHz. The source rates are, however,
3173			 * stored in terms of LS_Clk kHz. The full conversion
3174			 * back to symbols is
3175			 * (val * 200kHz)*(8/10 ch. encoding)*(1/8 bit to Byte)
3176			 */
3177			intel_dp->sink_rates[i] = (val * 200) / 10;
3178		}
3179		intel_dp->num_sink_rates = i;
3180	}
3181
3182	/*
3183	 * Use DP_LINK_RATE_SET if DP_SUPPORTED_LINK_RATES are available,
3184	 * default to DP_MAX_LINK_RATE and DP_LINK_BW_SET otherwise.
3185	 */
3186	if (intel_dp->num_sink_rates)
3187		intel_dp->use_rate_select = true;
3188	else
3189		intel_dp_set_sink_rates(intel_dp);
3190	intel_dp_set_max_sink_lane_count(intel_dp);
3191
3192	/* Read the eDP DSC DPCD registers */
3193	if (HAS_DSC(dev_priv))
3194		intel_dp_get_dsc_sink_cap(intel_dp);
3195
3196	/*
3197	 * If needed, program our source OUI so we can make various Intel-specific AUX services
3198	 * available (such as HDR backlight controls)
3199	 */
3200	intel_edp_init_source_oui(intel_dp, true);
3201
3202	return true;
3203}
3204
3205static bool
3206intel_dp_has_sink_count(struct intel_dp *intel_dp)
3207{
3208	if (!intel_dp->attached_connector)
3209		return false;
3210
3211	return drm_dp_read_sink_count_cap(&intel_dp->attached_connector->base,
3212					  intel_dp->dpcd,
3213					  &intel_dp->desc);
3214}
3215
3216static bool
3217intel_dp_get_dpcd(struct intel_dp *intel_dp)
3218{
3219	int ret;
3220
3221	if (intel_dp_init_lttpr_and_dprx_caps(intel_dp) < 0)
3222		return false;
3223
3224	/*
3225	 * Don't clobber cached eDP rates. Also skip re-reading
3226	 * the OUI/ID since we know it won't change.
3227	 */
3228	if (!intel_dp_is_edp(intel_dp)) {
3229		drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
3230				 drm_dp_is_branch(intel_dp->dpcd));
3231
3232		intel_dp_set_sink_rates(intel_dp);
3233		intel_dp_set_max_sink_lane_count(intel_dp);
3234		intel_dp_set_common_rates(intel_dp);
3235	}
3236
3237	if (intel_dp_has_sink_count(intel_dp)) {
3238		ret = drm_dp_read_sink_count(&intel_dp->aux);
3239		if (ret < 0)
3240			return false;
3241
3242		/*
3243		 * Sink count can change between short pulse HPD interrupts,
3244		 * hence a member variable in intel_dp tracks any changes
3245		 * between them.
3246		 */
3247		intel_dp->sink_count = ret;
3248
3249		/*
3250		 * SINK_COUNT == 0 and DOWNSTREAM_PORT_PRESENT == 1 implies that
3251		 * a dongle is present but no display. Unless we need to know
3252		 * whether a dongle is present, we don't need to update the
3253		 * downstream port information, so an early return here saves
3254		 * time by skipping operations that are not required.
3255		 */
3256		if (!intel_dp->sink_count)
3257			return false;
3258	}
3259
3260	return drm_dp_read_downstream_info(&intel_dp->aux, intel_dp->dpcd,
3261					   intel_dp->downstream_ports) == 0;
3262}
3263
3264static bool
3265intel_dp_can_mst(struct intel_dp *intel_dp)
3266{
3267	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3268
3269	return i915->params.enable_dp_mst &&
3270		intel_dp_mst_source_support(intel_dp) &&
3271		drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd);
3272}
3273
3274static void
3275intel_dp_configure_mst(struct intel_dp *intel_dp)
3276{
3277	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3278	struct intel_encoder *encoder =
3279		&dp_to_dig_port(intel_dp)->base;
3280	bool sink_can_mst = drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd);
3281
3282	drm_dbg_kms(&i915->drm,
3283		    "[ENCODER:%d:%s] MST support: port: %s, sink: %s, modparam: %s\n",
3284		    encoder->base.base.id, encoder->base.name,
3285		    str_yes_no(intel_dp_mst_source_support(intel_dp)),
3286		    str_yes_no(sink_can_mst),
3287		    str_yes_no(i915->params.enable_dp_mst));
3288
3289	if (!intel_dp_mst_source_support(intel_dp))
3290		return;
3291
3292	intel_dp->is_mst = sink_can_mst &&
3293		i915->params.enable_dp_mst;
3294
3295	drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
3296					intel_dp->is_mst);
3297}
3298
3299static bool
3300intel_dp_get_sink_irq_esi(struct intel_dp *intel_dp, u8 *esi)
3301{
3302	return drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_COUNT_ESI, esi, 4) == 4;
3303}
3304
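/*
 * Ack the handled sink IRQ ESI events by writing the event bytes back,
 * retrying the AUX write up to three times.
 */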
3305static bool intel_dp_ack_sink_irq_esi(struct intel_dp *intel_dp, u8 esi[4])
3306{
3307	int retry;
3308
3309	for (retry = 0; retry < 3; retry++) {
3310		if (drm_dp_dpcd_write(&intel_dp->aux, DP_SINK_COUNT_ESI + 1,
3311				      &esi[1], 3) == 3)
3312			return true;
3313	}
3314
3315	return false;
3316}
3317
3318bool
3319intel_dp_needs_vsc_sdp(const struct intel_crtc_state *crtc_state,
3320		       const struct drm_connector_state *conn_state)
3321{
3322	/*
3323	 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication
3324	 * of Color Encoding Format and Content Color Gamut], in order to
3325	 * send YCbCr 420 or HDR BT.2020 signals we should use the DP VSC SDP.
3326	 */
3327	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
3328		return true;
3329
3330	switch (conn_state->colorspace) {
3331	case DRM_MODE_COLORIMETRY_SYCC_601:
3332	case DRM_MODE_COLORIMETRY_OPYCC_601:
3333	case DRM_MODE_COLORIMETRY_BT2020_YCC:
3334	case DRM_MODE_COLORIMETRY_BT2020_RGB:
3335	case DRM_MODE_COLORIMETRY_BT2020_CYCC:
3336		return true;
3337	default:
3338		break;
3339	}
3340
3341	return false;
3342}
3343
3344static ssize_t intel_dp_vsc_sdp_pack(const struct drm_dp_vsc_sdp *vsc,
3345				     struct dp_sdp *sdp, size_t size)
3346{
3347	size_t length = sizeof(struct dp_sdp);
3348
3349	if (size < length)
3350		return -ENOSPC;
3351
3352	memset(sdp, 0, size);
3353
3354	/*
3355	 * Prepare VSC Header for SU as per DP 1.4a spec, Table 2-119
3356	 * VSC SDP Header Bytes
3357	 */
3358	sdp->sdp_header.HB0 = 0; /* Secondary-Data Packet ID = 0 */
3359	sdp->sdp_header.HB1 = vsc->sdp_type; /* Secondary-data Packet Type */
3360	sdp->sdp_header.HB2 = vsc->revision; /* Revision Number */
3361	sdp->sdp_header.HB3 = vsc->length; /* Number of Valid Data Bytes */
3362
3363	/*
3364	 * Only revision 0x5 supports Pixel Encoding/Colorimetry Format as
3365	 * per DP 1.4a spec.
3366	 */
3367	if (vsc->revision != 0x5)
3368		goto out;
3369
3370	/* VSC SDP Payload for DB16 through DB18 */
3371	/* Pixel Encoding and Colorimetry Formats  */
3372	sdp->db[16] = (vsc->pixelformat & 0xf) << 4; /* DB16[7:4] */
3373	sdp->db[16] |= vsc->colorimetry & 0xf; /* DB16[3:0] */
3374
3375	switch (vsc->bpc) {
3376	case 6:
3377		/* 6bpc: 0x0 */
3378		break;
3379	case 8:
3380		sdp->db[17] = 0x1; /* DB17[3:0] */
3381		break;
3382	case 10:
3383		sdp->db[17] = 0x2;
3384		break;
3385	case 12:
3386		sdp->db[17] = 0x3;
3387		break;
3388	case 16:
3389		sdp->db[17] = 0x4;
3390		break;
3391	default:
3392		MISSING_CASE(vsc->bpc);
3393		break;
3394	}
3395	/* Dynamic Range and Component Bit Depth */
3396	if (vsc->dynamic_range == DP_DYNAMIC_RANGE_CTA)
3397		sdp->db[17] |= 0x80;  /* DB17[7] */
3398
3399	/* Content Type */
3400	sdp->db[18] = vsc->content_type & 0x7;
3401
3402out:
3403	return length;
3404}
3405
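/*
 * A quick size check for the HDR metadata packing below, using the kernel's
 * infoframe constants (HDMI_INFOFRAME_HEADER_SIZE = 4, HDMI_DRM_INFOFRAME_SIZE = 26):
 *
 *	infoframe_size = 4 + 26 = 30, hence HB2 = 30 - 1 = 0x1D
 *	returned SDP length = sizeof(struct dp_sdp_header) + 2 + 26
 *			    = 4 + 2 + 26 = 32 bytes
 *
 * i.e. the SDP carries the 4 byte SDP header, the two CTA header bytes
 * (version and length) and the 26 byte Dynamic Range and Mastering payload.
 */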
3406static ssize_t
3407intel_dp_hdr_metadata_infoframe_sdp_pack(struct drm_i915_private *i915,
3408					 const struct hdmi_drm_infoframe *drm_infoframe,
3409					 struct dp_sdp *sdp,
3410					 size_t size)
3411{
3412	size_t length = sizeof(struct dp_sdp);
3413	const int infoframe_size = HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE;
3414	unsigned char buf[HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE];
3415	ssize_t len;
3416
3417	if (size < length)
3418		return -ENOSPC;
3419
3420	memset(sdp, 0, size);
3421
3422	len = hdmi_drm_infoframe_pack_only(drm_infoframe, buf, sizeof(buf));
3423	if (len < 0) {
3424		drm_dbg_kms(&i915->drm, "buffer size is smaller than hdr metadata infoframe\n");
3425		return -ENOSPC;
3426	}
3427
3428	if (len != infoframe_size) {
3429		drm_dbg_kms(&i915->drm, "wrong static hdr metadata size\n");
3430		return -ENOSPC;
3431	}
3432
3433	/*
3434	 * Set up the infoframe sdp packet for HDR static metadata.
3435	 * Prepare VSC Header for SU as per DP 1.4a spec,
3436	 * Table 2-100 and Table 2-101
3437	 */
3438
3439	/* Secondary-Data Packet ID, 00h for non-Audio INFOFRAME */
3440	sdp->sdp_header.HB0 = 0;
3441	/*
3442	 * Packet Type 80h + Non-audio INFOFRAME Type value
3443	 * HDMI_INFOFRAME_TYPE_DRM: 0x87
3444	 * - 80h + Non-audio INFOFRAME Type value
3445	 * - InfoFrame Type: 0x07
3446	 *    [CTA-861-G Table-42 Dynamic Range and Mastering InfoFrame]
3447	 */
3448	sdp->sdp_header.HB1 = drm_infoframe->type;
3449	/*
3450	 * Least Significant Eight Bits of (Data Byte Count - 1),
3451	 * i.e. infoframe_size - 1 = 0x1D
3452	 */
3453	sdp->sdp_header.HB2 = 0x1D;
3454	/* INFOFRAME SDP Version Number */
3455	sdp->sdp_header.HB3 = (0x13 << 2);
3456	/* CTA Header Byte 2 (INFOFRAME Version Number) */
3457	sdp->db[0] = drm_infoframe->version;
3458	/* CTA Header Byte 3 (Length of INFOFRAME): HDMI_DRM_INFOFRAME_SIZE */
3459	sdp->db[1] = drm_infoframe->length;
3460	/*
3461	 * Copy HDMI_DRM_INFOFRAME_SIZE size from a buffer after
3462	 * HDMI_INFOFRAME_HEADER_SIZE
3463	 */
3464	BUILD_BUG_ON(sizeof(sdp->db) < HDMI_DRM_INFOFRAME_SIZE + 2);
3465	memcpy(&sdp->db[2], &buf[HDMI_INFOFRAME_HEADER_SIZE],
3466	       HDMI_DRM_INFOFRAME_SIZE);
3467
3468	/*
3469	 * Size of DP infoframe sdp packet for HDR static metadata consists of
3470	 * - DP SDP Header(struct dp_sdp_header): 4 bytes
3471	 * - Two Data Blocks: 2 bytes
3472	 *    CTA Header Byte2 (INFOFRAME Version Number)
3473	 *    CTA Header Byte3 (Length of INFOFRAME)
3474	 * - HDMI_DRM_INFOFRAME_SIZE: 26 bytes
3475	 *
3476	 * Prior to GEN11 the GMP register size is identical to the DP HDR static
3477	 * metadata infoframe size. On GEN11+ the register is larger than that;
3478	 * write_infoframe will pad out the remaining space.
3479	 */
3480	return sizeof(struct dp_sdp_header) + 2 + HDMI_DRM_INFOFRAME_SIZE;
3481}
3482
3483static void intel_write_dp_sdp(struct intel_encoder *encoder,
3484			       const struct intel_crtc_state *crtc_state,
3485			       unsigned int type)
3486{
3487	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
3488	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3489	struct dp_sdp sdp = {};
3490	ssize_t len;
3491
3492	if ((crtc_state->infoframes.enable &
3493	     intel_hdmi_infoframe_enable(type)) == 0)
3494		return;
3495
3496	switch (type) {
3497	case DP_SDP_VSC:
3498		len = intel_dp_vsc_sdp_pack(&crtc_state->infoframes.vsc, &sdp,
3499					    sizeof(sdp));
3500		break;
3501	case HDMI_PACKET_TYPE_GAMUT_METADATA:
3502		len = intel_dp_hdr_metadata_infoframe_sdp_pack(dev_priv,
3503							       &crtc_state->infoframes.drm.drm,
3504							       &sdp, sizeof(sdp));
3505		break;
3506	default:
3507		MISSING_CASE(type);
3508		return;
3509	}
3510
3511	if (drm_WARN_ON(&dev_priv->drm, len < 0))
3512		return;
3513
3514	dig_port->write_infoframe(encoder, crtc_state, type, &sdp, len);
3515}
3516
3517void intel_write_dp_vsc_sdp(struct intel_encoder *encoder,
3518			    const struct intel_crtc_state *crtc_state,
3519			    const struct drm_dp_vsc_sdp *vsc)
3520{
3521	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
3522	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3523	struct dp_sdp sdp = {};
3524	ssize_t len;
3525
3526	len = intel_dp_vsc_sdp_pack(vsc, &sdp, sizeof(sdp));
3527
3528	if (drm_WARN_ON(&dev_priv->drm, len < 0))
3529		return;
3530
3531	dig_port->write_infoframe(encoder, crtc_state, DP_SDP_VSC,
3532					&sdp, len);
3533}
3534
3535void intel_dp_set_infoframes(struct intel_encoder *encoder,
3536			     bool enable,
3537			     const struct intel_crtc_state *crtc_state,
3538			     const struct drm_connector_state *conn_state)
3539{
3540	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3541	i915_reg_t reg = HSW_TVIDEO_DIP_CTL(crtc_state->cpu_transcoder);
3542	u32 dip_enable = VIDEO_DIP_ENABLE_AVI_HSW | VIDEO_DIP_ENABLE_GCP_HSW |
3543			 VIDEO_DIP_ENABLE_VS_HSW | VIDEO_DIP_ENABLE_GMP_HSW |
3544			 VIDEO_DIP_ENABLE_SPD_HSW | VIDEO_DIP_ENABLE_DRM_GLK;
3545	u32 val = intel_de_read(dev_priv, reg) & ~dip_enable;
3546
3547	/* TODO: Add DSC case (DIP_ENABLE_PPS) */
3548	/* When PSR is enabled, this routine doesn't disable VSC DIP */
3549	if (!crtc_state->has_psr)
3550		val &= ~VIDEO_DIP_ENABLE_VSC_HSW;
3551
3552	intel_de_write(dev_priv, reg, val);
3553	intel_de_posting_read(dev_priv, reg);
3554
3555	if (!enable)
3556		return;
3557
3558	/* When PSR is enabled, VSC SDP is handled by PSR routine */
3559	if (!crtc_state->has_psr)
3560		intel_write_dp_sdp(encoder, crtc_state, DP_SDP_VSC);
3561
3562	intel_write_dp_sdp(encoder, crtc_state, HDMI_PACKET_TYPE_GAMUT_METADATA);
3563}
3564
3565static int intel_dp_vsc_sdp_unpack(struct drm_dp_vsc_sdp *vsc,
3566				   const void *buffer, size_t size)
3567{
3568	const struct dp_sdp *sdp = buffer;
3569
3570	if (size < sizeof(struct dp_sdp))
3571		return -EINVAL;
3572
3573	memset(vsc, 0, sizeof(*vsc));
3574
3575	if (sdp->sdp_header.HB0 != 0)
3576		return -EINVAL;
3577
3578	if (sdp->sdp_header.HB1 != DP_SDP_VSC)
3579		return -EINVAL;
3580
3581	vsc->sdp_type = sdp->sdp_header.HB1;
3582	vsc->revision = sdp->sdp_header.HB2;
3583	vsc->length = sdp->sdp_header.HB3;
3584
3585	if ((sdp->sdp_header.HB2 == 0x2 && sdp->sdp_header.HB3 == 0x8) ||
3586	    (sdp->sdp_header.HB2 == 0x4 && sdp->sdp_header.HB3 == 0xe)) {
3587		/*
3588		 * - HB2 = 0x2, HB3 = 0x8
3589		 *   VSC SDP supporting 3D stereo + PSR
3590		 * - HB2 = 0x4, HB3 = 0xe
3591		 *   VSC SDP supporting 3D stereo + PSR2 with Y-coordinate of
3592		 *   first scan line of the SU region (applies to eDP v1.4b
3593		 *   and higher).
3594		 */
3595		return 0;
3596	} else if (sdp->sdp_header.HB2 == 0x5 && sdp->sdp_header.HB3 == 0x13) {
3597		/*
3598		 * - HB2 = 0x5, HB3 = 0x13
3599		 *   VSC SDP supporting 3D stereo + PSR2 + Pixel Encoding/Colorimetry
3600		 *   Format.
3601		 */
3602		vsc->pixelformat = (sdp->db[16] >> 4) & 0xf;
3603		vsc->colorimetry = sdp->db[16] & 0xf;
3604		vsc->dynamic_range = (sdp->db[17] >> 7) & 0x1;
3605
3606		switch (sdp->db[17] & 0x7) {
3607		case 0x0:
3608			vsc->bpc = 6;
3609			break;
3610		case 0x1:
3611			vsc->bpc = 8;
3612			break;
3613		case 0x2:
3614			vsc->bpc = 10;
3615			break;
3616		case 0x3:
3617			vsc->bpc = 12;
3618			break;
3619		case 0x4:
3620			vsc->bpc = 16;
3621			break;
3622		default:
3623			MISSING_CASE(sdp->db[17] & 0x7);
3624			return -EINVAL;
3625		}
3626
3627		vsc->content_type = sdp->db[18] & 0x7;
3628	} else {
3629		return -EINVAL;
3630	}
3631
3632	return 0;
3633}
3634
3635static int
3636intel_dp_hdr_metadata_infoframe_sdp_unpack(struct hdmi_drm_infoframe *drm_infoframe,
3637					   const void *buffer, size_t size)
3638{
3639	int ret;
3640
3641	const struct dp_sdp *sdp = buffer;
3642
3643	if (size < sizeof(struct dp_sdp))
3644		return -EINVAL;
3645
3646	if (sdp->sdp_header.HB0 != 0)
3647		return -EINVAL;
3648
3649	if (sdp->sdp_header.HB1 != HDMI_INFOFRAME_TYPE_DRM)
3650		return -EINVAL;
3651
3652	/*
3653	 * Least Significant Eight Bits of (Data Byte Count - 1)
3654	 * 1Dh (i.e., Data Byte Count = 30 bytes).
3655	 */
3656	if (sdp->sdp_header.HB2 != 0x1D)
3657		return -EINVAL;
3658
3659	/* Most Significant Two Bits of (Data Byte Count – 1), Clear to 00b. */
3660	if ((sdp->sdp_header.HB3 & 0x3) != 0)
3661		return -EINVAL;
3662
3663	/* INFOFRAME SDP Version Number */
3664	if (((sdp->sdp_header.HB3 >> 2) & 0x3f) != 0x13)
3665		return -EINVAL;
3666
3667	/* CTA Header Byte 2 (INFOFRAME Version Number) */
3668	if (sdp->db[0] != 1)
3669		return -EINVAL;
3670
3671	/* CTA Header Byte 3 (Length of INFOFRAME): HDMI_DRM_INFOFRAME_SIZE */
3672	if (sdp->db[1] != HDMI_DRM_INFOFRAME_SIZE)
3673		return -EINVAL;
3674
3675	ret = hdmi_drm_infoframe_unpack_only(drm_infoframe, &sdp->db[2],
3676					     HDMI_DRM_INFOFRAME_SIZE);
3677
3678	return ret;
3679}
3680
3681static void intel_read_dp_vsc_sdp(struct intel_encoder *encoder,
3682				  struct intel_crtc_state *crtc_state,
3683				  struct drm_dp_vsc_sdp *vsc)
3684{
3685	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
3686	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3687	unsigned int type = DP_SDP_VSC;
3688	struct dp_sdp sdp = {};
3689	int ret;
3690
3691	/* When PSR is enabled, VSC SDP is handled by PSR routine */
3692	if (crtc_state->has_psr)
3693		return;
3694
3695	if ((crtc_state->infoframes.enable &
3696	     intel_hdmi_infoframe_enable(type)) == 0)
3697		return;
3698
3699	dig_port->read_infoframe(encoder, crtc_state, type, &sdp, sizeof(sdp));
3700
3701	ret = intel_dp_vsc_sdp_unpack(vsc, &sdp, sizeof(sdp));
3702
3703	if (ret)
3704		drm_dbg_kms(&dev_priv->drm, "Failed to unpack DP VSC SDP\n");
3705}
3706
3707static void intel_read_dp_hdr_metadata_infoframe_sdp(struct intel_encoder *encoder,
3708						     struct intel_crtc_state *crtc_state,
3709						     struct hdmi_drm_infoframe *drm_infoframe)
3710{
3711	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
3712	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
3713	unsigned int type = HDMI_PACKET_TYPE_GAMUT_METADATA;
3714	struct dp_sdp sdp = {};
3715	int ret;
3716
3717	if ((crtc_state->infoframes.enable &
3718	    intel_hdmi_infoframe_enable(type)) == 0)
3719		return;
3720
3721	dig_port->read_infoframe(encoder, crtc_state, type, &sdp,
3722				 sizeof(sdp));
3723
3724	ret = intel_dp_hdr_metadata_infoframe_sdp_unpack(drm_infoframe, &sdp,
3725							 sizeof(sdp));
3726
3727	if (ret)
3728		drm_dbg_kms(&dev_priv->drm,
3729			    "Failed to unpack DP HDR Metadata Infoframe SDP\n");
3730}
3731
3732void intel_read_dp_sdp(struct intel_encoder *encoder,
3733		       struct intel_crtc_state *crtc_state,
3734		       unsigned int type)
3735{
3736	switch (type) {
3737	case DP_SDP_VSC:
3738		intel_read_dp_vsc_sdp(encoder, crtc_state,
3739				      &crtc_state->infoframes.vsc);
3740		break;
3741	case HDMI_PACKET_TYPE_GAMUT_METADATA:
3742		intel_read_dp_hdr_metadata_infoframe_sdp(encoder, crtc_state,
3743							 &crtc_state->infoframes.drm.drm);
3744		break;
3745	default:
3746		MISSING_CASE(type);
3747		break;
3748	}
3749}
3750
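/*
 * Note on the link rate conversion below: for the 8b/10b rates
 * drm_dp_bw_code_to_link_rate() multiplies the DPCD bandwidth code by 27000,
 * so e.g. a TEST_LINK_RATE value of 0x14 maps to 0x14 * 27000 = 540000 (HBR2)
 * and 0x1e to 810000 (HBR3).
 */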
3751static u8 intel_dp_autotest_link_training(struct intel_dp *intel_dp)
3752{
3753	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3754	int status = 0;
3755	int test_link_rate;
3756	u8 test_lane_count, test_link_bw;
3757	/* (DP CTS 1.2)
3758	 * 4.3.1.11
3759	 */
3760	/* Read the TEST_LANE_COUNT and TEST_LINK_RATE fields (DP CTS 3.1.4) */
3761	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_LANE_COUNT,
3762				   &test_lane_count);
3763
3764	if (status <= 0) {
3765		drm_dbg_kms(&i915->drm, "Lane count read failed\n");
3766		return DP_TEST_NAK;
3767	}
3768	test_lane_count &= DP_MAX_LANE_COUNT_MASK;
3769
3770	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_LINK_RATE,
3771				   &test_link_bw);
3772	if (status <= 0) {
3773		drm_dbg_kms(&i915->drm, "Link Rate read failed\n");
3774		return DP_TEST_NAK;
3775	}
3776	test_link_rate = drm_dp_bw_code_to_link_rate(test_link_bw);
3777
3778	/* Validate the requested link rate and lane count */
3779	if (!intel_dp_link_params_valid(intel_dp, test_link_rate,
3780					test_lane_count))
3781		return DP_TEST_NAK;
3782
3783	intel_dp->compliance.test_lane_count = test_lane_count;
3784	intel_dp->compliance.test_link_rate = test_link_rate;
3785
3786	return DP_TEST_ACK;
3787}
3788
3789static u8 intel_dp_autotest_video_pattern(struct intel_dp *intel_dp)
3790{
3791	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3792	u8 test_pattern;
3793	u8 test_misc;
3794	__be16 h_width, v_height;
3795	int status = 0;
3796
3797	/* Read the TEST_PATTERN (DP CTS 3.1.5) */
3798	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_PATTERN,
3799				   &test_pattern);
3800	if (status <= 0) {
3801		drm_dbg_kms(&i915->drm, "Test pattern read failed\n");
3802		return DP_TEST_NAK;
3803	}
3804	if (test_pattern != DP_COLOR_RAMP)
3805		return DP_TEST_NAK;
3806
3807	status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_H_WIDTH_HI,
3808				  &h_width, 2);
3809	if (status <= 0) {
3810		drm_dbg_kms(&i915->drm, "H Width read failed\n");
3811		return DP_TEST_NAK;
3812	}
3813
3814	status = drm_dp_dpcd_read(&intel_dp->aux, DP_TEST_V_HEIGHT_HI,
3815				  &v_height, 2);
3816	if (status <= 0) {
3817		drm_dbg_kms(&i915->drm, "V Height read failed\n");
3818		return DP_TEST_NAK;
3819	}
3820
3821	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_MISC0,
3822				   &test_misc);
3823	if (status <= 0) {
3824		drm_dbg_kms(&i915->drm, "TEST MISC read failed\n");
3825		return DP_TEST_NAK;
3826	}
3827	if ((test_misc & DP_TEST_COLOR_FORMAT_MASK) != DP_COLOR_FORMAT_RGB)
3828		return DP_TEST_NAK;
3829	if (test_misc & DP_TEST_DYNAMIC_RANGE_CEA)
3830		return DP_TEST_NAK;
3831	switch (test_misc & DP_TEST_BIT_DEPTH_MASK) {
3832	case DP_TEST_BIT_DEPTH_6:
3833		intel_dp->compliance.test_data.bpc = 6;
3834		break;
3835	case DP_TEST_BIT_DEPTH_8:
3836		intel_dp->compliance.test_data.bpc = 8;
3837		break;
3838	default:
3839		return DP_TEST_NAK;
3840	}
3841
3842	intel_dp->compliance.test_data.video_pattern = test_pattern;
3843	intel_dp->compliance.test_data.hdisplay = be16_to_cpu(h_width);
3844	intel_dp->compliance.test_data.vdisplay = be16_to_cpu(v_height);
3845	/* Set test active flag here so userspace doesn't interrupt things */
3846	intel_dp->compliance.test_active = true;
3847
3848	return DP_TEST_ACK;
3849}
3850
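/*
 * The EDID test below replies with the checksum byte of the last EDID block
 * that was read. Each 128 byte EDID block sums to 0 modulo 256, so the stored
 * checksum is whatever balances the block. A minimal sketch of that rule
 * (hypothetical helper, not used by the driver):
 *
 *	static u8 edid_block_checksum(const u8 block[128])
 *	{
 *		u8 sum = 0;
 *		int i;
 *
 *		for (i = 0; i < 127; i++)
 *			sum += block[i];
 *		return (u8)(0x100 - sum);
 *	}
 *
 * For a valid block the value returned above equals block[127].
 */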
3851static u8 intel_dp_autotest_edid(struct intel_dp *intel_dp)
3852{
3853	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3854	u8 test_result = DP_TEST_ACK;
3855	struct intel_connector *intel_connector = intel_dp->attached_connector;
3856	struct drm_connector *connector = &intel_connector->base;
3857
3858	if (intel_connector->detect_edid == NULL ||
3859	    connector->edid_corrupt ||
3860	    intel_dp->aux.i2c_defer_count > 6) {
3861		/* Check EDID read for NACKs, DEFERs and corruption
3862		 * (DP CTS 1.2 Core r1.1)
3863		 *    4.2.2.4 : Failed EDID read, I2C_NAK
3864		 *    4.2.2.5 : Failed EDID read, I2C_DEFER
3865		 *    4.2.2.6 : EDID corruption detected
3866		 * Use failsafe mode for all cases
3867		 */
3868		if (intel_dp->aux.i2c_nack_count > 0 ||
3869			intel_dp->aux.i2c_defer_count > 0)
3870			drm_dbg_kms(&i915->drm,
3871				    "EDID read had %d NACKs, %d DEFERs\n",
3872				    intel_dp->aux.i2c_nack_count,
3873				    intel_dp->aux.i2c_defer_count);
3874		intel_dp->compliance.test_data.edid = INTEL_DP_RESOLUTION_FAILSAFE;
3875	} else {
3876		/* FIXME: Get rid of drm_edid_raw() */
3877		const struct edid *block = drm_edid_raw(intel_connector->detect_edid);
3878
3879		/* We have to write the checksum of the last block read */
3880		block += block->extensions;
3881
3882		if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_EDID_CHECKSUM,
3883				       block->checksum) <= 0)
3884			drm_dbg_kms(&i915->drm,
3885				    "Failed to write EDID checksum\n");
3886
3887		test_result = DP_TEST_ACK | DP_TEST_EDID_CHECKSUM_WRITE;
3888		intel_dp->compliance.test_data.edid = INTEL_DP_RESOLUTION_PREFERRED;
3889	}
3890
3891	/* Set test active flag here so userspace doesn't interrupt things */
3892	intel_dp->compliance.test_active = true;
3893
3894	return test_result;
3895}
3896
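/*
 * Note on the hardcoded 80 bit custom pattern programmed below: the three
 * DDI_DP_COMP_PAT dwords 0x3e0f83e0 / 0x0f83e0f8 / 0x0000f83e presumably hold
 * bits 31:0, 63:32 and 79:64 of the pattern, i.e. the usual sequence of
 * alternating runs of five ones and five zeroes.
 */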
3897static void intel_dp_phy_pattern_update(struct intel_dp *intel_dp,
3898					const struct intel_crtc_state *crtc_state)
3899{
3900	struct drm_i915_private *dev_priv =
3901			to_i915(dp_to_dig_port(intel_dp)->base.base.dev);
3902	struct drm_dp_phy_test_params *data =
3903			&intel_dp->compliance.test_data.phytest;
3904	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
3905	enum pipe pipe = crtc->pipe;
3906	u32 pattern_val;
3907
3908	switch (data->phy_pattern) {
3909	case DP_PHY_TEST_PATTERN_NONE:
3910		drm_dbg_kms(&dev_priv->drm, "Disable Phy Test Pattern\n");
3911		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe), 0x0);
3912		break;
3913	case DP_PHY_TEST_PATTERN_D10_2:
3914		drm_dbg_kms(&dev_priv->drm, "Set D10.2 Phy Test Pattern\n");
3915		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
3916			       DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_D10_2);
3917		break;
3918	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
3919		drm_dbg_kms(&dev_priv->drm, "Set Error Count Phy Test Pattern\n");
3920		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
3921			       DDI_DP_COMP_CTL_ENABLE |
3922			       DDI_DP_COMP_CTL_SCRAMBLED_0);
3923		break;
3924	case DP_PHY_TEST_PATTERN_PRBS7:
3925		drm_dbg_kms(&dev_priv->drm, "Set PRBS7 Phy Test Pattern\n");
3926		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
3927			       DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_PRBS7);
3928		break;
3929	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
3930		/*
3931		 * FIXME: Ideally the pattern should come from DPCD 0x250. Since
3932		 * current DPR-100 firmware cannot set it, hardcode the pattern
3933		 * for the compliance test for now.
3934		 */
3935		drm_dbg_kms(&dev_priv->drm,
3936			    "Set 80Bit Custom Phy Test Pattern 0x3e0f83e0 0x0f83e0f8 0x0000f83e\n");
3937		pattern_val = 0x3e0f83e0;
3938		intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 0), pattern_val);
3939		pattern_val = 0x0f83e0f8;
3940		intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 1), pattern_val);
3941		pattern_val = 0x0000f83e;
3942		intel_de_write(dev_priv, DDI_DP_COMP_PAT(pipe, 2), pattern_val);
3943		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
3944			       DDI_DP_COMP_CTL_ENABLE |
3945			       DDI_DP_COMP_CTL_CUSTOM80);
3946		break;
3947	case DP_PHY_TEST_PATTERN_CP2520:
3948		/*
3949		 * FIXME: Ideally the pattern should come from DPCD 0x24A. Since
3950		 * current DPR-100 firmware cannot set it, hardcode the pattern
3951		 * for the compliance test for now.
3952		 */
3953		drm_dbg_kms(&dev_priv->drm, "Set HBR2 compliance Phy Test Pattern\n");
3954		pattern_val = 0xFB;
3955		intel_de_write(dev_priv, DDI_DP_COMP_CTL(pipe),
3956			       DDI_DP_COMP_CTL_ENABLE | DDI_DP_COMP_CTL_HBR2 |
3957			       pattern_val);
3958		break;
3959	default:
3960		WARN(1, "Invalid Phy Test Pattern\n");
3961	}
3962}
3963
3964static void intel_dp_process_phy_request(struct intel_dp *intel_dp,
3965					 const struct intel_crtc_state *crtc_state)
3966{
3967	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3968	struct drm_dp_phy_test_params *data =
3969		&intel_dp->compliance.test_data.phytest;
3970	u8 link_status[DP_LINK_STATUS_SIZE];
3971
3972	if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, DP_PHY_DPRX,
3973					     link_status) < 0) {
3974		drm_dbg_kms(&i915->drm, "failed to get link status\n");
3975		return;
3976	}
3977
3978	/* retrieve vswing & pre-emphasis setting */
3979	intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX,
3980				  link_status);
3981
3982	intel_dp_set_signal_levels(intel_dp, crtc_state, DP_PHY_DPRX);
3983
3984	intel_dp_phy_pattern_update(intel_dp, crtc_state);
3985
3986	drm_dp_dpcd_write(&intel_dp->aux, DP_TRAINING_LANE0_SET,
3987			  intel_dp->train_set, crtc_state->lane_count);
3988
3989	drm_dp_set_phy_test_pattern(&intel_dp->aux, data,
3990				    intel_dp->dpcd[DP_DPCD_REV]);
3991}
3992
3993static u8 intel_dp_autotest_phy_pattern(struct intel_dp *intel_dp)
3994{
3995	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
3996	struct drm_dp_phy_test_params *data =
3997		&intel_dp->compliance.test_data.phytest;
3998
3999	if (drm_dp_get_phy_test_pattern(&intel_dp->aux, data)) {
4000		drm_dbg_kms(&i915->drm, "DP Phy Test pattern AUX read failure\n");
4001		return DP_TEST_NAK;
4002	}
4003
4004	/* Set test active flag here so userspace doesn't interrupt things */
4005	intel_dp->compliance.test_active = true;
4006
4007	return DP_TEST_ACK;
4008}
4009
4010static void intel_dp_handle_test_request(struct intel_dp *intel_dp)
4011{
4012	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4013	u8 response = DP_TEST_NAK;
4014	u8 request = 0;
4015	int status;
4016
4017	status = drm_dp_dpcd_readb(&intel_dp->aux, DP_TEST_REQUEST, &request);
4018	if (status <= 0) {
4019		drm_dbg_kms(&i915->drm,
4020			    "Could not read test request from sink\n");
4021		goto update_status;
4022	}
4023
4024	switch (request) {
4025	case DP_TEST_LINK_TRAINING:
4026		drm_dbg_kms(&i915->drm, "LINK_TRAINING test requested\n");
4027		response = intel_dp_autotest_link_training(intel_dp);
4028		break;
4029	case DP_TEST_LINK_VIDEO_PATTERN:
4030		drm_dbg_kms(&i915->drm, "TEST_PATTERN test requested\n");
4031		response = intel_dp_autotest_video_pattern(intel_dp);
4032		break;
4033	case DP_TEST_LINK_EDID_READ:
4034		drm_dbg_kms(&i915->drm, "EDID test requested\n");
4035		response = intel_dp_autotest_edid(intel_dp);
4036		break;
4037	case DP_TEST_LINK_PHY_TEST_PATTERN:
4038		drm_dbg_kms(&i915->drm, "PHY_PATTERN test requested\n");
4039		response = intel_dp_autotest_phy_pattern(intel_dp);
4040		break;
4041	default:
4042		drm_dbg_kms(&i915->drm, "Invalid test request '%02x'\n",
4043			    request);
4044		break;
4045	}
4046
4047	if (response & DP_TEST_ACK)
4048		intel_dp->compliance.test_type = request;
4049
4050update_status:
4051	status = drm_dp_dpcd_writeb(&intel_dp->aux, DP_TEST_RESPONSE, response);
4052	if (status <= 0)
4053		drm_dbg_kms(&i915->drm,
4054			    "Could not write test response to sink\n");
4055}
4056
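/*
 * Channel coding note for the check below: link rates of 1000000 (UHBR10)
 * and above use 128b/132b channel coding and need the 128b/132b helper,
 * while the classic RBR..HBR3 rates (162000..810000) use 8b/10b.
 */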
4057static bool intel_dp_link_ok(struct intel_dp *intel_dp,
4058			     u8 link_status[DP_LINK_STATUS_SIZE])
4059{
4060	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
4061	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
4062	bool uhbr = intel_dp->link_rate >= 1000000;
4063	bool ok;
4064
4065	if (uhbr)
4066		ok = drm_dp_128b132b_lane_channel_eq_done(link_status,
4067							  intel_dp->lane_count);
4068	else
4069		ok = drm_dp_channel_eq_ok(link_status, intel_dp->lane_count);
4070
4071	if (ok)
4072		return true;
4073
4074	intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
4075	drm_dbg_kms(&i915->drm,
4076		    "[ENCODER:%d:%s] %s link not ok, retraining\n",
4077		    encoder->base.base.id, encoder->base.name,
4078		    uhbr ? "128b/132b" : "8b/10b");
4079
4080	return false;
4081}
4082
4083static void
4084intel_dp_mst_hpd_irq(struct intel_dp *intel_dp, u8 *esi, u8 *ack)
4085{
4086	bool handled = false;
4087
4088	drm_dp_mst_hpd_irq_handle_event(&intel_dp->mst_mgr, esi, ack, &handled);
4089
4090	if (esi[1] & DP_CP_IRQ) {
4091		intel_hdcp_handle_cp_irq(intel_dp->attached_connector);
4092		ack[1] |= DP_CP_IRQ;
4093	}
4094}
4095
4096static bool intel_dp_mst_link_status(struct intel_dp *intel_dp)
4097{
4098	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
4099	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
4100	u8 link_status[DP_LINK_STATUS_SIZE] = {};
4101	const size_t esi_link_status_size = DP_LINK_STATUS_SIZE - 2;
4102
4103	if (drm_dp_dpcd_read(&intel_dp->aux, DP_LANE0_1_STATUS_ESI, link_status,
4104			     esi_link_status_size) != esi_link_status_size) {
4105		drm_err(&i915->drm,
4106			"[ENCODER:%d:%s] Failed to read link status\n",
4107			encoder->base.base.id, encoder->base.name);
4108		return false;
4109	}
4110
4111	return intel_dp_link_ok(intel_dp, link_status);
4112}
4113
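/*
 * Rough layout of the 4 byte ESI block serviced by the loop below, read
 * starting at DP_SINK_COUNT_ESI (an assumption based on the DPCD ESI register
 * map, noted here for orientation only):
 *
 *	esi[0]	SINK_COUNT_ESI
 *	esi[1]	DEVICE_SERVICE_IRQ_VECTOR_ESI0 (DOWN_REP/UP_REQ/CP_IRQ bits)
 *	esi[2]	DEVICE_SERVICE_IRQ_VECTOR_ESI1
 *	esi[3]	LINK_SERVICE_IRQ_VECTOR_ESI0 (LINK_STATUS_CHANGED etc.)
 *
 * Acks are written back starting at DP_SINK_COUNT_ESI + 1, which is why only
 * esi[1..3] are ever set in the ack[] array.
 */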
4114/**
4115 * intel_dp_check_mst_status - service any pending MST interrupts, check link status
4116 * @intel_dp: Intel DP struct
4117 *
4118 * Read any pending MST interrupts, call MST core to handle these and ack the
4119 * interrupts. Check if the main and AUX link state is ok.
4120 *
4121 * Returns:
4122 * - %true if pending interrupts were serviced (or no interrupts were
4123 *   pending) w/o detecting an error condition.
4124 * - %false if an error condition - like AUX failure or a loss of link - is
4125 *   detected, which needs servicing from the hotplug work.
4126 */
4127static bool
4128intel_dp_check_mst_status(struct intel_dp *intel_dp)
4129{
4130	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4131	bool link_ok = true;
4132
4133	drm_WARN_ON_ONCE(&i915->drm, intel_dp->active_mst_links < 0);
4134
4135	for (;;) {
4136		u8 esi[4] = {};
4137		u8 ack[4] = {};
4138
4139		if (!intel_dp_get_sink_irq_esi(intel_dp, esi)) {
4140			drm_dbg_kms(&i915->drm,
4141				    "failed to get ESI - device may have failed\n");
4142			link_ok = false;
4143
4144			break;
4145		}
4146
4147		drm_dbg_kms(&i915->drm, "DPRX ESI: %4ph\n", esi);
4148
4149		if (intel_dp->active_mst_links > 0 && link_ok &&
4150		    esi[3] & LINK_STATUS_CHANGED) {
4151			if (!intel_dp_mst_link_status(intel_dp))
4152				link_ok = false;
4153			ack[3] |= LINK_STATUS_CHANGED;
4154		}
4155
4156		intel_dp_mst_hpd_irq(intel_dp, esi, ack);
4157
4158		if (!memchr_inv(ack, 0, sizeof(ack)))
4159			break;
4160
4161		if (!intel_dp_ack_sink_irq_esi(intel_dp, ack))
4162			drm_dbg_kms(&i915->drm, "Failed to ack ESI\n");
4163
4164		if (ack[1] & (DP_DOWN_REP_MSG_RDY | DP_UP_REQ_MSG_RDY))
4165			drm_dp_mst_hpd_irq_send_new_request(&intel_dp->mst_mgr);
4166	}
4167
4168	return link_ok;
4169}
4170
4171static void
4172intel_dp_handle_hdmi_link_status_change(struct intel_dp *intel_dp)
4173{
4174	bool is_active;
4175	u8 buf = 0;
4176
4177	is_active = drm_dp_pcon_hdmi_link_active(&intel_dp->aux);
4178	if (intel_dp->frl.is_trained && !is_active) {
4179		if (drm_dp_dpcd_readb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, &buf) < 0)
4180			return;
4181
4182		buf &=  ~DP_PCON_ENABLE_HDMI_LINK;
4183		if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf) < 0)
4184			return;
4185
4186		drm_dp_pcon_hdmi_frl_link_error_count(&intel_dp->aux, &intel_dp->attached_connector->base);
4187
4188		intel_dp->frl.is_trained = false;
4189
4190		/* Restart FRL training or fall back to TMDS mode */
4191		intel_dp_check_frl_training(intel_dp);
4192	}
4193}
4194
4195static bool
4196intel_dp_needs_link_retrain(struct intel_dp *intel_dp)
4197{
4198	u8 link_status[DP_LINK_STATUS_SIZE];
4199
4200	if (!intel_dp->link_trained)
4201		return false;
4202
4203	/*
4204	 * While PSR source HW is enabled it controls the main-link itself,
4205	 * enabling and disabling it as needed to send frames. Attempting a
4206	 * retrain then fails, either because the link may not be on at all,
4207	 * or because training patterns and frame data could get mixed on the
4208	 * link at the same time. Also, when exiting PSR the HW retrains the
4209	 * link anyway, fixing any link status error.
4210	 */
4211	if (intel_psr_enabled(intel_dp))
4212		return false;
4213
4214	if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, DP_PHY_DPRX,
4215					     link_status) < 0)
4216		return false;
4217
4218	/*
4219	 * Validate the cached values of intel_dp->link_rate and
4220	 * intel_dp->lane_count before attempting to retrain.
4221	 *
4222	 * FIXME would be nice to use the crtc state here, but since
4223	 * we need to call this from the short HPD handler that seems
4224	 * a bit hard.
4225	 */
4226	if (!intel_dp_link_params_valid(intel_dp, intel_dp->link_rate,
4227					intel_dp->lane_count))
4228		return false;
4229
4230	/* Retrain if link not ok */
4231	return !intel_dp_link_ok(intel_dp, link_status);
4232}
4233
4234static bool intel_dp_has_connector(struct intel_dp *intel_dp,
4235				   const struct drm_connector_state *conn_state)
4236{
4237	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4238	struct intel_encoder *encoder;
4239	enum pipe pipe;
4240
4241	if (!conn_state->best_encoder)
4242		return false;
4243
4244	/* SST */
4245	encoder = &dp_to_dig_port(intel_dp)->base;
4246	if (conn_state->best_encoder == &encoder->base)
4247		return true;
4248
4249	/* MST */
4250	for_each_pipe(i915, pipe) {
4251		encoder = &intel_dp->mst_encoders[pipe]->base;
4252		if (conn_state->best_encoder == &encoder->base)
4253			return true;
4254	}
4255
4256	return false;
4257}
4258
4259int intel_dp_get_active_pipes(struct intel_dp *intel_dp,
4260			      struct drm_modeset_acquire_ctx *ctx,
4261			      u8 *pipe_mask)
4262{
4263	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4264	struct drm_connector_list_iter conn_iter;
4265	struct intel_connector *connector;
4266	int ret = 0;
4267
4268	*pipe_mask = 0;
4269
4270	drm_connector_list_iter_begin(&i915->drm, &conn_iter);
4271	for_each_intel_connector_iter(connector, &conn_iter) {
4272		struct drm_connector_state *conn_state =
4273			connector->base.state;
4274		struct intel_crtc_state *crtc_state;
4275		struct intel_crtc *crtc;
4276
4277		if (!intel_dp_has_connector(intel_dp, conn_state))
4278			continue;
4279
4280		crtc = to_intel_crtc(conn_state->crtc);
4281		if (!crtc)
4282			continue;
4283
4284		ret = drm_modeset_lock(&crtc->base.mutex, ctx);
4285		if (ret)
4286			break;
4287
4288		crtc_state = to_intel_crtc_state(crtc->base.state);
4289
4290		drm_WARN_ON(&i915->drm, !intel_crtc_has_dp_encoder(crtc_state));
4291
4292		if (!crtc_state->hw.active)
4293			continue;
4294
4295		if (conn_state->commit &&
4296		    !try_wait_for_completion(&conn_state->commit->hw_done))
4297			continue;
4298
4299		*pipe_mask |= BIT(crtc->pipe);
4300	}
4301	drm_connector_list_iter_end(&conn_iter);
4302
4303	return ret;
4304}
4305
4306static bool intel_dp_is_connected(struct intel_dp *intel_dp)
4307{
4308	struct intel_connector *connector = intel_dp->attached_connector;
4309
4310	return connector->base.status == connector_status_connected ||
4311		intel_dp->is_mst;
4312}
4313
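/*
 * Link retraining below runs in three passes over the affected pipes: first
 * FIFO underrun reporting is suppressed (retraining glitches the link and
 * would otherwise trip spurious underruns), then link training is redone once
 * on the (MST master) transcoder, and finally, after waiting out a vblank,
 * underrun reporting is turned back on.
 */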
4314int intel_dp_retrain_link(struct intel_encoder *encoder,
4315			  struct drm_modeset_acquire_ctx *ctx)
4316{
4317	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
4318	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4319	struct intel_crtc *crtc;
4320	u8 pipe_mask;
4321	int ret;
4322
4323	if (!intel_dp_is_connected(intel_dp))
4324		return 0;
4325
4326	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
4327			       ctx);
4328	if (ret)
4329		return ret;
4330
4331	if (!intel_dp_needs_link_retrain(intel_dp))
4332		return 0;
4333
4334	ret = intel_dp_get_active_pipes(intel_dp, ctx, &pipe_mask);
4335	if (ret)
4336		return ret;
4337
4338	if (pipe_mask == 0)
4339		return 0;
4340
4341	if (!intel_dp_needs_link_retrain(intel_dp))
4342		return 0;
4343
4344	drm_dbg_kms(&dev_priv->drm, "[ENCODER:%d:%s] retraining link\n",
4345		    encoder->base.base.id, encoder->base.name);
4346
4347	for_each_intel_crtc_in_pipe_mask(&dev_priv->drm, crtc, pipe_mask) {
4348		const struct intel_crtc_state *crtc_state =
4349			to_intel_crtc_state(crtc->base.state);
4350
4351		/* Suppress underruns caused by re-training */
4352		intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, false);
4353		if (crtc_state->has_pch_encoder)
4354			intel_set_pch_fifo_underrun_reporting(dev_priv,
4355							      intel_crtc_pch_transcoder(crtc), false);
4356	}
4357
4358	for_each_intel_crtc_in_pipe_mask(&dev_priv->drm, crtc, pipe_mask) {
4359		const struct intel_crtc_state *crtc_state =
4360			to_intel_crtc_state(crtc->base.state);
4361
4362		/* retrain on the MST master transcoder */
4363		if (DISPLAY_VER(dev_priv) >= 12 &&
4364		    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST) &&
4365		    !intel_dp_mst_is_master_trans(crtc_state))
4366			continue;
4367
4368		intel_dp_check_frl_training(intel_dp);
4369		intel_dp_pcon_dsc_configure(intel_dp, crtc_state);
4370		intel_dp_start_link_train(intel_dp, crtc_state);
4371		intel_dp_stop_link_train(intel_dp, crtc_state);
4372		break;
4373	}
4374
4375	for_each_intel_crtc_in_pipe_mask(&dev_priv->drm, crtc, pipe_mask) {
4376		const struct intel_crtc_state *crtc_state =
4377			to_intel_crtc_state(crtc->base.state);
4378
4379		/* Keep underrun reporting disabled until things are stable */
4380		intel_crtc_wait_for_next_vblank(crtc);
4381
4382		intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, true);
4383		if (crtc_state->has_pch_encoder)
4384			intel_set_pch_fifo_underrun_reporting(dev_priv,
4385							      intel_crtc_pch_transcoder(crtc), true);
4386	}
4387
4388	return 0;
4389}
4390
4391static int intel_dp_prep_phy_test(struct intel_dp *intel_dp,
4392				  struct drm_modeset_acquire_ctx *ctx,
4393				  u8 *pipe_mask)
4394{
4395	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4396	struct drm_connector_list_iter conn_iter;
4397	struct intel_connector *connector;
4398	int ret = 0;
4399
4400	*pipe_mask = 0;
4401
4402	drm_connector_list_iter_begin(&i915->drm, &conn_iter);
4403	for_each_intel_connector_iter(connector, &conn_iter) {
4404		struct drm_connector_state *conn_state =
4405			connector->base.state;
4406		struct intel_crtc_state *crtc_state;
4407		struct intel_crtc *crtc;
4408
4409		if (!intel_dp_has_connector(intel_dp, conn_state))
4410			continue;
4411
4412		crtc = to_intel_crtc(conn_state->crtc);
4413		if (!crtc)
4414			continue;
4415
4416		ret = drm_modeset_lock(&crtc->base.mutex, ctx);
4417		if (ret)
4418			break;
4419
4420		crtc_state = to_intel_crtc_state(crtc->base.state);
4421
4422		drm_WARN_ON(&i915->drm, !intel_crtc_has_dp_encoder(crtc_state));
4423
4424		if (!crtc_state->hw.active)
4425			continue;
4426
4427		if (conn_state->commit &&
4428		    !try_wait_for_completion(&conn_state->commit->hw_done))
4429			continue;
4430
4431		*pipe_mask |= BIT(crtc->pipe);
4432	}
4433	drm_connector_list_iter_end(&conn_iter);
4434
4435	return ret;
4436}
4437
4438static int intel_dp_do_phy_test(struct intel_encoder *encoder,
4439				struct drm_modeset_acquire_ctx *ctx)
4440{
4441	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
4442	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
4443	struct intel_crtc *crtc;
4444	u8 pipe_mask;
4445	int ret;
4446
4447	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
4448			       ctx);
4449	if (ret)
4450		return ret;
4451
4452	ret = intel_dp_prep_phy_test(intel_dp, ctx, &pipe_mask);
4453	if (ret)
4454		return ret;
4455
4456	if (pipe_mask == 0)
4457		return 0;
4458
4459	drm_dbg_kms(&dev_priv->drm, "[ENCODER:%d:%s] PHY test\n",
4460		    encoder->base.base.id, encoder->base.name);
4461
4462	for_each_intel_crtc_in_pipe_mask(&dev_priv->drm, crtc, pipe_mask) {
4463		const struct intel_crtc_state *crtc_state =
4464			to_intel_crtc_state(crtc->base.state);
4465
4466		/* test on the MST master transcoder */
4467		if (DISPLAY_VER(dev_priv) >= 12 &&
4468		    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST) &&
4469		    !intel_dp_mst_is_master_trans(crtc_state))
4470			continue;
4471
4472		intel_dp_process_phy_request(intel_dp, crtc_state);
4473		break;
4474	}
4475
4476	return 0;
4477}
4478
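/*
 * Standard drm_modeset_acquire_ctx retry idiom below: on -EDEADLK all locks
 * are dropped via drm_modeset_backoff() and the whole operation is retried,
 * so intel_dp_do_phy_test() must be safe to run more than once.
 */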
4479void intel_dp_phy_test(struct intel_encoder *encoder)
4480{
4481	struct drm_modeset_acquire_ctx ctx;
4482	int ret;
4483
4484	drm_modeset_acquire_init(&ctx, 0);
4485
4486	for (;;) {
4487		ret = intel_dp_do_phy_test(encoder, &ctx);
4488
4489		if (ret == -EDEADLK) {
4490			drm_modeset_backoff(&ctx);
4491			continue;
4492		}
4493
4494		break;
4495	}
4496
4497	drm_modeset_drop_locks(&ctx);
4498	drm_modeset_acquire_fini(&ctx);
4499	drm_WARN(encoder->base.dev, ret,
4500		 "Acquiring modeset locks failed with %i\n", ret);
4501}
4502
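/*
 * Note for the two IRQ vector helpers below: the sink's IRQ vector registers
 * are cleared by writing back the bits that were read, which is why every
 * successful readb is immediately followed by a writeb of the same value.
 */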
4503static void intel_dp_check_device_service_irq(struct intel_dp *intel_dp)
4504{
4505	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4506	u8 val;
4507
4508	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
4509		return;
4510
4511	if (drm_dp_dpcd_readb(&intel_dp->aux,
4512			      DP_DEVICE_SERVICE_IRQ_VECTOR, &val) != 1 || !val)
4513		return;
4514
4515	drm_dp_dpcd_writeb(&intel_dp->aux, DP_DEVICE_SERVICE_IRQ_VECTOR, val);
4516
4517	if (val & DP_AUTOMATED_TEST_REQUEST)
4518		intel_dp_handle_test_request(intel_dp);
4519
4520	if (val & DP_CP_IRQ)
4521		intel_hdcp_handle_cp_irq(intel_dp->attached_connector);
4522
4523	if (val & DP_SINK_SPECIFIC_IRQ)
4524		drm_dbg_kms(&i915->drm, "Sink specific irq unhandled\n");
4525}
4526
4527static void intel_dp_check_link_service_irq(struct intel_dp *intel_dp)
4528{
4529	u8 val;
4530
4531	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
4532		return;
4533
4534	if (drm_dp_dpcd_readb(&intel_dp->aux,
4535			      DP_LINK_SERVICE_IRQ_VECTOR_ESI0, &val) != 1 || !val)
4536		return;
4537
4538	if (drm_dp_dpcd_writeb(&intel_dp->aux,
4539			       DP_LINK_SERVICE_IRQ_VECTOR_ESI0, val) != 1)
4540		return;
4541
4542	if (val & HDMI_LINK_STATUS_CHANGED)
4543		intel_dp_handle_hdmi_link_status_change(intel_dp);
4544}
4545
4546/*
4547 * According to DP spec
4548 * 5.1.2:
4549 *  1. Read DPCD
4550 *  2. Configure link according to Receiver Capabilities
4551 *  3. Use Link Training from 2.5.3.3 and 3.5.1.3
4552 *  4. Check link status on receipt of hot-plug interrupt
4553 *
4554 * intel_dp_short_pulse - handles short pulse interrupts
4555 * when full detection is not required.
4556 * Returns %true if the short pulse was handled and full detection
4557 * is NOT required, %false otherwise.
4558 */
4559static bool
4560intel_dp_short_pulse(struct intel_dp *intel_dp)
4561{
4562	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
4563	u8 old_sink_count = intel_dp->sink_count;
4564	bool ret;
4565
4566	/*
4567	 * Clear the compliance test variables so that values for the next
4568	 * automated test request can be captured.
4569	 */
4570	memset(&intel_dp->compliance, 0, sizeof(intel_dp->compliance));
4571
4572	/*
4573	 * Now read the DPCD to see if it's actually running.
4574	 * If the current sink count doesn't match the value that was stored
4575	 * earlier, or if the DPCD read failed, we need to do a full
4576	 * detection cycle.
4577	 */
4578	ret = intel_dp_get_dpcd(intel_dp);
4579
4580	if ((old_sink_count != intel_dp->sink_count) || !ret) {
4581		/* No need to proceed if we are going to do full detect */
4582		return false;
4583	}
4584
4585	intel_dp_check_device_service_irq(intel_dp);
4586	intel_dp_check_link_service_irq(intel_dp);
4587
4588	/* Handle CEC interrupts, if any */
4589	drm_dp_cec_irq(&intel_dp->aux);
4590
4591	/* defer to the hotplug work for link retraining if needed */
4592	if (intel_dp_needs_link_retrain(intel_dp))
4593		return false;
4594
4595	intel_psr_short_pulse(intel_dp);
4596
4597	switch (intel_dp->compliance.test_type) {
4598	case DP_TEST_LINK_TRAINING:
4599		drm_dbg_kms(&dev_priv->drm,
4600			    "Link Training Compliance Test requested\n");
4601		/* Send a Hotplug Uevent to userspace to start modeset */
4602		drm_kms_helper_hotplug_event(&dev_priv->drm);
4603		break;
4604	case DP_TEST_LINK_PHY_TEST_PATTERN:
4605		drm_dbg_kms(&dev_priv->drm,
4606			    "PHY test pattern Compliance Test requested\n");
4607		/*
4608		 * Schedule long hpd to do the test
4609		 *
4610		 * FIXME get rid of the ad-hoc phy test modeset code
4611		 * and properly incorporate it into the normal modeset.
4612		 */
4613		return false;
4614	}
4615
4616	return true;
4617}
4618
4619/* XXX this is probably wrong for multiple downstream ports */
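/*
 * Rough decision ladder used below for a branch (DFP) device: DPCD read
 * failure -> disconnected; no branch device -> connected; HPD capable DFP ->
 * trust SINK_COUNT; MST capable -> connected; otherwise probe DDC, and for
 * VGA/NON_EDID style ports that fail even that, report unknown.
 */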
4620static enum drm_connector_status
4621intel_dp_detect_dpcd(struct intel_dp *intel_dp)
4622{
4623	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4624	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
4625	u8 *dpcd = intel_dp->dpcd;
4626	u8 type;
4627
4628	if (drm_WARN_ON(&i915->drm, intel_dp_is_edp(intel_dp)))
4629		return connector_status_connected;
4630
4631	lspcon_resume(dig_port);
4632
4633	if (!intel_dp_get_dpcd(intel_dp))
4634		return connector_status_disconnected;
4635
4636	/* if there's no downstream port, we're done */
4637	if (!drm_dp_is_branch(dpcd))
4638		return connector_status_connected;
4639
4640	/* If we're HPD-aware, SINK_COUNT changes dynamically */
4641	if (intel_dp_has_sink_count(intel_dp) &&
4642	    intel_dp->downstream_ports[0] & DP_DS_PORT_HPD) {
4643		return intel_dp->sink_count ?
4644		connector_status_connected : connector_status_disconnected;
4645	}
4646
4647	if (intel_dp_can_mst(intel_dp))
4648		return connector_status_connected;
4649
4650	/* If no HPD, poke DDC gently */
4651	if (drm_probe_ddc(&intel_dp->aux.ddc))
4652		return connector_status_connected;
4653
4654	/* Well we tried, say unknown for unreliable port types */
4655	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
4656		type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
4657		if (type == DP_DS_PORT_TYPE_VGA ||
4658		    type == DP_DS_PORT_TYPE_NON_EDID)
4659			return connector_status_unknown;
4660	} else {
4661		type = intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
4662			DP_DWN_STRM_PORT_TYPE_MASK;
4663		if (type == DP_DWN_STRM_PORT_TYPE_ANALOG ||
4664		    type == DP_DWN_STRM_PORT_TYPE_OTHER)
4665			return connector_status_unknown;
4666	}
4667
4668	/* Anything else is out of spec, warn and ignore */
4669	drm_dbg_kms(&i915->drm, "Broken DP branch device, ignoring\n");
4670	return connector_status_disconnected;
4671}
4672
4673static enum drm_connector_status
4674edp_detect(struct intel_dp *intel_dp)
4675{
4676	return connector_status_connected;
4677}
4678
4679/*
4680 * intel_digital_port_connected - is the specified port connected?
4681 * @encoder: intel_encoder
4682 *
4683 * In cases where there's a connector physically connected but it can't be used
4684 * by our hardware we also return false, since the rest of the driver should
4685 * pretty much treat the port as disconnected. This is relevant for type-C
4686 * (starting on ICL) where there's ownership involved.
4687 *
4688 * Return %true if port is connected, %false otherwise.
4689 */
4690bool intel_digital_port_connected(struct intel_encoder *encoder)
4691{
4692	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
4693	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
4694	bool is_connected = false;
4695	intel_wakeref_t wakeref;
4696
4697	with_intel_display_power(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref)
4698		is_connected = dig_port->connected(encoder);
4699
4700	return is_connected;
4701}
4702
4703static const struct drm_edid *
4704intel_dp_get_edid(struct intel_dp *intel_dp)
4705{
4706	struct intel_connector *connector = intel_dp->attached_connector;
4707	const struct drm_edid *fixed_edid = connector->panel.fixed_edid;
4708
4709	/* Use panel fixed edid if we have one */
4710	if (fixed_edid) {
4711		/* invalid edid */
4712		if (IS_ERR(fixed_edid))
4713			return NULL;
4714
4715		return drm_edid_dup(fixed_edid);
4716	}
4717
4718	return drm_edid_read_ddc(&connector->base, &intel_dp->aux.ddc);
4719}
4720
4721static void
4722intel_dp_update_dfp(struct intel_dp *intel_dp,
4723		    const struct drm_edid *drm_edid)
4724{
4725	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4726	struct intel_connector *connector = intel_dp->attached_connector;
4727	const struct edid *edid;
4728
4729	/* FIXME: Get rid of drm_edid_raw() */
4730	edid = drm_edid_raw(drm_edid);
4731
4732	intel_dp->dfp.max_bpc =
4733		drm_dp_downstream_max_bpc(intel_dp->dpcd,
4734					  intel_dp->downstream_ports, edid);
4735
4736	intel_dp->dfp.max_dotclock =
4737		drm_dp_downstream_max_dotclock(intel_dp->dpcd,
4738					       intel_dp->downstream_ports);
4739
4740	intel_dp->dfp.min_tmds_clock =
4741		drm_dp_downstream_min_tmds_clock(intel_dp->dpcd,
4742						 intel_dp->downstream_ports,
4743						 edid);
4744	intel_dp->dfp.max_tmds_clock =
4745		drm_dp_downstream_max_tmds_clock(intel_dp->dpcd,
4746						 intel_dp->downstream_ports,
4747						 edid);
4748
4749	intel_dp->dfp.pcon_max_frl_bw =
4750		drm_dp_get_pcon_max_frl_bw(intel_dp->dpcd,
4751					   intel_dp->downstream_ports);
4752
4753	drm_dbg_kms(&i915->drm,
4754		    "[CONNECTOR:%d:%s] DFP max bpc %d, max dotclock %d, TMDS clock %d-%d, PCON Max FRL BW %dGbps\n",
4755		    connector->base.base.id, connector->base.name,
4756		    intel_dp->dfp.max_bpc,
4757		    intel_dp->dfp.max_dotclock,
4758		    intel_dp->dfp.min_tmds_clock,
4759		    intel_dp->dfp.max_tmds_clock,
4760		    intel_dp->dfp.pcon_max_frl_bw);
4761
4762	intel_dp_get_pcon_dsc_cap(intel_dp);
4763}
4764
4765static bool
4766intel_dp_can_ycbcr420(struct intel_dp *intel_dp)
4767{
4768	if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420) &&
4769	    (!drm_dp_is_branch(intel_dp->dpcd) || intel_dp->dfp.ycbcr420_passthrough))
4770		return true;
4771
4772	if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_RGB) &&
4773	    dfp_can_convert_from_rgb(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420))
4774		return true;
4775
4776	if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR444) &&
4777	    dfp_can_convert_from_ycbcr444(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420))
4778		return true;
4779
4780	return false;
4781}
4782
4783static void
4784intel_dp_update_420(struct intel_dp *intel_dp)
4785{
4786	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4787	struct intel_connector *connector = intel_dp->attached_connector;
4788
4789	intel_dp->dfp.ycbcr420_passthrough =
4790		drm_dp_downstream_420_passthrough(intel_dp->dpcd,
4791						  intel_dp->downstream_ports);
4792	/* on-board LSPCON always assumed to support 4:4:4->4:2:0 conversion */
4793	intel_dp->dfp.ycbcr_444_to_420 =
4794		dp_to_dig_port(intel_dp)->lspcon.active ||
4795		drm_dp_downstream_444_to_420_conversion(intel_dp->dpcd,
4796							intel_dp->downstream_ports);
4797	intel_dp->dfp.rgb_to_ycbcr =
4798		drm_dp_downstream_rgb_to_ycbcr_conversion(intel_dp->dpcd,
4799							  intel_dp->downstream_ports,
4800							  DP_DS_HDMI_BT709_RGB_YCBCR_CONV);
4801
4802	connector->base.ycbcr_420_allowed = intel_dp_can_ycbcr420(intel_dp);
4803
4804	drm_dbg_kms(&i915->drm,
4805		    "[CONNECTOR:%d:%s] RGB->YcbCr conversion? %s, YCbCr 4:2:0 allowed? %s, YCbCr 4:4:4->4:2:0 conversion? %s\n",
4806		    connector->base.base.id, connector->base.name,
4807		    str_yes_no(intel_dp->dfp.rgb_to_ycbcr),
4808		    str_yes_no(connector->base.ycbcr_420_allowed),
4809		    str_yes_no(intel_dp->dfp.ycbcr_444_to_420));
4810}
4811
4812static void
4813intel_dp_set_edid(struct intel_dp *intel_dp)
4814{
4815	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
4816	struct intel_connector *connector = intel_dp->attached_connector;
4817	const struct drm_edid *drm_edid;
4818	const struct edid *edid;
4819	bool vrr_capable;
4820
4821	intel_dp_unset_edid(intel_dp);
4822	drm_edid = intel_dp_get_edid(intel_dp);
4823	connector->detect_edid = drm_edid;
4824
4825	/* Below we depend on display info having been updated */
4826	drm_edid_connector_update(&connector->base, drm_edid);
4827
4828	vrr_capable = intel_vrr_is_capable(connector);
4829	drm_dbg_kms(&i915->drm, "[CONNECTOR:%d:%s] VRR capable: %s\n",
4830		    connector->base.base.id, connector->base.name, str_yes_no(vrr_capable));
4831	drm_connector_set_vrr_capable_property(&connector->base, vrr_capable);
4832
4833	intel_dp_update_dfp(intel_dp, drm_edid);
4834	intel_dp_update_420(intel_dp);
4835
4836	/* FIXME: Get rid of drm_edid_raw() */
4837	edid = drm_edid_raw(drm_edid);
4838
4839	drm_dp_cec_set_edid(&intel_dp->aux, edid);
4840}
4841
4842static void
4843intel_dp_unset_edid(struct intel_dp *intel_dp)
4844{
4845	struct intel_connector *connector = intel_dp->attached_connector;
4846
4847	drm_dp_cec_unset_edid(&intel_dp->aux);
4848	drm_edid_free(connector->detect_edid);
4849	connector->detect_edid = NULL;
4850
4851	intel_dp->dfp.max_bpc = 0;
4852	intel_dp->dfp.max_dotclock = 0;
4853	intel_dp->dfp.min_tmds_clock = 0;
4854	intel_dp->dfp.max_tmds_clock = 0;
4855
4856	intel_dp->dfp.pcon_max_frl_bw = 0;
4857
4858	intel_dp->dfp.ycbcr_444_to_420 = false;
4859	connector->base.ycbcr_420_allowed = false;
4860
4861	drm_connector_set_vrr_capable_property(&connector->base,
4862					       false);
4863}
4864
4865static int
4866intel_dp_detect(struct drm_connector *connector,
4867		struct drm_modeset_acquire_ctx *ctx,
4868		bool force)
4869{
4870	struct drm_i915_private *dev_priv = to_i915(connector->dev);
4871	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
4872	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
4873	struct intel_encoder *encoder = &dig_port->base;
4874	enum drm_connector_status status;
4875
4876	drm_dbg_kms(&dev_priv->drm, "[CONNECTOR:%d:%s]\n",
4877		    connector->base.id, connector->name);
4878	drm_WARN_ON(&dev_priv->drm,
4879		    !drm_modeset_is_locked(&dev_priv->drm.mode_config.connection_mutex));
4880
4881	if (!INTEL_DISPLAY_ENABLED(dev_priv))
4882		return connector_status_disconnected;
4883
4884	/* Can't disconnect eDP */
4885	if (intel_dp_is_edp(intel_dp))
4886		status = edp_detect(intel_dp);
4887	else if (intel_digital_port_connected(encoder))
4888		status = intel_dp_detect_dpcd(intel_dp);
4889	else
4890		status = connector_status_disconnected;
4891
4892	if (status == connector_status_disconnected) {
4893		memset(&intel_dp->compliance, 0, sizeof(intel_dp->compliance));
4894		memset(intel_dp->dsc_dpcd, 0, sizeof(intel_dp->dsc_dpcd));
4895
4896		if (intel_dp->is_mst) {
4897			drm_dbg_kms(&dev_priv->drm,
4898				    "MST device may have disappeared %d vs %d\n",
4899				    intel_dp->is_mst,
4900				    intel_dp->mst_mgr.mst_state);
4901			intel_dp->is_mst = false;
4902			drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
4903							intel_dp->is_mst);
4904		}
4905
4906		goto out;
4907	}
4908
4909	/* Read DP Sink DSC Cap DPCD regs for DP v1.4 */
4910	if (HAS_DSC(dev_priv))
4911		intel_dp_get_dsc_sink_cap(intel_dp);
4912
4913	intel_dp_configure_mst(intel_dp);
4914
4915	/*
4916	 * TODO: Reset link params when switching to MST mode, until MST
4917	 * supports link training fallback params.
4918	 */
4919	if (intel_dp->reset_link_params || intel_dp->is_mst) {
4920		intel_dp_reset_max_link_params(intel_dp);
4921		intel_dp->reset_link_params = false;
4922	}
4923
4924	intel_dp_print_rates(intel_dp);
4925
4926	if (intel_dp->is_mst) {
4927		/*
4928		 * If we are in MST mode then this connector
4929		 * won't appear connected and won't have any
4930		 * EDID of its own.
4931		 */
4932		status = connector_status_disconnected;
4933		goto out;
4934	}
4935
4936	/*
4937	 * Some external monitors do not signal loss of link synchronization
4938	 * with an IRQ_HPD, so force a link status check.
4939	 */
4940	if (!intel_dp_is_edp(intel_dp)) {
4941		int ret;
4942
4943		ret = intel_dp_retrain_link(encoder, ctx);
4944		if (ret)
4945			return ret;
4946	}
4947
4948	/*
4949	 * Clear the NACK and defer counts so we get their exact values for
4950	 * the EDID read; they are required by Compliance tests 4.2.2.4
4951	 * and 4.2.2.5.
4952	 */
4953	intel_dp->aux.i2c_nack_count = 0;
4954	intel_dp->aux.i2c_defer_count = 0;
4955
4956	intel_dp_set_edid(intel_dp);
4957	if (intel_dp_is_edp(intel_dp) ||
4958	    to_intel_connector(connector)->detect_edid)
4959		status = connector_status_connected;
4960
4961	intel_dp_check_device_service_irq(intel_dp);
4962
4963out:
4964	if (status != connector_status_connected && !intel_dp->is_mst)
4965		intel_dp_unset_edid(intel_dp);
4966
4967	/*
4968	 * Make sure the refs for power wells enabled during detect are
4969	 * dropped to avoid a new detect cycle triggered by HPD polling.
4970	 */
4971	intel_display_power_flush_work(dev_priv);
4972
4973	if (!intel_dp_is_edp(intel_dp))
4974		drm_dp_set_subconnector_property(connector,
4975						 status,
4976						 intel_dp->dpcd,
4977						 intel_dp->downstream_ports);
4978	return status;
4979}
4980
4981static void
4982intel_dp_force(struct drm_connector *connector)
4983{
4984	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
4985	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
4986	struct intel_encoder *intel_encoder = &dig_port->base;
4987	struct drm_i915_private *dev_priv = to_i915(intel_encoder->base.dev);
4988	enum intel_display_power_domain aux_domain =
4989		intel_aux_power_domain(dig_port);
4990	intel_wakeref_t wakeref;
4991
4992	drm_dbg_kms(&dev_priv->drm, "[CONNECTOR:%d:%s]\n",
4993		    connector->base.id, connector->name);
4994	intel_dp_unset_edid(intel_dp);
4995
4996	if (connector->status != connector_status_connected)
4997		return;
4998
4999	wakeref = intel_display_power_get(dev_priv, aux_domain);
5000
5001	intel_dp_set_edid(intel_dp);
5002
5003	intel_display_power_put(dev_priv, aux_domain, wakeref);
5004}
5005
5006static int intel_dp_get_modes(struct drm_connector *connector)
5007{
5008	struct intel_connector *intel_connector = to_intel_connector(connector);
5009	int num_modes;
5010
5011	/* drm_edid_connector_update() done in ->detect() or ->force() */
5012	num_modes = drm_edid_connector_add_modes(connector);
5013
5014	/* Also add fixed mode, which may or may not be present in EDID */
5015	if (intel_dp_is_edp(intel_attached_dp(intel_connector)))
5016		num_modes += intel_panel_get_modes(intel_connector);
5017
5018	if (num_modes)
5019		return num_modes;
5020
5021	if (!intel_connector->detect_edid) {
5022		struct intel_dp *intel_dp = intel_attached_dp(intel_connector);
5023		struct drm_display_mode *mode;
5024
5025		mode = drm_dp_downstream_mode(connector->dev,
5026					      intel_dp->dpcd,
5027					      intel_dp->downstream_ports);
5028		if (mode) {
5029			drm_mode_probed_add(connector, mode);
5030			num_modes++;
5031		}
5032	}
5033
5034	return num_modes;
5035}
5036
5037static int
5038intel_dp_connector_register(struct drm_connector *connector)
5039{
5040	struct drm_i915_private *i915 = to_i915(connector->dev);
5041	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
5042	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
5043	struct intel_lspcon *lspcon = &dig_port->lspcon;
5044	int ret;
5045
5046	ret = intel_connector_register(connector);
5047	if (ret)
5048		return ret;
5049
5050	drm_dbg_kms(&i915->drm, "registering %s bus for %s\n",
5051		    intel_dp->aux.name, connector->kdev->kobj.name);
5052
5053	intel_dp->aux.dev = connector->kdev;
5054	ret = drm_dp_aux_register(&intel_dp->aux);
5055	if (!ret)
5056		drm_dp_cec_register_connector(&intel_dp->aux, connector);
5057
5058	if (!intel_bios_encoder_is_lspcon(dig_port->base.devdata))
5059		return ret;
5060
5061	/*
5062	 * ToDo: Clean this up to handle lspcon init and resume in a more
5063	 * efficient and streamlined way.
5064	 */
5065	if (lspcon_init(dig_port)) {
5066		lspcon_detect_hdr_capability(lspcon);
5067		if (lspcon->hdr_supported)
5068			drm_connector_attach_hdr_output_metadata_property(connector);
5069	}
5070
5071	return ret;
5072}
5073
5074static void
5075intel_dp_connector_unregister(struct drm_connector *connector)
5076{
5077	struct intel_dp *intel_dp = intel_attached_dp(to_intel_connector(connector));
5078
5079	drm_dp_cec_unregister_connector(&intel_dp->aux);
5080	drm_dp_aux_unregister(&intel_dp->aux);
5081	intel_connector_unregister(connector);
5082}
5083
5084void intel_dp_encoder_flush_work(struct drm_encoder *encoder)
5085{
5086	struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder));
5087	struct intel_dp *intel_dp = &dig_port->dp;
5088
5089	intel_dp_mst_encoder_cleanup(dig_port);
5090
5091	intel_pps_vdd_off_sync(intel_dp);
5092
5093	/*
5094	 * Ensure power off delay is respected on module remove, so that we can
5095	 * reduce delays at driver probe. See pps_init_timestamps().
5096	 */
5097	intel_pps_wait_power_cycle(intel_dp);
5098
5099	intel_dp_aux_fini(intel_dp);
5100}
5101
5102void intel_dp_encoder_suspend(struct intel_encoder *intel_encoder)
5103{
5104	struct intel_dp *intel_dp = enc_to_intel_dp(intel_encoder);
5105
5106	intel_pps_vdd_off_sync(intel_dp);
5107}
5108
5109void intel_dp_encoder_shutdown(struct intel_encoder *intel_encoder)
5110{
5111	struct intel_dp *intel_dp = enc_to_intel_dp(intel_encoder);
5112
5113	intel_pps_wait_power_cycle(intel_dp);
5114}
5115
static int intel_modeset_tile_group(struct intel_atomic_state *state,
				    int tile_group_id)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct drm_connector_list_iter conn_iter;
	struct drm_connector *connector;
	int ret = 0;

	drm_connector_list_iter_begin(&dev_priv->drm, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *conn_state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (!connector->has_tile ||
		    connector->tile_group->id != tile_group_id)
			continue;

		conn_state = drm_atomic_get_connector_state(&state->base,
							    connector);
		if (IS_ERR(conn_state)) {
			ret = PTR_ERR(conn_state);
			break;
		}

		crtc = to_intel_crtc(conn_state->crtc);

		if (!crtc)
			continue;

		crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
		crtc_state->uapi.mode_changed = true;

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			break;
	}
	drm_connector_list_iter_end(&conn_iter);

	return ret;
}

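/*
 * Force a modeset on every enabled CRTC whose transcoder is in the given
 * mask, pulling the affected connectors and planes into the atomic state.
 */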
static int intel_modeset_affected_transcoders(struct intel_atomic_state *state,
					      u8 transcoders)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc *crtc;

	if (transcoders == 0)
		return 0;

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		struct intel_crtc_state *crtc_state;
		int ret;

		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state))
			return PTR_ERR(crtc_state);

		if (!crtc_state->hw.enable)
			continue;

		if (!(transcoders & BIT(crtc_state->cpu_transcoder)))
			continue;

		crtc_state->uapi.mode_changed = true;

		ret = drm_atomic_add_affected_connectors(&state->base, &crtc->base);
		if (ret)
			return ret;

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			return ret;

		transcoders &= ~BIT(crtc_state->cpu_transcoder);
	}

	drm_WARN_ON(&dev_priv->drm, transcoders != 0);

	return 0;
}

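/*
 * For port sync, a modeset on one connector must also cover the master and
 * slave transcoders it was synced with, derived from the old CRTC state.
 */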
static int intel_modeset_synced_crtcs(struct intel_atomic_state *state,
				      struct drm_connector *connector)
{
	const struct drm_connector_state *old_conn_state =
		drm_atomic_get_old_connector_state(&state->base, connector);
	const struct intel_crtc_state *old_crtc_state;
	struct intel_crtc *crtc;
	u8 transcoders;

	crtc = to_intel_crtc(old_conn_state->crtc);
	if (!crtc)
		return 0;

	old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc);

	if (!old_crtc_state->hw.active)
		return 0;

	transcoders = old_crtc_state->sync_mode_slaves_mask;
	if (old_crtc_state->master_transcoder != INVALID_TRANSCODER)
		transcoders |= BIT(old_crtc_state->master_transcoder);

	return intel_modeset_affected_transcoders(state,
						  transcoders);
}

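/*
 * DP connector ->atomic_check(): run the common digital connector check,
 * the MST root connector check where the source supports MST, and on
 * display version 9+ extend the state to cover tile groups and port synced
 * CRTCs whenever this connector needs a modeset.
 */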
static int intel_dp_connector_atomic_check(struct drm_connector *conn,
					   struct drm_atomic_state *_state)
{
	struct drm_i915_private *dev_priv = to_i915(conn->dev);
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	struct drm_connector_state *conn_state = drm_atomic_get_new_connector_state(_state, conn);
	struct intel_connector *intel_conn = to_intel_connector(conn);
	struct intel_dp *intel_dp = enc_to_intel_dp(intel_conn->encoder);
	int ret;

	ret = intel_digital_connector_atomic_check(conn, &state->base);
	if (ret)
		return ret;

	if (intel_dp_mst_source_support(intel_dp)) {
		ret = drm_dp_mst_root_conn_atomic_check(conn_state, &intel_dp->mst_mgr);
		if (ret)
			return ret;
	}

	/*
	 * We don't enable port sync on BDW due to missing workarounds and
	 * because the modeset sequence hasn't been adjusted appropriately.
	 */
	if (DISPLAY_VER(dev_priv) < 9)
		return 0;

	if (!intel_connector_needs_modeset(state, conn))
		return 0;

	if (conn->has_tile) {
		ret = intel_modeset_tile_group(state, conn->tile_group->id);
		if (ret)
			return ret;
	}

	return intel_modeset_synced_crtcs(state, conn);
}

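/*
 * Out-of-band hotplug notification: flag the encoder's HPD pin as having a
 * pending event and kick the hotplug work immediately.
 */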
static void intel_dp_oob_hotplug_event(struct drm_connector *connector)
{
	struct intel_encoder *encoder = intel_attached_encoder(to_intel_connector(connector));
	struct drm_i915_private *i915 = to_i915(connector->dev);

	spin_lock_irq(&i915->irq_lock);
	i915->display.hotplug.event_bits |= BIT(encoder->hpd_pin);
	spin_unlock_irq(&i915->irq_lock);
	queue_delayed_work(i915->unordered_wq, &i915->display.hotplug.hotplug_work, 0);
}

static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.force = intel_dp_force,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.late_register = intel_dp_connector_register,
	.early_unregister = intel_dp_connector_unregister,
	.destroy = intel_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
	.oob_hotplug_event = intel_dp_oob_hotplug_event,
};

static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.detect_ctx = intel_dp_detect,
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.atomic_check = intel_dp_connector_atomic_check,
};

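/*
 * Handle a long/short HPD pulse on a DP/eDP port. Pulses on eDP caused by
 * VDD toggling are ignored. A long pulse only flags the link parameters
 * for re-initialization and returns IRQ_NONE so that full hotplug
 * processing takes over; a short pulse is serviced here, either through
 * the MST handler or intel_dp_short_pulse().
 */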
enum irqreturn
intel_dp_hpd_pulse(struct intel_digital_port *dig_port, bool long_hpd)
{
	struct drm_i915_private *i915 = to_i915(dig_port->base.base.dev);
	struct intel_dp *intel_dp = &dig_port->dp;

	if (dig_port->base.type == INTEL_OUTPUT_EDP &&
	    (long_hpd || !intel_pps_have_panel_power_or_vdd(intel_dp))) {
		/*
		 * vdd off can generate a long/short pulse on eDP which
		 * would require vdd on to handle it, and thus we
		 * would end up in an endless cycle of
		 * "vdd off -> long/short hpd -> vdd on -> detect -> vdd off -> ..."
		 */
		drm_dbg_kms(&i915->drm,
			    "ignoring %s hpd on eDP [ENCODER:%d:%s]\n",
			    long_hpd ? "long" : "short",
			    dig_port->base.base.base.id,
			    dig_port->base.base.name);
		return IRQ_HANDLED;
	}

	drm_dbg_kms(&i915->drm, "got hpd irq on [ENCODER:%d:%s] - %s\n",
		    dig_port->base.base.base.id,
		    dig_port->base.base.name,
		    long_hpd ? "long" : "short");

	if (long_hpd) {
		intel_dp->reset_link_params = true;
		return IRQ_NONE;
	}

	if (intel_dp->is_mst) {
		if (!intel_dp_check_mst_status(intel_dp))
			return IRQ_NONE;
	} else if (!intel_dp_short_pulse(intel_dp)) {
		return IRQ_NONE;
	}

	return IRQ_HANDLED;
}

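/*
 * Decide whether a port should be registered as eDP rather than DP, based
 * on the platform (port A is always eDP before display version 9) and the
 * VBT encoder data.
 */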
static bool _intel_dp_is_port_edp(struct drm_i915_private *dev_priv,
				  const struct intel_bios_encoder_data *devdata,
				  enum port port)
{
	/*
	 * eDP is not supported on g4x, so bail out early just
	 * for a bit of extra safety in case the VBT is bonkers.
	 */
	if (DISPLAY_VER(dev_priv) < 5)
		return false;

	if (DISPLAY_VER(dev_priv) < 9 && port == PORT_A)
		return true;

	return devdata && intel_bios_encoder_supports_edp(devdata);
}

bool intel_dp_is_port_edp(struct drm_i915_private *i915, enum port port)
{
	const struct intel_bios_encoder_data *devdata =
		intel_bios_encoder_data_lookup(i915, port);

	return _intel_dp_is_port_edp(i915, devdata, port);
}

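/*
 * Whether this port can send the gamut metadata (HDR) DIP: never through
 * LSPCON, always on display version 11+, and otherwise only on HSW/BDW and
 * display version 9+ ports other than port A.
 */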
static bool
has_gamut_metadata_dip(struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum port port = encoder->port;

	if (intel_bios_encoder_is_lspcon(encoder->devdata))
		return false;

	if (DISPLAY_VER(i915) >= 11)
		return true;

	if (port == PORT_A)
		return false;

	if (IS_HASWELL(i915) || IS_BROADWELL(i915) ||
	    DISPLAY_VER(i915) >= 9)
		return true;

	return false;
}

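/*
 * Attach the DP connector properties: subconnector (DP only), force audio,
 * broadcast RGB, max bpc (range depends on platform), colorspace (HDMI
 * flavour behind LSPCON, DP otherwise), plus HDR output metadata and VRR
 * where supported.
 */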
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	enum port port = dp_to_dig_port(intel_dp)->base.port;

	if (!intel_dp_is_edp(intel_dp))
		drm_connector_attach_dp_subconnector_property(connector);

	if (!IS_G4X(dev_priv) && port != PORT_A)
		intel_attach_force_audio_property(connector);

	intel_attach_broadcast_rgb_property(connector);
	if (HAS_GMCH(dev_priv))
		drm_connector_attach_max_bpc_property(connector, 6, 10);
	else if (DISPLAY_VER(dev_priv) >= 5)
		drm_connector_attach_max_bpc_property(connector, 6, 12);

	/* Register the HDMI colorspace property for the LSPCON case */
	if (intel_bios_encoder_is_lspcon(dp_to_dig_port(intel_dp)->base.devdata)) {
		drm_connector_attach_content_type_property(connector);
		intel_attach_hdmi_colorspace_property(connector);
	} else {
		intel_attach_dp_colorspace_property(connector);
	}

	if (has_gamut_metadata_dip(&dp_to_dig_port(intel_dp)->base))
		drm_connector_attach_hdr_output_metadata_property(connector);

	if (HAS_VRR(dev_priv))
		drm_connector_attach_vrr_capable_property(connector);
}

static void
intel_edp_add_properties(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct drm_i915_private *i915 = to_i915(connector->base.dev);
	const struct drm_display_mode *fixed_mode =
		intel_panel_preferred_fixed_mode(connector);

	intel_attach_scaling_mode_property(&connector->base);

	drm_connector_set_panel_orientation_with_quirk(&connector->base,
						       i915->display.vbt.orientation,
						       fixed_mode->hdisplay,
						       fixed_mode->vdisplay);
}

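/* Set up eDP backlight control, picking the initial pipe on VLV/CHV. */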
static void intel_edp_backlight_setup(struct intel_dp *intel_dp,
				      struct intel_connector *connector)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	enum pipe pipe = INVALID_PIPE;

	if (IS_VALLEYVIEW(i915) || IS_CHERRYVIEW(i915)) {
		/*
		 * Figure out the current pipe for the initial backlight setup.
		 * If the current pipe isn't valid, try the PPS pipe, and if that
		 * fails just assume pipe A.
		 */
		pipe = vlv_active_pipe(intel_dp);

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = intel_dp->pps.pps_pipe;

		if (pipe != PIPE_A && pipe != PIPE_B)
			pipe = PIPE_A;
	}

	intel_backlight_setup(connector, pipe);
}

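/*
 * eDP-specific connector init: probe the panel via DPCD/EDID (with sanity
 * checks against ghost sinks on a shared AUX CH), then set up the fixed
 * panel mode(s), backlight and eDP properties. Returns false on failure,
 * in which case the caller aborts connector setup.
 */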
static bool intel_edp_init_connector(struct intel_dp *intel_dp,
				     struct intel_connector *intel_connector)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct drm_connector *connector = &intel_connector->base;
	struct drm_display_mode *fixed_mode;
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	bool has_dpcd;
	const struct drm_edid *drm_edid;

	if (!intel_dp_is_edp(intel_dp))
		return true;

	/*
	 * On IBX/CPT we may get here with LVDS already registered. Since the
	 * driver uses the only internal power sequencer available for both
	 * eDP and LVDS, bail out early in this case to prevent interfering
	 * with an already powered-on LVDS power sequencer.
	 */
	if (intel_get_lvds_encoder(dev_priv)) {
		drm_WARN_ON(&dev_priv->drm,
			    !(HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)));
		drm_info(&dev_priv->drm,
			 "LVDS was detected, not registering eDP\n");

		return false;
	}

	intel_bios_init_panel_early(dev_priv, &intel_connector->panel,
				    encoder->devdata);

	if (!intel_pps_init(intel_dp)) {
		drm_info(&dev_priv->drm,
			 "[ENCODER:%d:%s] unusable PPS, disabling eDP\n",
			 encoder->base.base.id, encoder->base.name);
		/*
		 * The BIOS may have still enabled VDD on the PPS even
		 * though it's unusable. Make sure we turn it back off
		 * and release the power domain references, etc.
		 */
		goto out_vdd_off;
	}

	/*
	 * Enable HPD sense for live status check.
	 * intel_hpd_irq_setup() will turn it off again
	 * if it's no longer needed later.
	 *
	 * The DPCD probe below will make sure VDD is on.
	 */
	intel_hpd_enable_detection(encoder);

	/* Cache DPCD and EDID for eDP. */
	has_dpcd = intel_edp_init_dpcd(intel_dp);

	if (!has_dpcd) {
		/* if this fails, presume the device is a ghost */
		drm_info(&dev_priv->drm,
			 "[ENCODER:%d:%s] failed to retrieve link info, disabling eDP\n",
			 encoder->base.base.id, encoder->base.name);
		goto out_vdd_off;
	}

	/*
	 * VBT and straps are liars. Also check HPD as that seems
	 * to be the most reliable piece of information available.
	 *
	 * ... except on devices that forgot to hook HPD up for eDP
	 * (eg. Acer Chromebook C710), so we'll check it only if multiple
	 * ports are attempting to use the same AUX CH, according to VBT.
	 */
	if (intel_bios_dp_has_shared_aux_ch(encoder->devdata)) {
		/*
		 * If this fails, presume the DPCD answer came
		 * from some other port using the same AUX CH.
		 *
		 * FIXME maybe cleaner to check this before the
		 * DPCD read? Would need to sort out the VDD handling...
		 */
		if (!intel_digital_port_connected(encoder)) {
			drm_info(&dev_priv->drm,
				 "[ENCODER:%d:%s] HPD is down, disabling eDP\n",
				 encoder->base.base.id, encoder->base.name);
			goto out_vdd_off;
		}

		/*
		 * Unfortunately even the HPD based detection fails on
		 * eg. Asus B360M-A (CFL+CNP), so as a last resort fall
		 * back to checking for a VGA branch device. Only do this
		 * on known affected platforms to minimize false positives.
		 */
		if (DISPLAY_VER(dev_priv) == 9 && drm_dp_is_branch(intel_dp->dpcd) &&
		    (intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] & DP_DWN_STRM_PORT_TYPE_MASK) ==
		    DP_DWN_STRM_PORT_TYPE_ANALOG) {
			drm_info(&dev_priv->drm,
				 "[ENCODER:%d:%s] VGA converter detected, disabling eDP\n",
				 encoder->base.base.id, encoder->base.name);
			goto out_vdd_off;
		}
	}

	mutex_lock(&dev_priv->drm.mode_config.mutex);
	drm_edid = drm_edid_read_ddc(connector, &intel_dp->aux.ddc);
	if (!drm_edid) {
		/* Fallback to EDID from ACPI OpRegion, if any */
		drm_edid = intel_opregion_get_edid(intel_connector);
		if (drm_edid)
			drm_dbg_kms(&dev_priv->drm,
				    "[CONNECTOR:%d:%s] Using OpRegion EDID\n",
				    connector->base.id, connector->name);
	}
	if (drm_edid) {
		if (drm_edid_connector_update(connector, drm_edid) ||
		    !drm_edid_connector_add_modes(connector)) {
			drm_edid_connector_update(connector, NULL);
			drm_edid_free(drm_edid);
			drm_edid = ERR_PTR(-EINVAL);
		}
	} else {
		drm_edid = ERR_PTR(-ENOENT);
	}

	intel_bios_init_panel_late(dev_priv, &intel_connector->panel, encoder->devdata,
				   IS_ERR(drm_edid) ? NULL : drm_edid);

	intel_panel_add_edid_fixed_modes(intel_connector, true);

	/* MSO requires information from the EDID */
	intel_edp_mso_init(intel_dp);

	/* multiply the mode clock and horizontal timings for MSO */
	list_for_each_entry(fixed_mode, &intel_connector->panel.fixed_modes, head)
		intel_edp_mso_mode_fixup(intel_connector, fixed_mode);

	/* fallback to VBT if available for eDP */
	if (!intel_panel_preferred_fixed_mode(intel_connector))
		intel_panel_add_vbt_lfp_fixed_mode(intel_connector);

	mutex_unlock(&dev_priv->drm.mode_config.mutex);

	if (!intel_panel_preferred_fixed_mode(intel_connector)) {
		drm_info(&dev_priv->drm,
			 "[ENCODER:%d:%s] failed to find fixed mode for the panel, disabling eDP\n",
			 encoder->base.base.id, encoder->base.name);
		goto out_vdd_off;
	}

	intel_panel_init(intel_connector, drm_edid);

	intel_edp_backlight_setup(intel_dp, intel_connector);

	intel_edp_add_properties(intel_dp);

	intel_pps_init_late(intel_dp);

	return true;

out_vdd_off:
	intel_pps_vdd_off_sync(intel_dp);

	return false;
}

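/*
 * Work handler scheduled when link training fails: mark the connector link
 * status as BAD and send a hotplug uevent so userspace can retry with a
 * fresh modeset.
 */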
static void intel_dp_modeset_retry_work_fn(struct work_struct *work)
{
	struct intel_connector *intel_connector;
	struct drm_connector *connector;

	intel_connector = container_of(work, typeof(*intel_connector),
				       modeset_retry_work);
	connector = &intel_connector->base;
	drm_dbg_kms(connector->dev, "[CONNECTOR:%d:%s]\n", connector->base.id,
		    connector->name);

	/* Grab the lock before changing the connector property */
	mutex_lock(&connector->dev->mode_config.mutex);
	/*
	 * Set connector link status to BAD and send a Uevent to notify
	 * userspace to do a modeset.
	 */
	drm_connector_set_link_status_property(connector,
					       DRM_MODE_LINK_STATUS_BAD);
	mutex_unlock(&connector->dev->mode_config.mutex);
	/* Send Hotplug uevent so userspace can reprobe */
	drm_kms_helper_connector_hotplug_event(connector);
}

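/*
 * Create and initialize the DP/eDP connector for a digital port: connector
 * and AUX channel init, eDP panel setup where applicable, source/sink rate
 * setup, MST encoder init and connector properties.
 */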
bool
intel_dp_init_connector(struct intel_digital_port *dig_port,
			struct intel_connector *intel_connector)
{
	struct drm_connector *connector = &intel_connector->base;
	struct intel_dp *intel_dp = &dig_port->dp;
	struct intel_encoder *intel_encoder = &dig_port->base;
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = intel_encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	int type;

	/* Initialize the work for modeset in case of link train failure */
	INIT_WORK(&intel_connector->modeset_retry_work,
		  intel_dp_modeset_retry_work_fn);

	if (drm_WARN(dev, dig_port->max_lanes < 1,
		     "Not enough lanes (%d) for DP on [ENCODER:%d:%s]\n",
		     dig_port->max_lanes, intel_encoder->base.base.id,
		     intel_encoder->base.name))
		return false;

	intel_dp->reset_link_params = true;
	intel_dp->pps.pps_pipe = INVALID_PIPE;
	intel_dp->pps.active_pipe = INVALID_PIPE;

	/* Preserve the current hw state. */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);
	intel_dp->attached_connector = intel_connector;

	if (_intel_dp_is_port_edp(dev_priv, intel_encoder->devdata, port)) {
		/*
		 * Currently we don't support eDP on TypeC ports, although in
		 * theory it could work on TypeC legacy ports.
		 */
		drm_WARN_ON(dev, intel_phy_is_tc(dev_priv, phy));
		type = DRM_MODE_CONNECTOR_eDP;
		intel_encoder->type = INTEL_OUTPUT_EDP;

		/* eDP only on port B and/or C on vlv/chv */
		if (drm_WARN_ON(dev, (IS_VALLEYVIEW(dev_priv) ||
				      IS_CHERRYVIEW(dev_priv)) &&
				port != PORT_B && port != PORT_C))
			return false;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
	}

	intel_dp_set_default_sink_rates(intel_dp);
	intel_dp_set_default_max_sink_lane_count(intel_dp);

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);

	drm_dbg_kms(&dev_priv->drm,
		    "Adding %s connector on [ENCODER:%d:%s]\n",
		    type == DRM_MODE_CONNECTOR_eDP ? "eDP" : "DP",
		    intel_encoder->base.base.id, intel_encoder->base.name);

	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	if (!HAS_GMCH(dev_priv) && DISPLAY_VER(dev_priv) < 12)
		connector->interlace_allowed = true;

	intel_connector->polled = DRM_CONNECTOR_POLL_HPD;

	intel_dp_aux_init(intel_dp);

	intel_connector_attach_encoder(intel_connector, intel_encoder);

	if (HAS_DDI(dev_priv))
		intel_connector->get_hw_state = intel_ddi_connector_get_hw_state;
	else
		intel_connector->get_hw_state = intel_connector_get_hw_state;

	if (!intel_edp_init_connector(intel_dp, intel_connector)) {
		intel_dp_aux_fini(intel_dp);
		goto fail;
	}

	intel_dp_set_source_rates(intel_dp);
	intel_dp_set_common_rates(intel_dp);
	intel_dp_reset_max_link_params(intel_dp);

	/* init MST on ports that can support it */
	intel_dp_mst_encoder_init(dig_port,
				  intel_connector->base.base.id);

	intel_dp_add_properties(intel_dp, connector);

	if (is_hdcp_supported(dev_priv, port) && !intel_dp_is_edp(intel_dp)) {
		int ret = intel_dp_hdcp_init(dig_port, intel_connector);

		if (ret)
			drm_dbg_kms(&dev_priv->drm,
				    "HDCP init failed, skipping.\n");
	}

	/*
	 * For the G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be
	 * written 0xd. Failure to do so will result in spurious interrupts
	 * being generated on the port when a cable is not attached.
	 */
	if (IS_G45(dev_priv)) {
		u32 temp = intel_de_read(dev_priv, PEG_BAND_GAP_DATA);

		intel_de_write(dev_priv, PEG_BAND_GAP_DATA,
			       (temp & ~0xf) | 0xd);
	}

	intel_dp->frl.is_trained = false;
	intel_dp->frl.trained_rate_gbps = 0;

	intel_psr_init(intel_dp);

	return true;

fail:
	intel_display_power_flush_work(dev_priv);
	drm_connector_cleanup(connector);

	return false;
}

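/* Suspend the MST topology managers of all DDI encoders running in MST mode. */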
void intel_dp_mst_suspend(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;

	if (!HAS_DISPLAY(dev_priv))
		return;

	for_each_intel_encoder(&dev_priv->drm, encoder) {
		struct intel_dp *intel_dp;

		if (encoder->type != INTEL_OUTPUT_DDI)
			continue;

		intel_dp = enc_to_intel_dp(encoder);

		if (!intel_dp_mst_source_support(intel_dp))
			continue;

		if (intel_dp->is_mst)
			drm_dp_mst_topology_mgr_suspend(&intel_dp->mst_mgr);
	}
}

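/*
 * Resume the MST topology managers; if a topology fails to come back, drop
 * the encoder out of MST mode.
 */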
void intel_dp_mst_resume(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;

	if (!HAS_DISPLAY(dev_priv))
		return;

	for_each_intel_encoder(&dev_priv->drm, encoder) {
		struct intel_dp *intel_dp;
		int ret;

		if (encoder->type != INTEL_OUTPUT_DDI)
			continue;

		intel_dp = enc_to_intel_dp(encoder);

		if (!intel_dp_mst_source_support(intel_dp))
			continue;

		ret = drm_dp_mst_topology_mgr_resume(&intel_dp->mst_mgr,
						     true);
		if (ret) {
			intel_dp->is_mst = false;
			drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst_mgr,
							false);
		}
	}
}
