xref: /linux/drivers/gpu/drm/i915/display/g4x_dp.c (revision 1e525507)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  *
5  * DisplayPort support for G4x, ILK, SNB, IVB, VLV, CHV (HSW+ handled by the DDI code).
6  */
7 
8 #include <linux/string_helpers.h>
9 
10 #include "g4x_dp.h"
11 #include "i915_reg.h"
12 #include "intel_audio.h"
13 #include "intel_backlight.h"
14 #include "intel_connector.h"
15 #include "intel_crtc.h"
16 #include "intel_de.h"
17 #include "intel_display_power.h"
18 #include "intel_display_types.h"
19 #include "intel_dp.h"
20 #include "intel_dp_aux.h"
21 #include "intel_dp_link_training.h"
22 #include "intel_dpio_phy.h"
23 #include "intel_fifo_underrun.h"
24 #include "intel_hdmi.h"
25 #include "intel_hotplug.h"
26 #include "intel_pch_display.h"
27 #include "intel_pps.h"
28 #include "vlv_sideband.h"
29 
30 static const struct dpll g4x_dpll[] = {
31 	{ .dot = 162000, .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8, },
32 	{ .dot = 270000, .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2, },
33 };
34 
35 static const struct dpll pch_dpll[] = {
36 	{ .dot = 162000, .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9, },
37 	{ .dot = 270000, .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8, },
38 };
39 
40 static const struct dpll vlv_dpll[] = {
41 	{ .dot = 162000, .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81, },
42 	{ .dot = 270000, .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27, },
43 };
44 
45 static const struct dpll chv_dpll[] = {
46 	/* m2 is .22 binary fixed point  */
47 	{ .dot = 162000, .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
48 	{ .dot = 270000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 /* 27.0 */ },
49 };
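
/*
 * For illustration (not used by the code): in .22 binary fixed point the
 * stored value is the real value scaled by 2^22, so the 162000 kHz entry
 * above has m2 = 0x819999a = 135895450, and 135895450 / 4194304 ~= 32.4,
 * matching the inline annotation; likewise 0x6c00000 / 4194304 = 27.0
 * exactly.
 */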
50 
51 const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
52 {
53 	return IS_CHERRYVIEW(i915) ? &chv_dpll[0] : &vlv_dpll[0];
54 }
55 
56 void g4x_dp_set_clock(struct intel_encoder *encoder,
57 		      struct intel_crtc_state *pipe_config)
58 {
59 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
60 	const struct dpll *divisor = NULL;
61 	int i, count = 0;
62 
63 	if (IS_G4X(dev_priv)) {
64 		divisor = g4x_dpll;
65 		count = ARRAY_SIZE(g4x_dpll);
66 	} else if (HAS_PCH_SPLIT(dev_priv)) {
67 		divisor = pch_dpll;
68 		count = ARRAY_SIZE(pch_dpll);
69 	} else if (IS_CHERRYVIEW(dev_priv)) {
70 		divisor = chv_dpll;
71 		count = ARRAY_SIZE(chv_dpll);
72 	} else if (IS_VALLEYVIEW(dev_priv)) {
73 		divisor = vlv_dpll;
74 		count = ARRAY_SIZE(vlv_dpll);
75 	}
76 
77 	if (divisor && count) {
78 		for (i = 0; i < count; i++) {
79 			if (pipe_config->port_clock == divisor[i].dot) {
80 				pipe_config->dpll = divisor[i];
81 				pipe_config->clock_set = true;
82 				break;
83 			}
84 		}
85 	}
86 }
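
/*
 * A minimal usage sketch with hypothetical values (not taken from any real
 * call site): on an ILK/SNB/IVB (PCH split) platform with
 * pipe_config->port_clock == 270000, the loop above copies the second
 * pch_dpll entry (p1=1, p2=10, n=2, m1=14, m2=8) into pipe_config->dpll and
 * sets pipe_config->clock_set = true; a port clock that matches no table
 * entry leaves both fields untouched.
 */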
87 
88 static void intel_dp_prepare(struct intel_encoder *encoder,
89 			     const struct intel_crtc_state *pipe_config)
90 {
91 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
92 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
93 	enum port port = encoder->port;
94 	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
95 	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
96 
97 	intel_dp_set_link_params(intel_dp,
98 				 pipe_config->port_clock,
99 				 pipe_config->lane_count);
100 
101 	/*
102 	 * There are four kinds of DP registers:
103 	 * IBX PCH
104 	 * SNB CPU
105 	 * IVB CPU
106 	 * CPT PCH
107 	 *
108 	 * IBX PCH and CPU are the same for almost everything,
109 	 * except that the CPU DP PLL is configured in this
110 	 * register.
111 	 *
112 	 * CPT PCH is quite different, having many bits moved
113 	 * to the TRANS_DP_CTL register instead. That
114 	 * configuration happens (oddly) in ilk_pch_enable().
115 	 */
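
	/*
	 * In the code below this works out, roughly, to three cases: IVB
	 * CPU eDP (port A) uses the CPT-style link training and pipe
	 * select bits in the DP register itself, CPT PCH ports (B-D)
	 * program enhanced framing via TRANS_DP_CTL (the rest of that
	 * register is set up in the PCH code), and everything else (g4x,
	 * IBX, SNB CPU eDP, VLV/CHV) uses the original DP register layout,
	 * including the pipe select field.
	 */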
116 
117 	/* Preserve the BIOS-computed detected bit. This is
118 	 * supposed to be read-only.
119 	 */
120 	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;
121 
122 	/* Handle DP bits in common between all three register formats */
123 	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
124 	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);
125 
126 	/* Split out the IBX/CPU vs CPT settings */
127 
128 	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
129 		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
130 			intel_dp->DP |= DP_SYNC_HS_HIGH;
131 		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
132 			intel_dp->DP |= DP_SYNC_VS_HIGH;
133 		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
134 
135 		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
136 			intel_dp->DP |= DP_ENHANCED_FRAMING;
137 
138 		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
139 	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
140 		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
141 
142 		intel_de_rmw(dev_priv, TRANS_DP_CTL(crtc->pipe),
143 			     TRANS_DP_ENH_FRAMING,
144 			     pipe_config->enhanced_framing ?
145 			     TRANS_DP_ENH_FRAMING : 0);
146 	} else {
147 		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
148 			intel_dp->DP |= DP_COLOR_RANGE_16_235;
149 
150 		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
151 			intel_dp->DP |= DP_SYNC_HS_HIGH;
152 		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
153 			intel_dp->DP |= DP_SYNC_VS_HIGH;
154 		intel_dp->DP |= DP_LINK_TRAIN_OFF;
155 
156 		if (pipe_config->enhanced_framing)
157 			intel_dp->DP |= DP_ENHANCED_FRAMING;
158 
159 		if (IS_CHERRYVIEW(dev_priv))
160 			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
161 		else
162 			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
163 	}
164 }
165 
166 static void assert_dp_port(struct intel_dp *intel_dp, bool state)
167 {
168 	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
169 	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
170 	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;
171 
172 	I915_STATE_WARN(dev_priv, cur_state != state,
173 			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
174 			dig_port->base.base.base.id, dig_port->base.base.name,
175 			str_on_off(state), str_on_off(cur_state));
176 }
177 #define assert_dp_port_disabled(d) assert_dp_port((d), false)
178 
179 static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
180 {
181 	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;
182 
183 	I915_STATE_WARN(dev_priv, cur_state != state,
184 			"eDP PLL state assertion failure (expected %s, current %s)\n",
185 			str_on_off(state), str_on_off(cur_state));
186 }
187 #define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
188 #define assert_edp_pll_disabled(d) assert_edp_pll((d), false)
189 
190 static void ilk_edp_pll_on(struct intel_dp *intel_dp,
191 			   const struct intel_crtc_state *pipe_config)
192 {
193 	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
194 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
195 
196 	assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder);
197 	assert_dp_port_disabled(intel_dp);
198 	assert_edp_pll_disabled(dev_priv);
199 
200 	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
201 		    pipe_config->port_clock);
202 
203 	intel_dp->DP &= ~DP_PLL_FREQ_MASK;
204 
205 	if (pipe_config->port_clock == 162000)
206 		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
207 	else
208 		intel_dp->DP |= DP_PLL_FREQ_270MHZ;
209 
210 	intel_de_write(dev_priv, DP_A, intel_dp->DP);
211 	intel_de_posting_read(dev_priv, DP_A);
212 	udelay(500);
213 
214 	/*
215 	 * [DevILK] Workaround required when enabling DP PLL
216 	 * while a pipe is enabled going to FDI:
217 	 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
218 	 * 2. Program DP PLL enable
219 	 */
220 	if (IS_IRONLAKE(dev_priv))
221 		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);
222 
223 	intel_dp->DP |= DP_PLL_ENABLE;
224 
225 	intel_de_write(dev_priv, DP_A, intel_dp->DP);
226 	intel_de_posting_read(dev_priv, DP_A);
227 	udelay(200);
228 }
229 
230 static void ilk_edp_pll_off(struct intel_dp *intel_dp,
231 			    const struct intel_crtc_state *old_crtc_state)
232 {
233 	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
234 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
235 
236 	assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder);
237 	assert_dp_port_disabled(intel_dp);
238 	assert_edp_pll_enabled(dev_priv);
239 
240 	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");
241 
242 	intel_dp->DP &= ~DP_PLL_ENABLE;
243 
244 	intel_de_write(dev_priv, DP_A, intel_dp->DP);
245 	intel_de_posting_read(dev_priv, DP_A);
246 	udelay(200);
247 }
248 
249 static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
250 				 enum port port, enum pipe *pipe)
251 {
252 	enum pipe p;
253 
254 	for_each_pipe(dev_priv, p) {
255 		u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));
256 
257 		if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
258 			*pipe = p;
259 			return true;
260 		}
261 	}
262 
263 	drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
264 		    port_name(port));
265 
266 	/* must initialize pipe to something for the asserts */
267 	*pipe = PIPE_A;
268 
269 	return false;
270 }
271 
272 bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
273 			 i915_reg_t dp_reg, enum port port,
274 			 enum pipe *pipe)
275 {
276 	bool ret;
277 	u32 val;
278 
279 	val = intel_de_read(dev_priv, dp_reg);
280 
281 	ret = val & DP_PORT_EN;
282 
283 	/* asserts want to know the pipe even if the port is disabled */
284 	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
285 		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
286 	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
287 		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
288 	else if (IS_CHERRYVIEW(dev_priv))
289 		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
290 	else
291 		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;
292 
293 	return ret;
294 }
295 
296 static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
297 				  enum pipe *pipe)
298 {
299 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
300 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
301 	intel_wakeref_t wakeref;
302 	bool ret;
303 
304 	wakeref = intel_display_power_get_if_enabled(dev_priv,
305 						     encoder->power_domain);
306 	if (!wakeref)
307 		return false;
308 
309 	ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
310 				  encoder->port, pipe);
311 
312 	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
313 
314 	return ret;
315 }
316 
317 static void g4x_dp_get_m_n(struct intel_crtc_state *crtc_state)
318 {
319 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
320 
321 	if (crtc_state->has_pch_encoder) {
322 		intel_pch_transcoder_get_m1_n1(crtc, &crtc_state->dp_m_n);
323 		intel_pch_transcoder_get_m2_n2(crtc, &crtc_state->dp_m2_n2);
324 	} else {
325 		intel_cpu_transcoder_get_m1_n1(crtc, crtc_state->cpu_transcoder,
326 					       &crtc_state->dp_m_n);
327 		intel_cpu_transcoder_get_m2_n2(crtc, crtc_state->cpu_transcoder,
328 					       &crtc_state->dp_m2_n2);
329 	}
330 }
331 
332 static void intel_dp_get_config(struct intel_encoder *encoder,
333 				struct intel_crtc_state *pipe_config)
334 {
335 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
336 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
337 	u32 tmp, flags = 0;
338 	enum port port = encoder->port;
339 	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
340 
341 	if (encoder->type == INTEL_OUTPUT_EDP)
342 		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
343 	else
344 		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);
345 
346 	tmp = intel_de_read(dev_priv, intel_dp->output_reg);
347 
348 	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;
349 
350 	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
351 		u32 trans_dp = intel_de_read(dev_priv,
352 					     TRANS_DP_CTL(crtc->pipe));
353 
354 		if (trans_dp & TRANS_DP_ENH_FRAMING)
355 			pipe_config->enhanced_framing = true;
356 
357 		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
358 			flags |= DRM_MODE_FLAG_PHSYNC;
359 		else
360 			flags |= DRM_MODE_FLAG_NHSYNC;
361 
362 		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
363 			flags |= DRM_MODE_FLAG_PVSYNC;
364 		else
365 			flags |= DRM_MODE_FLAG_NVSYNC;
366 	} else {
367 		if (tmp & DP_ENHANCED_FRAMING)
368 			pipe_config->enhanced_framing = true;
369 
370 		if (tmp & DP_SYNC_HS_HIGH)
371 			flags |= DRM_MODE_FLAG_PHSYNC;
372 		else
373 			flags |= DRM_MODE_FLAG_NHSYNC;
374 
375 		if (tmp & DP_SYNC_VS_HIGH)
376 			flags |= DRM_MODE_FLAG_PVSYNC;
377 		else
378 			flags |= DRM_MODE_FLAG_NVSYNC;
379 	}
380 
381 	pipe_config->hw.adjusted_mode.flags |= flags;
382 
383 	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
384 		pipe_config->limited_color_range = true;
385 
386 	pipe_config->lane_count =
387 		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;
388 
389 	g4x_dp_get_m_n(pipe_config);
390 
391 	if (port == PORT_A) {
392 		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
393 			pipe_config->port_clock = 162000;
394 		else
395 			pipe_config->port_clock = 270000;
396 	}
397 
398 	pipe_config->hw.adjusted_mode.crtc_clock =
399 		intel_dotclock_calculate(pipe_config->port_clock,
400 					 &pipe_config->dp_m_n);
401 
402 	if (intel_dp_is_edp(intel_dp))
403 		intel_edp_fixup_vbt_bpp(encoder, pipe_config->pipe_bpp);
404 
405 	intel_audio_codec_get_config(encoder, pipe_config);
406 }
407 
408 static void
409 intel_dp_link_down(struct intel_encoder *encoder,
410 		   const struct intel_crtc_state *old_crtc_state)
411 {
412 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
413 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
414 	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
415 	enum port port = encoder->port;
416 
417 	if (drm_WARN_ON(&dev_priv->drm,
418 			(intel_de_read(dev_priv, intel_dp->output_reg) &
419 			 DP_PORT_EN) == 0))
420 		return;
421 
422 	drm_dbg_kms(&dev_priv->drm, "\n");
423 
424 	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
425 	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
426 		intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
427 		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
428 	} else {
429 		intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
430 		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE;
431 	}
432 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
433 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
434 
435 	intel_dp->DP &= ~DP_PORT_EN;
436 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
437 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
438 
439 	/*
440 	 * HW workaround for IBX: we need to move the port
441 	 * to transcoder A after disabling it to allow the
442 	 * matching HDMI port to be enabled on transcoder A.
443 	 */
444 	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
445 		/*
446 		 * We get CPU/PCH FIFO underruns on the other pipe when
447 		 * doing the workaround. Sweep them under the rug.
448 		 */
449 		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
450 		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);
451 
452 		/* always enable with pattern 1 (as per spec) */
453 		intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
454 		intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
455 			DP_LINK_TRAIN_PAT_1;
456 		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
457 		intel_de_posting_read(dev_priv, intel_dp->output_reg);
458 
459 		intel_dp->DP &= ~DP_PORT_EN;
460 		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
461 		intel_de_posting_read(dev_priv, intel_dp->output_reg);
462 
463 		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
464 		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
465 		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
466 	}
467 
468 	msleep(intel_dp->pps.panel_power_down_delay);
469 
470 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
471 		intel_wakeref_t wakeref;
472 
473 		with_intel_pps_lock(intel_dp, wakeref)
474 			intel_dp->pps.active_pipe = INVALID_PIPE;
475 	}
476 }
477 
478 static void g4x_dp_audio_enable(struct intel_encoder *encoder,
479 				const struct intel_crtc_state *crtc_state,
480 				const struct drm_connector_state *conn_state)
481 {
482 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
483 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
484 
485 	if (!crtc_state->has_audio)
486 		return;
487 
488 	/* Enable audio presence detect */
489 	intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
490 	intel_de_write(i915, intel_dp->output_reg, intel_dp->DP);
491 
492 	intel_audio_codec_enable(encoder, crtc_state, conn_state);
493 }
494 
495 static void g4x_dp_audio_disable(struct intel_encoder *encoder,
496 				 const struct intel_crtc_state *old_crtc_state,
497 				 const struct drm_connector_state *old_conn_state)
498 {
499 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
500 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
501 
502 	if (!old_crtc_state->has_audio)
503 		return;
504 
505 	intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state);
506 
507 	/* Disable audio presence detect */
508 	intel_dp->DP &= ~DP_AUDIO_OUTPUT_ENABLE;
509 	intel_de_write(i915, intel_dp->output_reg, intel_dp->DP);
510 }
511 
512 static void intel_disable_dp(struct intel_atomic_state *state,
513 			     struct intel_encoder *encoder,
514 			     const struct intel_crtc_state *old_crtc_state,
515 			     const struct drm_connector_state *old_conn_state)
516 {
517 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
518 
519 	intel_dp->link_trained = false;
520 
521 	/*
522 	 * Make sure the panel is off before trying to change the mode.
523 	 * But also ensure that we have vdd while we switch off the panel.
524 	 */
525 	intel_pps_vdd_on(intel_dp);
526 	intel_edp_backlight_off(old_conn_state);
527 	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
528 	intel_pps_off(intel_dp);
529 }
530 
531 static void g4x_disable_dp(struct intel_atomic_state *state,
532 			   struct intel_encoder *encoder,
533 			   const struct intel_crtc_state *old_crtc_state,
534 			   const struct drm_connector_state *old_conn_state)
535 {
536 	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
537 }
538 
539 static void vlv_disable_dp(struct intel_atomic_state *state,
540 			   struct intel_encoder *encoder,
541 			   const struct intel_crtc_state *old_crtc_state,
542 			   const struct drm_connector_state *old_conn_state)
543 {
544 	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
545 }
546 
547 static void g4x_post_disable_dp(struct intel_atomic_state *state,
548 				struct intel_encoder *encoder,
549 				const struct intel_crtc_state *old_crtc_state,
550 				const struct drm_connector_state *old_conn_state)
551 {
552 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
553 	enum port port = encoder->port;
554 
555 	/*
556 	 * Bspec does not list a specific disable sequence for g4x DP.
557 	 * Follow the ilk+ sequence (disable pipe before the port) for
558 	 * g4x DP, since it does not suffer from the underruns seen with
559 	 * the normal g4x modeset sequence (disable pipe after the port).
560 	 */
561 	intel_dp_link_down(encoder, old_crtc_state);
562 
563 	/* Only ilk+ has port A */
564 	if (port == PORT_A)
565 		ilk_edp_pll_off(intel_dp, old_crtc_state);
566 }
567 
568 static void vlv_post_disable_dp(struct intel_atomic_state *state,
569 				struct intel_encoder *encoder,
570 				const struct intel_crtc_state *old_crtc_state,
571 				const struct drm_connector_state *old_conn_state)
572 {
573 	intel_dp_link_down(encoder, old_crtc_state);
574 }
575 
576 static void chv_post_disable_dp(struct intel_atomic_state *state,
577 				struct intel_encoder *encoder,
578 				const struct intel_crtc_state *old_crtc_state,
579 				const struct drm_connector_state *old_conn_state)
580 {
581 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
582 
583 	intel_dp_link_down(encoder, old_crtc_state);
584 
585 	vlv_dpio_get(dev_priv);
586 
587 	/* Assert data lane reset */
588 	chv_data_lane_soft_reset(encoder, old_crtc_state, true);
589 
590 	vlv_dpio_put(dev_priv);
591 }
592 
593 static void
594 cpt_set_link_train(struct intel_dp *intel_dp,
595 		   const struct intel_crtc_state *crtc_state,
596 		   u8 dp_train_pat)
597 {
598 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
599 
600 	intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
601 
602 	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
603 	case DP_TRAINING_PATTERN_DISABLE:
604 		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
605 		break;
606 	case DP_TRAINING_PATTERN_1:
607 		intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT;
608 		break;
609 	case DP_TRAINING_PATTERN_2:
610 		intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT;
611 		break;
612 	default:
613 		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
614 		return;
615 	}
616 
617 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
618 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
619 }
620 
621 static void
622 g4x_set_link_train(struct intel_dp *intel_dp,
623 		   const struct intel_crtc_state *crtc_state,
624 		   u8 dp_train_pat)
625 {
626 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
627 
628 	intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
629 
630 	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
631 	case DP_TRAINING_PATTERN_DISABLE:
632 		intel_dp->DP |= DP_LINK_TRAIN_OFF;
633 		break;
634 	case DP_TRAINING_PATTERN_1:
635 		intel_dp->DP |= DP_LINK_TRAIN_PAT_1;
636 		break;
637 	case DP_TRAINING_PATTERN_2:
638 		intel_dp->DP |= DP_LINK_TRAIN_PAT_2;
639 		break;
640 	default:
641 		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
642 		return;
643 	}
644 
645 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
646 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
647 }
648 
649 static void intel_dp_enable_port(struct intel_dp *intel_dp,
650 				 const struct intel_crtc_state *crtc_state)
651 {
652 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
653 
654 	/* enable with pattern 1 (as per spec) */
655 
656 	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
657 					       DP_PHY_DPRX, DP_TRAINING_PATTERN_1);
658 
659 	/*
660 	 * Magic for VLV/CHV. We _must_ first set up the register
661 	 * without actually enabling the port, and then do another
662 	 * write to enable the port. Otherwise link training will
663 	 * fail when the power sequencer is freshly used for this port.
664 	 */
665 	intel_dp->DP |= DP_PORT_EN;
666 
667 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
668 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
669 }
670 
671 static void intel_enable_dp(struct intel_atomic_state *state,
672 			    struct intel_encoder *encoder,
673 			    const struct intel_crtc_state *pipe_config,
674 			    const struct drm_connector_state *conn_state)
675 {
676 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
677 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
678 	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
679 	intel_wakeref_t wakeref;
680 
681 	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
682 		return;
683 
684 	with_intel_pps_lock(intel_dp, wakeref) {
685 		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
686 			vlv_pps_init(encoder, pipe_config);
687 
688 		intel_dp_enable_port(intel_dp, pipe_config);
689 
690 		intel_pps_vdd_on_unlocked(intel_dp);
691 		intel_pps_on_unlocked(intel_dp);
692 		intel_pps_vdd_off_unlocked(intel_dp, true);
693 	}
694 
695 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
696 		unsigned int lane_mask = 0x0;
697 
698 		if (IS_CHERRYVIEW(dev_priv))
699 			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);
700 
701 		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
702 				    lane_mask);
703 	}
704 
705 	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
706 	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
707 	intel_dp_check_frl_training(intel_dp);
708 	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
709 	intel_dp_start_link_train(intel_dp, pipe_config);
710 	intel_dp_stop_link_train(intel_dp, pipe_config);
711 }
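
/*
 * A rough summary of the ordering above: the port is enabled (with training
 * pattern 1 already programmed) while holding the PPS lock, panel power is
 * brought up with VDD force temporarily applied, VLV/CHV then wait for the
 * PHY lanes to become ready, and only afterwards is the sink set to D0 and
 * the link trained.
 */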
712 
713 static void g4x_enable_dp(struct intel_atomic_state *state,
714 			  struct intel_encoder *encoder,
715 			  const struct intel_crtc_state *pipe_config,
716 			  const struct drm_connector_state *conn_state)
717 {
718 	intel_enable_dp(state, encoder, pipe_config, conn_state);
719 	intel_edp_backlight_on(pipe_config, conn_state);
720 }
721 
722 static void vlv_enable_dp(struct intel_atomic_state *state,
723 			  struct intel_encoder *encoder,
724 			  const struct intel_crtc_state *pipe_config,
725 			  const struct drm_connector_state *conn_state)
726 {
727 	intel_edp_backlight_on(pipe_config, conn_state);
728 }
729 
730 static void g4x_pre_enable_dp(struct intel_atomic_state *state,
731 			      struct intel_encoder *encoder,
732 			      const struct intel_crtc_state *pipe_config,
733 			      const struct drm_connector_state *conn_state)
734 {
735 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
736 	enum port port = encoder->port;
737 
738 	intel_dp_prepare(encoder, pipe_config);
739 
740 	/* Only ilk+ has port A */
741 	if (port == PORT_A)
742 		ilk_edp_pll_on(intel_dp, pipe_config);
743 }
744 
745 static void vlv_pre_enable_dp(struct intel_atomic_state *state,
746 			      struct intel_encoder *encoder,
747 			      const struct intel_crtc_state *pipe_config,
748 			      const struct drm_connector_state *conn_state)
749 {
750 	vlv_phy_pre_encoder_enable(encoder, pipe_config);
751 
752 	intel_enable_dp(state, encoder, pipe_config, conn_state);
753 }
754 
755 static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
756 				  struct intel_encoder *encoder,
757 				  const struct intel_crtc_state *pipe_config,
758 				  const struct drm_connector_state *conn_state)
759 {
760 	intel_dp_prepare(encoder, pipe_config);
761 
762 	vlv_phy_pre_pll_enable(encoder, pipe_config);
763 }
764 
765 static void chv_pre_enable_dp(struct intel_atomic_state *state,
766 			      struct intel_encoder *encoder,
767 			      const struct intel_crtc_state *pipe_config,
768 			      const struct drm_connector_state *conn_state)
769 {
770 	chv_phy_pre_encoder_enable(encoder, pipe_config);
771 
772 	intel_enable_dp(state, encoder, pipe_config, conn_state);
773 
774 	/* Second common lane will stay alive on its own now */
775 	chv_phy_release_cl2_override(encoder);
776 }
777 
778 static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
779 				  struct intel_encoder *encoder,
780 				  const struct intel_crtc_state *pipe_config,
781 				  const struct drm_connector_state *conn_state)
782 {
783 	intel_dp_prepare(encoder, pipe_config);
784 
785 	chv_phy_pre_pll_enable(encoder, pipe_config);
786 }
787 
788 static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
789 				    struct intel_encoder *encoder,
790 				    const struct intel_crtc_state *old_crtc_state,
791 				    const struct drm_connector_state *old_conn_state)
792 {
793 	chv_phy_post_pll_disable(encoder, old_crtc_state);
794 }
795 
796 static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
797 				 const struct intel_crtc_state *crtc_state)
798 {
799 	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
800 }
801 
802 static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
803 				 const struct intel_crtc_state *crtc_state)
804 {
805 	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
806 }
807 
808 static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
809 {
810 	return DP_TRAIN_PRE_EMPH_LEVEL_2;
811 }
812 
813 static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
814 {
815 	return DP_TRAIN_PRE_EMPH_LEVEL_3;
816 }
817 
818 static void vlv_set_signal_levels(struct intel_encoder *encoder,
819 				  const struct intel_crtc_state *crtc_state)
820 {
821 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
822 	unsigned long demph_reg_value, preemph_reg_value,
823 		uniqtranscale_reg_value;
824 	u8 train_set = intel_dp->train_set[0];
825 
826 	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
827 	case DP_TRAIN_PRE_EMPH_LEVEL_0:
828 		preemph_reg_value = 0x0004000;
829 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
830 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
831 			demph_reg_value = 0x2B405555;
832 			uniqtranscale_reg_value = 0x552AB83A;
833 			break;
834 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
835 			demph_reg_value = 0x2B404040;
836 			uniqtranscale_reg_value = 0x5548B83A;
837 			break;
838 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
839 			demph_reg_value = 0x2B245555;
840 			uniqtranscale_reg_value = 0x5560B83A;
841 			break;
842 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
843 			demph_reg_value = 0x2B405555;
844 			uniqtranscale_reg_value = 0x5598DA3A;
845 			break;
846 		default:
847 			return;
848 		}
849 		break;
850 	case DP_TRAIN_PRE_EMPH_LEVEL_1:
851 		preemph_reg_value = 0x0002000;
852 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
853 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
854 			demph_reg_value = 0x2B404040;
855 			uniqtranscale_reg_value = 0x5552B83A;
856 			break;
857 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
858 			demph_reg_value = 0x2B404848;
859 			uniqtranscale_reg_value = 0x5580B83A;
860 			break;
861 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
862 			demph_reg_value = 0x2B404040;
863 			uniqtranscale_reg_value = 0x55ADDA3A;
864 			break;
865 		default:
866 			return;
867 		}
868 		break;
869 	case DP_TRAIN_PRE_EMPH_LEVEL_2:
870 		preemph_reg_value = 0x0000000;
871 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
872 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
873 			demph_reg_value = 0x2B305555;
874 			uniqtranscale_reg_value = 0x5570B83A;
875 			break;
876 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
877 			demph_reg_value = 0x2B2B4040;
878 			uniqtranscale_reg_value = 0x55ADDA3A;
879 			break;
880 		default:
881 			return;
882 		}
883 		break;
884 	case DP_TRAIN_PRE_EMPH_LEVEL_3:
885 		preemph_reg_value = 0x0006000;
886 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
887 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
888 			demph_reg_value = 0x1B405555;
889 			uniqtranscale_reg_value = 0x55ADDA3A;
890 			break;
891 		default:
892 			return;
893 		}
894 		break;
895 	default:
896 		return;
897 	}
898 
899 	vlv_set_phy_signal_level(encoder, crtc_state,
900 				 demph_reg_value, preemph_reg_value,
901 				 uniqtranscale_reg_value, 0);
902 }
903 
904 static void chv_set_signal_levels(struct intel_encoder *encoder,
905 				  const struct intel_crtc_state *crtc_state)
906 {
907 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
908 	u32 deemph_reg_value, margin_reg_value;
909 	bool uniq_trans_scale = false;
910 	u8 train_set = intel_dp->train_set[0];
911 
912 	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
913 	case DP_TRAIN_PRE_EMPH_LEVEL_0:
914 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
915 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
916 			deemph_reg_value = 128;
917 			margin_reg_value = 52;
918 			break;
919 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
920 			deemph_reg_value = 128;
921 			margin_reg_value = 77;
922 			break;
923 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
924 			deemph_reg_value = 128;
925 			margin_reg_value = 102;
926 			break;
927 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
928 			deemph_reg_value = 128;
929 			margin_reg_value = 154;
930 			uniq_trans_scale = true;
931 			break;
932 		default:
933 			return;
934 		}
935 		break;
936 	case DP_TRAIN_PRE_EMPH_LEVEL_1:
937 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
938 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
939 			deemph_reg_value = 85;
940 			margin_reg_value = 78;
941 			break;
942 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
943 			deemph_reg_value = 85;
944 			margin_reg_value = 116;
945 			break;
946 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
947 			deemph_reg_value = 85;
948 			margin_reg_value = 154;
949 			break;
950 		default:
951 			return;
952 		}
953 		break;
954 	case DP_TRAIN_PRE_EMPH_LEVEL_2:
955 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
956 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
957 			deemph_reg_value = 64;
958 			margin_reg_value = 104;
959 			break;
960 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
961 			deemph_reg_value = 64;
962 			margin_reg_value = 154;
963 			break;
964 		default:
965 			return;
966 		}
967 		break;
968 	case DP_TRAIN_PRE_EMPH_LEVEL_3:
969 		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
970 		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
971 			deemph_reg_value = 43;
972 			margin_reg_value = 154;
973 			break;
974 		default:
975 			return;
976 		}
977 		break;
978 	default:
979 		return;
980 	}
981 
982 	chv_set_phy_signal_level(encoder, crtc_state,
983 				 deemph_reg_value, margin_reg_value,
984 				 uniq_trans_scale);
985 }
986 
987 static u32 g4x_signal_levels(u8 train_set)
988 {
989 	u32 signal_levels = 0;
990 
991 	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
992 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
993 	default:
994 		signal_levels |= DP_VOLTAGE_0_4;
995 		break;
996 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
997 		signal_levels |= DP_VOLTAGE_0_6;
998 		break;
999 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
1000 		signal_levels |= DP_VOLTAGE_0_8;
1001 		break;
1002 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
1003 		signal_levels |= DP_VOLTAGE_1_2;
1004 		break;
1005 	}
1006 	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
1007 	case DP_TRAIN_PRE_EMPH_LEVEL_0:
1008 	default:
1009 		signal_levels |= DP_PRE_EMPHASIS_0;
1010 		break;
1011 	case DP_TRAIN_PRE_EMPH_LEVEL_1:
1012 		signal_levels |= DP_PRE_EMPHASIS_3_5;
1013 		break;
1014 	case DP_TRAIN_PRE_EMPH_LEVEL_2:
1015 		signal_levels |= DP_PRE_EMPHASIS_6;
1016 		break;
1017 	case DP_TRAIN_PRE_EMPH_LEVEL_3:
1018 		signal_levels |= DP_PRE_EMPHASIS_9_5;
1019 		break;
1020 	}
1021 	return signal_levels;
1022 }
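
/*
 * A worked example of the mapping above, with values read straight off the
 * switch statements: train_set = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 |
 * DP_TRAIN_PRE_EMPH_LEVEL_1 yields DP_VOLTAGE_0_8 | DP_PRE_EMPHASIS_3_5,
 * i.e. a nominal 0.8 V swing with 3.5 dB of pre-emphasis.
 */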
1023 
1024 static void
1025 g4x_set_signal_levels(struct intel_encoder *encoder,
1026 		      const struct intel_crtc_state *crtc_state)
1027 {
1028 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1029 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1030 	u8 train_set = intel_dp->train_set[0];
1031 	u32 signal_levels;
1032 
1033 	signal_levels = g4x_signal_levels(train_set);
1034 
1035 	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1036 		    signal_levels);
1037 
1038 	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
1039 	intel_dp->DP |= signal_levels;
1040 
1041 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1042 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
1043 }
1044 
1045 /* SNB CPU eDP voltage swing and pre-emphasis control */
1046 static u32 snb_cpu_edp_signal_levels(u8 train_set)
1047 {
1048 	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1049 					DP_TRAIN_PRE_EMPHASIS_MASK);
1050 
1051 	switch (signal_levels) {
1052 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1053 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1054 		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1055 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1056 		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
1057 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1058 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1059 		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
1060 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1061 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1062 		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
1063 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1064 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1065 		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
1066 	default:
1067 		MISSING_CASE(signal_levels);
1068 		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1069 	}
1070 }
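
/*
 * Purely illustrative: a request for voltage swing level 1 with pre-emphasis
 * level 1 hits the DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1
 * case above and is programmed as EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B.
 * Several swing/pre-emphasis combinations share a return value, presumably
 * because SNB CPU eDP only exposes this coarse set of levels.
 */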
1071 
1072 static void
1073 snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
1074 			      const struct intel_crtc_state *crtc_state)
1075 {
1076 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1077 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1078 	u8 train_set = intel_dp->train_set[0];
1079 	u32 signal_levels;
1080 
1081 	signal_levels = snb_cpu_edp_signal_levels(train_set);
1082 
1083 	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1084 		    signal_levels);
1085 
1086 	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
1087 	intel_dp->DP |= signal_levels;
1088 
1089 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1090 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
1091 }
1092 
1093 /* IVB CPU eDP voltage swing and pre-emphasis control */
1094 static u32 ivb_cpu_edp_signal_levels(u8 train_set)
1095 {
1096 	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1097 					DP_TRAIN_PRE_EMPHASIS_MASK);
1098 
1099 	switch (signal_levels) {
1100 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1101 		return EDP_LINK_TRAIN_400MV_0DB_IVB;
1102 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1103 		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
1104 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1105 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
1106 		return EDP_LINK_TRAIN_400MV_6DB_IVB;
1107 
1108 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1109 		return EDP_LINK_TRAIN_600MV_0DB_IVB;
1110 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1111 		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;
1112 
1113 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
1114 		return EDP_LINK_TRAIN_800MV_0DB_IVB;
1115 	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
1116 		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;
1117 
1118 	default:
1119 		MISSING_CASE(signal_levels);
1120 		return EDP_LINK_TRAIN_500MV_0DB_IVB;
1121 	}
1122 }
1123 
1124 static void
1125 ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
1126 			      const struct intel_crtc_state *crtc_state)
1127 {
1128 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1129 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1130 	u8 train_set = intel_dp->train_set[0];
1131 	u32 signal_levels;
1132 
1133 	signal_levels = ivb_cpu_edp_signal_levels(train_set);
1134 
1135 	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
1136 		    signal_levels);
1137 
1138 	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
1139 	intel_dp->DP |= signal_levels;
1140 
1141 	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
1142 	intel_de_posting_read(dev_priv, intel_dp->output_reg);
1143 }
1144 
1145 /*
1146  * If the display is now connected, check the link status;
1147  * there have been known issues of link loss triggering a
1148  * long HPD pulse.
1149  *
1150  * Some sinks (e.g. the ASUS PB287Q) seem to perform some
1151  * weird HPD ping-pong during modesets, so we can apparently
1152  * end up with HPD going low during a modeset and then
1153  * coming back up soon after. Once that happens we must
1154  * retrain the link to get a picture, in case no userspace
1155  * component reacted to the intermittent HPD dip.
1156  */
1157 static enum intel_hotplug_state
1158 intel_dp_hotplug(struct intel_encoder *encoder,
1159 		 struct intel_connector *connector)
1160 {
1161 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1162 	struct drm_modeset_acquire_ctx ctx;
1163 	enum intel_hotplug_state state;
1164 	int ret;
1165 
1166 	if (intel_dp->compliance.test_active &&
1167 	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
1168 		intel_dp_phy_test(encoder);
1169 		/* just do the PHY test and nothing else */
1170 		return INTEL_HOTPLUG_UNCHANGED;
1171 	}
1172 
1173 	state = intel_encoder_hotplug(encoder, connector);
1174 
1175 	drm_modeset_acquire_init(&ctx, 0);
1176 
1177 	for (;;) {
1178 		ret = intel_dp_retrain_link(encoder, &ctx);
1179 
1180 		if (ret == -EDEADLK) {
1181 			drm_modeset_backoff(&ctx);
1182 			continue;
1183 		}
1184 
1185 		break;
1186 	}
1187 
1188 	drm_modeset_drop_locks(&ctx);
1189 	drm_modeset_acquire_fini(&ctx);
1190 	drm_WARN(encoder->base.dev, ret,
1191 		 "Acquiring modeset locks failed with %i\n", ret);
1192 
1193 	/*
1194 	 * Keeping it consistent with intel_ddi_hotplug() and
1195 	 * intel_hdmi_hotplug().
1196 	 */
1197 	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
1198 		state = INTEL_HOTPLUG_RETRY;
1199 
1200 	return state;
1201 }
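
/*
 * The loop above is the usual drm_modeset_acquire_ctx retry idiom: on
 * -EDEADLK the held locks are backed off with drm_modeset_backoff() and the
 * retrain is attempted again; any other result ends the loop and is only
 * reported via the drm_WARN above.
 */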
1202 
1203 static bool ibx_digital_port_connected(struct intel_encoder *encoder)
1204 {
1205 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1206 	u32 bit = dev_priv->display.hotplug.pch_hpd[encoder->hpd_pin];
1207 
1208 	return intel_de_read(dev_priv, SDEISR) & bit;
1209 }
1210 
1211 static bool g4x_digital_port_connected(struct intel_encoder *encoder)
1212 {
1213 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1214 	u32 bit;
1215 
1216 	switch (encoder->hpd_pin) {
1217 	case HPD_PORT_B:
1218 		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
1219 		break;
1220 	case HPD_PORT_C:
1221 		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
1222 		break;
1223 	case HPD_PORT_D:
1224 		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
1225 		break;
1226 	default:
1227 		MISSING_CASE(encoder->hpd_pin);
1228 		return false;
1229 	}
1230 
1231 	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT) & bit;
1232 }
1233 
1234 static bool ilk_digital_port_connected(struct intel_encoder *encoder)
1235 {
1236 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1237 	u32 bit = dev_priv->display.hotplug.hpd[encoder->hpd_pin];
1238 
1239 	return intel_de_read(dev_priv, DEISR) & bit;
1240 }
1241 
1242 static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
1243 {
1244 	intel_dp_encoder_flush_work(encoder);
1245 
1246 	drm_encoder_cleanup(encoder);
1247 	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
1248 }
1249 
1250 enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
1251 {
1252 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
1253 	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
1254 	enum pipe pipe;
1255 
1256 	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
1257 				encoder->port, &pipe))
1258 		return pipe;
1259 
1260 	return INVALID_PIPE;
1261 }
1262 
1263 static void intel_dp_encoder_reset(struct drm_encoder *encoder)
1264 {
1265 	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
1266 	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));
1267 
1268 	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);
1269 
1270 	intel_dp->reset_link_params = true;
1271 
1272 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
1273 		intel_wakeref_t wakeref;
1274 
1275 		with_intel_pps_lock(intel_dp, wakeref)
1276 			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
1277 	}
1278 
1279 	intel_pps_encoder_reset(intel_dp);
1280 }
1281 
1282 static const struct drm_encoder_funcs intel_dp_enc_funcs = {
1283 	.reset = intel_dp_encoder_reset,
1284 	.destroy = intel_dp_encoder_destroy,
1285 };
1286 
1287 bool g4x_dp_init(struct drm_i915_private *dev_priv,
1288 		 i915_reg_t output_reg, enum port port)
1289 {
1290 	const struct intel_bios_encoder_data *devdata;
1291 	struct intel_digital_port *dig_port;
1292 	struct intel_encoder *intel_encoder;
1293 	struct drm_encoder *encoder;
1294 	struct intel_connector *intel_connector;
1295 
1296 	if (!assert_port_valid(dev_priv, port))
1297 		return false;
1298 
1299 	devdata = intel_bios_encoder_data_lookup(dev_priv, port);
1300 
1301 	/* FIXME bail? */
1302 	if (!devdata)
1303 		drm_dbg_kms(&dev_priv->drm, "No VBT child device for DP-%c\n",
1304 			    port_name(port));
1305 
1306 	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
1307 	if (!dig_port)
1308 		return false;
1309 
1310 	dig_port->aux_ch = AUX_CH_NONE;
1311 
1312 	intel_connector = intel_connector_alloc();
1313 	if (!intel_connector)
1314 		goto err_connector_alloc;
1315 
1316 	intel_encoder = &dig_port->base;
1317 	encoder = &intel_encoder->base;
1318 
1319 	intel_encoder->devdata = devdata;
1320 
1321 	mutex_init(&dig_port->hdcp_mutex);
1322 
1323 	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
1324 			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
1325 			     "DP %c", port_name(port)))
1326 		goto err_encoder_init;
1327 
1328 	intel_encoder->hotplug = intel_dp_hotplug;
1329 	intel_encoder->compute_config = intel_dp_compute_config;
1330 	intel_encoder->get_hw_state = intel_dp_get_hw_state;
1331 	intel_encoder->get_config = intel_dp_get_config;
1332 	intel_encoder->sync_state = intel_dp_sync_state;
1333 	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
1334 	intel_encoder->update_pipe = intel_backlight_update;
1335 	intel_encoder->suspend = intel_dp_encoder_suspend;
1336 	intel_encoder->shutdown = intel_dp_encoder_shutdown;
1337 	if (IS_CHERRYVIEW(dev_priv)) {
1338 		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
1339 		intel_encoder->pre_enable = chv_pre_enable_dp;
1340 		intel_encoder->enable = vlv_enable_dp;
1341 		intel_encoder->disable = vlv_disable_dp;
1342 		intel_encoder->post_disable = chv_post_disable_dp;
1343 		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
1344 	} else if (IS_VALLEYVIEW(dev_priv)) {
1345 		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
1346 		intel_encoder->pre_enable = vlv_pre_enable_dp;
1347 		intel_encoder->enable = vlv_enable_dp;
1348 		intel_encoder->disable = vlv_disable_dp;
1349 		intel_encoder->post_disable = vlv_post_disable_dp;
1350 	} else {
1351 		intel_encoder->pre_enable = g4x_pre_enable_dp;
1352 		intel_encoder->enable = g4x_enable_dp;
1353 		intel_encoder->disable = g4x_disable_dp;
1354 		intel_encoder->post_disable = g4x_post_disable_dp;
1355 	}
1356 	intel_encoder->audio_enable = g4x_dp_audio_enable;
1357 	intel_encoder->audio_disable = g4x_dp_audio_disable;
1358 
1359 	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
1360 	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
1361 		dig_port->dp.set_link_train = cpt_set_link_train;
1362 	else
1363 		dig_port->dp.set_link_train = g4x_set_link_train;
1364 
1365 	if (IS_CHERRYVIEW(dev_priv))
1366 		intel_encoder->set_signal_levels = chv_set_signal_levels;
1367 	else if (IS_VALLEYVIEW(dev_priv))
1368 		intel_encoder->set_signal_levels = vlv_set_signal_levels;
1369 	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
1370 		intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
1371 	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
1372 		intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
1373 	else
1374 		intel_encoder->set_signal_levels = g4x_set_signal_levels;
1375 
1376 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
1377 	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
1378 		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
1379 		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
1380 	} else {
1381 		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
1382 		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
1383 	}
1384 
1385 	dig_port->dp.output_reg = output_reg;
1386 	dig_port->max_lanes = 4;
1387 
1388 	intel_encoder->type = INTEL_OUTPUT_DP;
1389 	intel_encoder->power_domain = intel_display_power_ddi_lanes_domain(dev_priv, port);
1390 	if (IS_CHERRYVIEW(dev_priv)) {
1391 		if (port == PORT_D)
1392 			intel_encoder->pipe_mask = BIT(PIPE_C);
1393 		else
1394 			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
1395 	} else {
1396 		intel_encoder->pipe_mask = ~0;
1397 	}
1398 	intel_encoder->cloneable = 0;
1399 	intel_encoder->port = port;
1400 	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);
1401 
1402 	dig_port->hpd_pulse = intel_dp_hpd_pulse;
1403 
1404 	if (HAS_GMCH(dev_priv)) {
1405 		dig_port->connected = g4x_digital_port_connected;
1406 	} else {
1407 		if (port == PORT_A)
1408 			dig_port->connected = ilk_digital_port_connected;
1409 		else
1410 			dig_port->connected = ibx_digital_port_connected;
1411 	}
1412 
1413 	if (port != PORT_A)
1414 		intel_infoframe_init(dig_port);
1415 
1416 	dig_port->aux_ch = intel_dp_aux_ch(intel_encoder);
1417 	if (dig_port->aux_ch == AUX_CH_NONE)
1418 		goto err_init_connector;
1419 
1420 	if (!intel_dp_init_connector(dig_port, intel_connector))
1421 		goto err_init_connector;
1422 
1423 	return true;
1424 
1425 err_init_connector:
1426 	drm_encoder_cleanup(encoder);
1427 err_encoder_init:
1428 	kfree(intel_connector);
1429 err_connector_alloc:
1430 	kfree(dig_port);
1431 	return false;
1432 }
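
/*
 * A simplified usage sketch (an assumption about the call sites, not copied
 * from them): the platform output setup code is expected to call this once
 * per candidate DP output register/port pair, e.g.
 *
 *	if (intel_de_read(dev_priv, DP_B) & DP_DETECTED)
 *		g4x_dp_init(dev_priv, DP_B, PORT_B);
 *
 * with g4x_dp_init() returning false when the port cannot be registered
 * (invalid port, allocation failure, no usable AUX channel, or connector
 * init failure), in which case everything allocated here is freed again.
 */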
1433