/*
 * Copyright 2007-11 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */

#include <linux/pci.h>

#include <acpi/video.h>

#include <drm/amdgpu_drm.h>
#include "amdgpu.h"
#include "amdgpu_connectors.h"
#include "amdgpu_display.h"
#include "atom.h"
#include "atombios_encoders.h"
#include "atombios_dp.h"
#include <linux/backlight.h>
#include "bif/bif_4_1_d.h"

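/*
 * The current backlight level is cached in the BIOS_SCRATCH_2 register.
 * These two helpers read and update that cached value.
 */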
u8
amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
{
	u8 backlight_level;
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);

	return backlight_level;
}

void
amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
					    u8 backlight_level)
{
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
			   ATOM_S2_CURRENT_BL_LEVEL_MASK);

	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
}

u8
amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return 0;

	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
}

void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
				     u8 level)
{
	struct drm_encoder *encoder = &amdgpu_encoder->base;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder_atom_dig *dig;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
	    amdgpu_encoder->enc_priv) {
		dig = amdgpu_encoder->enc_priv;
		dig->backlight_level = level;
		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			if (dig->backlight_level == 0)
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
			else {
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
			}
			break;
		default:
			break;
		}
	}
}

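/*
 * backlight_device callbacks: clamp the requested brightness to the
 * 0..AMDGPU_MAX_BL_LEVEL range and forward it to the encoder.
 */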
static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
{
	u8 level;

	/* Convert brightness to hardware level */
	if (bd->props.brightness < 0)
		level = 0;
	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
		level = AMDGPU_MAX_BL_LEVEL;
	else
		level = bd->props.brightness;

	return level;
}

static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;

	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
					     amdgpu_atombios_encoder_backlight_level(bd));

	return 0;
}

static int
amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);

	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
}

static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
};

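/*
 * Register a backlight device for a GPU-controlled panel.  Registration is
 * skipped entirely on Apple laptops that use the gmux driver, and falls back
 * to the ACPI video backlight when the GPU does not control the backlight or
 * when the native backlight should not be used.
 */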
void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
				     struct drm_connector *drm_connector)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct backlight_device *bd;
	struct backlight_properties props;
	struct amdgpu_backlight_privdata *pdata;
	struct amdgpu_encoder_atom_dig *dig;
	char bl_name[16];

	/* Mac laptops with multiple GPUs use the gmux driver for backlight
	 * so don't register a backlight device
	 */
	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
	    (adev->pdev->device == 0x6741))
		return;

	if (!amdgpu_encoder->enc_priv)
		return;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		goto register_acpi_backlight;

	if (!acpi_video_backlight_use_native()) {
		drm_info(dev, "Skipping amdgpu atom DIG backlight registration\n");
		goto register_acpi_backlight;
	}

	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
	if (!pdata) {
		DRM_ERROR("Memory allocation failed\n");
		goto error;
	}

	memset(&props, 0, sizeof(props));
	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
	props.type = BACKLIGHT_RAW;
	snprintf(bl_name, sizeof(bl_name),
		 "amdgpu_bl%d", dev->primary->index);
	bd = backlight_device_register(bl_name, drm_connector->kdev,
				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
	if (IS_ERR(bd)) {
		DRM_ERROR("Backlight registration failed\n");
		goto error;
	}

	pdata->encoder = amdgpu_encoder;

	dig = amdgpu_encoder->enc_priv;
	dig->bl_dev = bd;

	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
	bd->props.power = FB_BLANK_UNBLANK;
	backlight_update_status(bd);

	DRM_INFO("amdgpu atom DIG backlight initialized\n");

	return;

error:
	kfree(pdata);
	return;

register_acpi_backlight:
	/* Try registering an ACPI video backlight device instead. */
	acpi_video_register_backlight();
}

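/*
 * Unregister the backlight device registered by
 * amdgpu_atombios_encoder_init_backlight() and free its private data.
 */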
void
amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct backlight_device *bd = NULL;
	struct amdgpu_encoder_atom_dig *dig;

	if (!amdgpu_encoder->enc_priv)
		return;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	dig = amdgpu_encoder->enc_priv;
	bd = dig->bl_dev;
	dig->bl_dev = NULL;

	if (bd) {
		struct amdgpu_backlight_privdata *pdata;

		pdata = bl_get_data(bd);
		backlight_device_unregister(bd);
		kfree(pdata);

		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
	}
}

bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		return true;
	default:
		return false;
	}
}

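/*
 * Adjust the mode for hardware quirks (interlaced vsync start, minimum
 * vertical front porch), apply panel/RMX scaling and set up the DP link
 * configuration where needed.
 */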
bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
				 const struct drm_display_mode *mode,
				 struct drm_display_mode *adjusted_mode)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	/* set the active encoder to connector routing */
	amdgpu_encoder_set_active_device(encoder);
	drm_mode_set_crtcinfo(adjusted_mode, 0);

	/* hw bug */
	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;

	/* vertical FP must be at least 1 */
	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
		adjusted_mode->crtc_vsync_start++;

	/* get the native mode for scaling */
	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
	else if (amdgpu_encoder->rmx_type != RMX_OFF)
		amdgpu_panel_mode_fixup(encoder, adjusted_mode);

	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
	}

	return true;
}

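/* Enable/disable the analog DAC encoder via the DAC1/DAC2EncoderControl tables. */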
static void
amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
	int index = 0;

	memset(&args, 0, sizeof(args));

	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
		break;
	}

	args.ucAction = action;
	args.ucDacStandard = ATOM_DAC1_PS2;
	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

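/* Map the bpc of the CRTC feeding this encoder to the ATOM PANEL_*BIT_PER_COLOR encoding. */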
static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
{
	int bpc = 8;

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		bpc = amdgpu_crtc->bpc;
	}

	switch (bpc) {
	case 0:
		return PANEL_BPC_UNDEFINE;
	case 6:
		return PANEL_6BIT_PER_COLOR;
	case 8:
	default:
		return PANEL_8BIT_PER_COLOR;
	case 10:
		return PANEL_10BIT_PER_COLOR;
	case 12:
		return PANEL_12BIT_PER_COLOR;
	case 16:
		return PANEL_16BIT_PER_COLOR;
	}
}

union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};

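/*
 * Program the external DVO encoder.  The parameter layout depends on the
 * DVOEncoderControl table revision exposed by the VBIOS.
 */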
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (connector->display_info.is_hdmi &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (connector->display_info.is_hdmi &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
	case DRM_MODE_CONNECTOR_DisplayPort:
		dig_connector = amdgpu_connector->con_priv;
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (connector->display_info.is_hdmi &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
	}
}

/*
 * DIG Encoder/Transmitter Setup
 *
 * DCE 6.0
 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 * Supports up to 6 digital outputs
 * - 6 DIG encoder blocks.
 * - DIG to PHY mapping is hardcoded
 * DIG1 drives UNIPHY0 link A, A+B
 * DIG2 drives UNIPHY0 link B
 * DIG3 drives UNIPHY1 link A, A+B
 * DIG4 drives UNIPHY1 link B
 * DIG5 drives UNIPHY2 link A, A+B
 * DIG6 drives UNIPHY2 link B
 *
 * Routing
 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 * Examples:
 * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
 * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
 * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
 * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
 */

union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
};

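/*
 * Program a DIG encoder block.  Lane count, link rate and panel mode are
 * filled in according to the DIGxEncoderControl table revision.
 */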
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
				   int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		case 5:
			switch (action) {
			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
				args.v5.asDPPanelModeParam.ucAction = action;
				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
				break;
			case ATOM_ENCODER_CMD_STREAM_SETUP:
				args.v5.asStreamParam.ucAction = action;
				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
				args.v5.asStreamParam.ucDigMode =
					amdgpu_atombios_encoder_get_encoder_mode(encoder);
				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
				else if (amdgpu_dig_monitor_is_duallink(encoder,
									amdgpu_encoder->pixel_clock))
					args.v5.asStreamParam.ucLaneNum = 8;
				else
					args.v5.asStreamParam.ucLaneNum = 4;
				args.v5.asStreamParam.ulPixelClock =
					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
				args.v5.asStreamParam.ucBitPerColor =
					amdgpu_atombios_encoder_get_bpc(encoder);
				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
				break;
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
				args.v5.asCmdParam.ucAction = action;
				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
				break;
			default:
				DRM_ERROR("Unsupported action 0x%x\n", action);
				break;
			}
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
};

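/*
 * Program a UNIPHY/LVTMA transmitter (PHY).  Selects the DIG encoder, link,
 * lane count, reference clock source and coherent mode based on the
 * transmitter control table revision.
 */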
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check.  the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;

			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;

			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (dig->linkb)
				args.v4.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v4.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE5 DCPLL usually generates the DP ref clock */
			if (is_dp) {
				if (adev->clock.dp_extclk)
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
				else
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
			} else
				args.v4.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v4.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v4.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v4.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v4.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 5:
			args.v5.ucAction = action;
			if (is_dp)
				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
			else
				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}
			if (is_dp)
				args.v5.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v5.ucLaneNum = 8;
			else
				args.v5.ucLaneNum = 4;
			args.v5.ucConnObjId = connector_object_id;
			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (is_dp && adev->clock.dp_extclk)
				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
			else
				args.v5.asConfig.ucPhyClkSrcId = pll_id;

			if (is_dp)
				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v5.asConfig.ucCoherentMode = 1;
			}
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v5.asConfig.ucHPDSel = 0;
			else
				args.v5.asConfig.ucHPDSel = hpd_id + 1;
			args.v5.ucDigEncoderSel = 1 << dig_encoder;
			args.v5.ucDPLaneSet = lane_set;
			break;
		case 6:
			args.v6.ucAction = action;
			if (is_dp)
				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
			else
				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				if (dig->linkb)
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}
			if (is_dp)
				args.v6.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v6.ucLaneNum = 8;
			else
				args.v6.ucLaneNum = 4;
			args.v6.ucConnObjId = connector_object_id;
			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
				args.v6.ucDPLaneSet = lane_set;
			else
				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (hpd_id == AMDGPU_HPD_NONE)
				args.v6.ucHPDSel = 0;
			else
				args.v6.ucHPDSel = hpd_id + 1;
			args.v6.ucDigEncoderSel = 1 << dig_encoder;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

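/*
 * Power the eDP panel on or off through the UNIPHYTransmitterControl table
 * and, on power-on, wait up to 300 ms for HPD to report the panel.
 */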
bool
amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
				     int action)
{
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	struct drm_device *dev = amdgpu_connector->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	union dig_transmitter_control args;
	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
	uint8_t frev, crev;

	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
		goto done;

	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
		goto done;

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		goto done;

	memset(&args, 0, sizeof(args));

	args.v1.ucAction = action;

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

	/* wait for the panel to power up */
	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
		int i;

		for (i = 0; i < 300; i++) {
			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
				return true;
			mdelay(1);
		}
		return false;
	}
done:
	return true;
}

union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};

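/* Program an external encoder (e.g. a DP bridge) via the ExternalEncoderControl table. */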
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					struct drm_encoder *ext_encoder,
					int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

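/*
 * Enable or disable the full DIG path: panel mode, DIG encoder, optional
 * external encoder, transmitter, eDP panel power, DP link training and
 * backlight.
 */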
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
						   dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
								     ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
						       ATOM_TRANSMITTER_ACTION_ENABLE,
						       0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);

		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}

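/* DPMS entry point: route the request to the DIG, DVO or DAC setup helpers. */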
void
amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
		  amdgpu_encoder->active_device);
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
			break;
		}
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
			break;
		}
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
			break;
		}
		break;
	default:
		return;
	}
}

union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};

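/*
 * Tell the VBIOS which CRTC feeds this encoder via the SelectCRTC_Source
 * table; the encoder ID/device index depends on the table revision.
 */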
1431 void
1432 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1433 {
1434 	struct drm_device *dev = encoder->dev;
1435 	struct amdgpu_device *adev = drm_to_adev(dev);
1436 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1437 	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1438 	union crtc_source_param args;
1439 	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1440 	uint8_t frev, crev;
1441 	struct amdgpu_encoder_atom_dig *dig;
1442 
1443 	memset(&args, 0, sizeof(args));
1444 
1445 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1446 		return;
1447 
1448 	switch (frev) {
1449 	case 1:
1450 		switch (crev) {
1451 		case 1:
1452 		default:
1453 			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1454 			switch (amdgpu_encoder->encoder_id) {
1455 			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1456 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1457 				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1458 				break;
1459 			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1460 			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1461 				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1462 					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1463 				else
1464 					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1465 				break;
1466 			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1467 			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1468 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1469 				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1470 				break;
1471 			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1472 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1473 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1474 					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1475 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1476 					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1477 				else
1478 					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1479 				break;
1480 			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1481 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1482 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1483 					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1484 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1485 					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1486 				else
1487 					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1488 				break;
1489 			}
1490 			break;
1491 		case 2:
1492 			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1493 			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1494 				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1495 
1496 				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1497 					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1498 				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1499 					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1500 				else
1501 					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1502 			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1503 				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1504 			} else {
1505 				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1506 			}
1507 			switch (amdgpu_encoder->encoder_id) {
1508 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1509 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1510 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1511 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1512 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1513 				dig = amdgpu_encoder->enc_priv;
1514 				switch (dig->dig_encoder) {
1515 				case 0:
1516 					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1517 					break;
1518 				case 1:
1519 					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1520 					break;
1521 				case 2:
1522 					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1523 					break;
1524 				case 3:
1525 					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1526 					break;
1527 				case 4:
1528 					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1529 					break;
1530 				case 5:
1531 					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1532 					break;
1533 				case 6:
1534 					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1535 					break;
1536 				}
1537 				break;
1538 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1539 				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1540 				break;
1541 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1542 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1543 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1544 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1545 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1546 				else
1547 					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1548 				break;
1549 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1550 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1551 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1552 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1553 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1554 				else
1555 					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1556 				break;
1557 			}
1558 			break;
1559 		case 3:
1560 			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1561 			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1562 				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1563 
1564 				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1565 					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1566 				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1567 					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1568 				else
1569 					args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1570 			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1571 				args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1572 			} else {
1573 				args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1574 			}
1575 			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1576 			switch (amdgpu_encoder->encoder_id) {
1577 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1578 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1579 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1580 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1581 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1582 				dig = amdgpu_encoder->enc_priv;
1583 				switch (dig->dig_encoder) {
1584 				case 0:
1585 					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1586 					break;
1587 				case 1:
1588 					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1589 					break;
1590 				case 2:
1591 					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1592 					break;
1593 				case 3:
1594 					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1595 					break;
1596 				case 4:
1597 					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1598 					break;
1599 				case 5:
1600 					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1601 					break;
1602 				case 6:
1603 					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1604 					break;
1605 				}
1606 				break;
1607 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1608 				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1609 				break;
1610 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1611 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1612 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1613 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1614 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1615 				else
1616 					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1617 				break;
1618 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1619 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1620 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1621 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1622 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1623 				else
1624 					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1625 				break;
1626 			}
1627 			break;
1628 		}
1629 		break;
1630 	default:
1631 		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1632 		return;
1633 	}
1634 
1635 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1636 }
1637 
1638 /* This only needs to be called once at startup */
1639 void
1640 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1641 {
1642 	struct drm_device *dev = adev_to_drm(adev);
1643 	struct drm_encoder *encoder;
1644 
1645 	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1646 		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1647 		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1648 
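		/* one-time INIT of the internal UNIPHY transmitters */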
1649 		switch (amdgpu_encoder->encoder_id) {
1650 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1651 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1652 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1653 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1654 			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1655 							       0, 0);
1656 			break;
1657 		}
1658 
1659 		if (ext_encoder)
1660 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1661 								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1662 	}
1663 }
1664 
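/*
 * Run DAC load detection for analog devices (CRT/TV/CV) through the
 * DAC_LoadDetection command table.  Returns true if the table was
 * executed; the detection result itself is reported via the BIOS
 * scratch registers.
 */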
1665 static bool
1666 amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1667 				 struct drm_connector *connector)
1668 {
1669 	struct drm_device *dev = encoder->dev;
1670 	struct amdgpu_device *adev = drm_to_adev(dev);
1671 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1672 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1673 
1674 	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1675 				       ATOM_DEVICE_CV_SUPPORT |
1676 				       ATOM_DEVICE_CRT_SUPPORT)) {
1677 		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1678 		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1679 		uint8_t frev, crev;
1680 
1681 		memset(&args, 0, sizeof(args));
1682 
1683 		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1684 			return false;
1685 
1686 		args.sDacload.ucMisc = 0;
1687 
1688 		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1689 		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1690 			args.sDacload.ucDacType = ATOM_DAC_A;
1691 		else
1692 			args.sDacload.ucDacType = ATOM_DAC_B;
1693 
1694 		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1695 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1696 		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1697 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1698 		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1699 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1700 			if (crev >= 3)
1701 				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1702 		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1703 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1704 			if (crev >= 3)
1705 				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1706 		}
1707 
1708 		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1709 
1710 		return true;
1711 	}
1712 	return false;
1713 }
1714 
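/*
 * Analog connector detection: run DAC load detection and read the
 * result back from BIOS scratch register 0.
 */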
1715 enum drm_connector_status
1716 amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1717 			    struct drm_connector *connector)
1718 {
1719 	struct drm_device *dev = encoder->dev;
1720 	struct amdgpu_device *adev = drm_to_adev(dev);
1721 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1722 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1723 	uint32_t bios_0_scratch;
1724 
1725 	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1726 		DRM_DEBUG_KMS("detect returned false\n");
1727 		return connector_status_unknown;
1728 	}
1729 
1730 	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1731 
1732 	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1733 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1734 		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1735 			return connector_status_connected;
1736 	}
1737 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1738 		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1739 			return connector_status_connected;
1740 	}
1741 	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1742 		if (bios_0_scratch & (ATOM_S0_CV_MASK | ATOM_S0_CV_MASK_A))
1743 			return connector_status_connected;
1744 	}
1745 	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1746 		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1747 			return connector_status_connected; /* CTV */
1748 		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1749 			return connector_status_connected; /* STV */
1750 	}
1751 	return connector_status_disconnected;
1752 }
1753 
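/*
 * Analog load detection through an external DP bridge encoder (e.g. a
 * DP to VGA chip); as with the internal DACs, the result is reported
 * via BIOS scratch register 0.
 */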
1754 enum drm_connector_status
1755 amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1756 			    struct drm_connector *connector)
1757 {
1758 	struct drm_device *dev = encoder->dev;
1759 	struct amdgpu_device *adev = drm_to_adev(dev);
1760 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1761 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1762 	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1763 	u32 bios_0_scratch;
1764 
1765 	if (!ext_encoder)
1766 		return connector_status_unknown;
1767 
1768 	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1769 		return connector_status_unknown;
1770 
1771 	/* load detect on the dp bridge */
1772 	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1773 						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1774 
1775 	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1776 
1777 	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1778 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1779 		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1780 			return connector_status_connected;
1781 	}
1782 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1783 		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1784 			return connector_status_connected;
1785 	}
1786 	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1787 		if (bios_0_scratch & (ATOM_S0_CV_MASK | ATOM_S0_CV_MASK_A))
1788 			return connector_status_connected;
1789 	}
1790 	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1791 		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1792 			return connector_status_connected; /* CTV */
1793 		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1794 			return connector_status_connected; /* STV */
1795 	}
1796 	return connector_status_disconnected;
1797 }
1798 
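/* Route DDC through the external DP bridge encoder, if one is present. */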
1799 void
1800 amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1801 {
1802 	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1803 
1804 	if (ext_encoder)
1805 		/* ddc_setup on the dp bridge */
1806 		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1807 							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1809 }
1810 
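/*
 * Mirror the connector state into the BIOS scratch registers (0, 3 and 6)
 * so the VBIOS/SBIOS keep a consistent view of which displays are attached
 * and active.
 */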
1811 void
1812 amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1813 				       struct drm_encoder *encoder,
1814 				       bool connected)
1815 {
1816 	struct drm_device *dev = connector->dev;
1817 	struct amdgpu_device *adev = drm_to_adev(dev);
1818 	struct amdgpu_connector *amdgpu_connector =
1819 	    to_amdgpu_connector(connector);
1820 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1821 	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1822 
1823 	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1824 	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1825 	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1826 
1827 	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1828 	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1829 		if (connected) {
1830 			DRM_DEBUG_KMS("LCD1 connected\n");
1831 			bios_0_scratch |= ATOM_S0_LCD1;
1832 			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1833 			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1834 		} else {
1835 			DRM_DEBUG_KMS("LCD1 disconnected\n");
1836 			bios_0_scratch &= ~ATOM_S0_LCD1;
1837 			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1838 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1839 		}
1840 	}
1841 	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1842 	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1843 		if (connected) {
1844 			DRM_DEBUG_KMS("CRT1 connected\n");
1845 			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1846 			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1847 			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1848 		} else {
1849 			DRM_DEBUG_KMS("CRT1 disconnected\n");
1850 			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1851 			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1852 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1853 		}
1854 	}
1855 	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1856 	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1857 		if (connected) {
1858 			DRM_DEBUG_KMS("CRT2 connected\n");
1859 			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1860 			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1861 			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1862 		} else {
1863 			DRM_DEBUG_KMS("CRT2 disconnected\n");
1864 			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1865 			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1866 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1867 		}
1868 	}
1869 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1870 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1871 		if (connected) {
1872 			DRM_DEBUG_KMS("DFP1 connected\n");
1873 			bios_0_scratch |= ATOM_S0_DFP1;
1874 			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1875 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1876 		} else {
1877 			DRM_DEBUG_KMS("DFP1 disconnected\n");
1878 			bios_0_scratch &= ~ATOM_S0_DFP1;
1879 			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1880 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1881 		}
1882 	}
1883 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1884 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1885 		if (connected) {
1886 			DRM_DEBUG_KMS("DFP2 connected\n");
1887 			bios_0_scratch |= ATOM_S0_DFP2;
1888 			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1889 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1890 		} else {
1891 			DRM_DEBUG_KMS("DFP2 disconnected\n");
1892 			bios_0_scratch &= ~ATOM_S0_DFP2;
1893 			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1894 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1895 		}
1896 	}
1897 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1898 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1899 		if (connected) {
1900 			DRM_DEBUG_KMS("DFP3 connected\n");
1901 			bios_0_scratch |= ATOM_S0_DFP3;
1902 			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1903 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1904 		} else {
1905 			DRM_DEBUG_KMS("DFP3 disconnected\n");
1906 			bios_0_scratch &= ~ATOM_S0_DFP3;
1907 			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1908 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1909 		}
1910 	}
1911 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1912 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1913 		if (connected) {
1914 			DRM_DEBUG_KMS("DFP4 connected\n");
1915 			bios_0_scratch |= ATOM_S0_DFP4;
1916 			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1917 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1918 		} else {
1919 			DRM_DEBUG_KMS("DFP4 disconnected\n");
1920 			bios_0_scratch &= ~ATOM_S0_DFP4;
1921 			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1922 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1923 		}
1924 	}
1925 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1926 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1927 		if (connected) {
1928 			DRM_DEBUG_KMS("DFP5 connected\n");
1929 			bios_0_scratch |= ATOM_S0_DFP5;
1930 			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1931 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1932 		} else {
1933 			DRM_DEBUG_KMS("DFP5 disconnected\n");
1934 			bios_0_scratch &= ~ATOM_S0_DFP5;
1935 			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1936 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1937 		}
1938 	}
1939 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1940 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1941 		if (connected) {
1942 			DRM_DEBUG_KMS("DFP6 connected\n");
1943 			bios_0_scratch |= ATOM_S0_DFP6;
1944 			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1945 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1946 		} else {
1947 			DRM_DEBUG_KMS("DFP6 disconnected\n");
1948 			bios_0_scratch &= ~ATOM_S0_DFP6;
1949 			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1950 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1951 		}
1952 	}
1953 
1954 	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1955 	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1956 	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1957 }
1958 
1959 union lvds_info {
1960 	struct _ATOM_LVDS_INFO info;
1961 	struct _ATOM_LVDS_INFO_V12 info_12;
1962 };
1963 
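/*
 * Parse the LVDS_Info data table to build the panel's native mode, panel
 * power delay and related parameters, including any fake EDID or panel
 * resolution patch records.  Returns a kzalloc'd dig struct (or NULL),
 * which callers typically attach as enc_priv, e.g.:
 *
 *	amdgpu_encoder->enc_priv =
 *		amdgpu_atombios_encoder_get_lcd_info(amdgpu_encoder);
 */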
1964 struct amdgpu_encoder_atom_dig *
1965 amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
1966 {
1967 	struct drm_device *dev = encoder->base.dev;
1968 	struct amdgpu_device *adev = drm_to_adev(dev);
1969 	struct amdgpu_mode_info *mode_info = &adev->mode_info;
1970 	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
1971 	uint16_t data_offset, misc;
1972 	union lvds_info *lvds_info;
1973 	uint8_t frev, crev;
1974 	struct amdgpu_encoder_atom_dig *lvds = NULL;
1975 	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1976 
1977 	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
1978 				   &frev, &crev, &data_offset)) {
1979 		lvds_info =
1980 			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
1981 		lvds =
1982 		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
1983 
1984 		if (!lvds)
1985 			return NULL;
1986 
1987 		lvds->native_mode.clock =
1988 		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
1989 		lvds->native_mode.hdisplay =
1990 		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
1991 		lvds->native_mode.vdisplay =
1992 		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
1993 		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
1994 			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
1995 		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
1996 			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
1997 		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
1998 			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
1999 		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2000 			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2001 		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2002 			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2003 		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2004 			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2005 		lvds->panel_pwr_delay =
2006 		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
2007 		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2008 
2009 		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2010 		if (misc & ATOM_VSYNC_POLARITY)
2011 			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2012 		if (misc & ATOM_HSYNC_POLARITY)
2013 			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2014 		if (misc & ATOM_COMPOSITESYNC)
2015 			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2016 		if (misc & ATOM_INTERLACE)
2017 			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2018 		if (misc & ATOM_DOUBLE_CLOCK_MODE)
2019 			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2020 
2021 		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2022 		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2023 
2024 		/* set crtc values */
2025 		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2026 
2027 		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2028 
2029 		encoder->native_mode = lvds->native_mode;
2030 
2031 		if (encoder_enum == 2)
2032 			lvds->linkb = true;
2033 		else
2034 			lvds->linkb = false;
2035 
2036 		/* parse the lcd record table */
2037 		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2038 			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2039 			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2040 			bool bad_record = false;
2041 			u8 *record;
2042 
2043 			if ((frev == 1) && (crev < 2))
2044 				/* absolute */
2045 				record = (u8 *)(mode_info->atom_context->bios +
2046 						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2047 			else
2048 				/* relative */
2049 				record = (u8 *)(mode_info->atom_context->bios +
2050 						data_offset +
2051 						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
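			/* walk the LCD patch records until the end-of-record marker */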
2052 			while (*record != ATOM_RECORD_END_TYPE) {
2053 				switch (*record) {
2054 				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2055 					record += sizeof(ATOM_PATCH_RECORD_MODE);
2056 					break;
2057 				case LCD_RTS_RECORD_TYPE:
2058 					record += sizeof(ATOM_LCD_RTS_RECORD);
2059 					break;
2060 				case LCD_CAP_RECORD_TYPE:
2061 					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2062 					break;
2063 				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
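					/*
					 * Some panels ship a fake EDID in the VBIOS; cache a copy
					 * so it can be used when the panel does not provide a
					 * usable EDID of its own.
					 */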
2064 					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2065 					if (fake_edid_record->ucFakeEDIDLength) {
2066 						struct edid *edid;
2067 						int edid_size =
2068 							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2069 						edid = kmalloc(edid_size, GFP_KERNEL);
2070 						if (edid) {
2071 							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2072 							       fake_edid_record->ucFakeEDIDLength);
2073 
2074 							if (drm_edid_is_valid(edid)) {
2075 								adev->mode_info.bios_hardcoded_edid = edid;
2076 								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2077 							} else
2078 								kfree(edid);
2079 						}
2080 					}
2081 					record += fake_edid_record->ucFakeEDIDLength ?
2082 						  struct_size(fake_edid_record,
2083 							      ucFakeEDIDString,
2084 							      fake_edid_record->ucFakeEDIDLength) :
2085 						  /* empty fake edid record must be 3 bytes long */
2086 						  sizeof(ATOM_FAKE_EDID_PATCH_RECORD) + 1;
2087 					break;
2088 				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2089 					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2090 					lvds->native_mode.width_mm = le16_to_cpu(panel_res_record->usHSize);
2091 					lvds->native_mode.height_mm = le16_to_cpu(panel_res_record->usVSize);
2092 					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2093 					break;
2094 				default:
2095 					DRM_ERROR("Bad LCD record %d\n", *record);
2096 					bad_record = true;
2097 					break;
2098 				}
2099 				if (bad_record)
2100 					break;
2101 			}
2102 		}
2103 	}
2104 	return lvds;
2105 }
2106 
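/*
 * Allocate the default digital encoder private data; link B is selected
 * when the encoder enumeration ID is 2.  A minimal usage sketch:
 *
 *	amdgpu_encoder->enc_priv =
 *		amdgpu_atombios_encoder_get_dig_info(amdgpu_encoder);
 */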
2107 struct amdgpu_encoder_atom_dig *
2108 amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2109 {
2110 	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2111 	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2112 
2113 	if (!dig)
2114 		return NULL;
2115 
2116 	/* coherent mode by default */
2117 	dig->coherent_mode = true;
2118 	dig->dig_encoder = -1;
2119 
2120 	if (encoder_enum == 2)
2121 		dig->linkb = true;
2122 	else
2123 		dig->linkb = false;
2124 
2125 	return dig;
2126 }
2127 