xref: /openbsd/sys/dev/pci/drm/radeon/atombios_dp.c (revision cecf84d4)
1 /*	$OpenBSD: atombios_dp.c,v 1.6 2015/04/18 14:47:34 jsg Exp $	*/
2 /*
3  * Copyright 2007-8 Advanced Micro Devices, Inc.
4  * Copyright 2008 Red Hat Inc.
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a
7  * copy of this software and associated documentation files (the "Software"),
8  * to deal in the Software without restriction, including without limitation
9  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10  * and/or sell copies of the Software, and to permit persons to whom the
11  * Software is furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
19  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
20  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
22  * OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors: Dave Airlie
25  *          Alex Deucher
26  *          Jerome Glisse
27  */
28 #include <dev/pci/drm/drmP.h>
29 #include <dev/pci/drm/radeon_drm.h>
30 #include "radeon.h"
31 
32 #include "atom.h"
33 #include "atom-bits.h"
34 #include <dev/pci/drm/drm_dp_helper.h>
35 
36 /* move these to drm_dp_helper.c/h */
37 #define DP_LINK_CONFIGURATION_SIZE 9
38 #define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE
39 
#ifdef DRMDEBUG
/* Printable names for the DP training drive levels, indexed by the
 * voltage-swing / pre-emphasis field values; referenced only from
 * the DRM_DEBUG_KMS output in dp_get_adjust_train(). */
static char *voltage_names[] = {
        "0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
        "0dB", "3.5dB", "6dB", "9.5dB"
};
#endif
48 
49 /***** radeon AUX functions *****/
50 
51 /* Atom needs data in little endian format
52  * so swap as appropriate when copying data to
53  * or from atom. Note that atom operates on
54  * dw units.
55  */
void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le)
{
#ifdef __BIG_ENDIAN
	/* Staging buffers: callers move at most 20 bytes through the atom
	 * scratch area (4 byte AUX header + 16 bytes of payload), so 20
	 * is the required capacity here. */
	u8 src_tmp[20], dst_tmp[20]; /* used for byteswapping */
	u32 *dst32, *src32;
	int i;

	/* stage the input so we can walk it in u32 units */
	memcpy(src_tmp, src, num_bytes);
	src32 = (u32 *)src_tmp;
	dst32 = (u32 *)dst_tmp;
	if (to_le) {
		/* swap whole dwords, rounding the count up */
		for (i = 0; i < ((num_bytes + 3) / 4); i++)
			dst32[i] = cpu_to_le32(src32[i]);
		memcpy(dst, dst_tmp, num_bytes);
	} else {
		/* dws = size rounded down to a whole number of dwords */
		u8 dws = num_bytes & ~3;
		for (i = 0; i < ((num_bytes + 3) / 4); i++)
			dst32[i] = le32_to_cpu(src32[i]);
		memcpy(dst, dst_tmp, dws);
		/* copy any remaining tail bytes individually */
		if (num_bytes % 4) {
			for (i = 0; i < (num_bytes % 4); i++)
				dst[dws+i] = dst_tmp[dws+i];
		}
	}
#else
	/* little-endian hosts already match atom's layout */
	memcpy(dst, src, num_bytes);
#endif
}
84 
/* Overlay of the two ProcessAuxChannelTransaction parameter layouts;
 * v1 fields are used everywhere, v2 adds the HPD id needed on DCE4+. */
union aux_channel_transaction {
	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
};
89 
/* Execute one AUX channel transaction through the atom
 * ProcessAuxChannelTransaction command table.
 *
 * @chan: AUX channel; its i2c record identifies the channel to atom
 * @send: request bytes (AUX header + payload), copied into atom scratch
 * @send_bytes: length of the request
 * @recv: buffer for the reply payload (may be NULL)
 * @recv_size: size of @recv
 * @delay: delay passed to atom in 10 usec units
 * @ack: the raw atom reply status byte is always stored here
 *
 * Returns the number of reply bytes received, or a negative errno
 * (-ETIMEDOUT, -EBUSY, -EIO) decoded from the atom reply status.
 */
static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
				 u8 *send, int send_bytes,
				 u8 *recv, int recv_size,
				 u8 delay, u8 *ack)
{
	struct drm_device *dev = chan->dev;
	struct radeon_device *rdev = dev->dev_private;
	union aux_channel_transaction args;
	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
	unsigned char *base;
	int recv_bytes;

	memset(&args, 0, sizeof(args));

	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);

	/* atom reads the request from the scratch buffer (offset 4 below) */
	radeon_atom_copy_swap(base, send, send_bytes, true);

	args.v1.lpAuxRequest = cpu_to_le16((u16)(0 + 4));
	args.v1.lpDataOut = cpu_to_le16((u16)(16 + 4));
	args.v1.ucDataOutLen = 0;
	args.v1.ucChannelID = chan->rec.i2c_id;
	args.v1.ucDelay = delay / 10;	/* atom wants 10 usec units */
	if (ASIC_IS_DCE4(rdev))
		args.v2.ucHPD_ID = chan->rec.hpd;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);

	/* hand the raw status to the caller for native/i2c reply decoding */
	*ack = args.v1.ucReplyStatus;

	/* timeout */
	if (args.v1.ucReplyStatus == 1) {
		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
		return -ETIMEDOUT;
	}

	/* flags not zero */
	if (args.v1.ucReplyStatus == 2) {
		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
		return -EBUSY;
	}

	/* error */
	if (args.v1.ucReplyStatus == 3) {
		DRM_DEBUG_KMS("dp_aux_ch error\n");
		return -EIO;
	}

	/* clamp the reply length to the caller's buffer */
	recv_bytes = args.v1.ucDataOutLen;
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	/* reply payload sits at offset 16 in the scratch area */
	if (recv && recv_size)
		radeon_atom_copy_swap(recv, base + 16, recv_bytes, false);

	return recv_bytes;
}
147 
/* Perform a native AUX write of @send_bytes bytes (max 16) to DPCD
 * address @address.
 *
 * Returns the number of bytes written on ACK, -1 if send_bytes is
 * too large, or a negative errno on AUX failure. DEFER replies are
 * retried up to 4 times with a 400 usec pause in between. */
static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
				      u16 address, u8 *send, u8 send_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	int ret;
	u8 msg[20];
	int msg_bytes = send_bytes + 4;	/* 4 byte AUX header + payload */
	u8 ack;
	unsigned retry;

	if (send_bytes > 16)
		return -1;

	/* native AUX write request header */
	msg[0] = address;
	msg[1] = address >> 8;
	msg[2] = AUX_NATIVE_WRITE << 4;
	msg[3] = (msg_bytes << 4) | (send_bytes - 1);
	memcpy(&msg[4], send, send_bytes);

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    msg, msg_bytes, NULL, 0, delay, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			return send_bytes;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(400);
		else
			return -EIO;
	}

	return -EIO;
}
184 
185 static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
186 				     u16 address, u8 *recv, int recv_bytes, u8 delay)
187 {
188 	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
189 	u8 msg[4];
190 	int msg_bytes = 4;
191 	u8 ack;
192 	int ret;
193 	unsigned retry;
194 
195 	msg[0] = address;
196 	msg[1] = address >> 8;
197 	msg[2] = AUX_NATIVE_READ << 4;
198 	msg[3] = (msg_bytes << 4) | (recv_bytes - 1);
199 
200 	for (retry = 0; retry < 4; retry++) {
201 		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
202 					    msg, msg_bytes, recv, recv_bytes, delay, &ack);
203 		if (ret == -EBUSY)
204 			continue;
205 		else if (ret < 0)
206 			return ret;
207 		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
208 			return ret;
209 		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
210 			udelay(400);
211 		else
212 			return -EIO;
213 	}
214 
215 	return -EIO;
216 }
217 
/* Write a single byte to DPCD register @reg over native AUX.
 * Any error from the AUX transaction is ignored. */
static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
				 u16 reg, u8 val)
{
	radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0);
}
223 
/* Read a single byte from DPCD register @reg over native AUX.
 * Returns 0 if the read fails (val stays at its initializer). */
static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
			       u16 reg)
{
	u8 val = 0;

	radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);

	return val;
}
233 
/* Transfer a single byte using I2C-over-AUX framing; this is the
 * low-level hook used for DDC (EDID) access over DisplayPort.
 *
 * @mode: MODE_I2C_* flags from the i2c-over-AUX layer; MOT
 *	  (middle-of-transaction) is set unless this is a STOP
 * @write_byte: byte to send when writing
 * @read_byte: filled with the received byte when reading
 *
 * Deferred transactions are retried up to 4 times. Returns the
 * underlying AUX transfer result on success, or a negative errno
 * (-EREMOTEIO on NACK or an invalid reply). */
int radeon_dp_i2c_aux_ch(struct i2c_controller *adapter, int mode,
			 u8 write_byte, u8 *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->ic_cookie;
	struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
	u16 address = algo_data->address;
	u8 msg[5];
	u8 reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes = 1;
	int ret;
	u8 ack;

	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[2] = AUX_I2C_READ << 4;
	else
		msg[2] = AUX_I2C_WRITE << 4;

	if (!(mode & MODE_I2C_STOP))
		msg[2] |= AUX_I2C_MOT << 4;

	msg[0] = address;
	msg[1] = address >> 8;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg_bytes = 5;
		msg[3] = msg_bytes << 4;
		msg[4] = write_byte;
		break;
	case MODE_I2C_READ:
		msg_bytes = 4;
		msg[3] = msg_bytes << 4;
		break;
	default:
		/* start/stop: address-only transaction, no data byte */
		msg_bytes = 4;
		msg[3] = 3 << 4;
		break;
	}

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(auxch,
					    msg, msg_bytes, reply, reply_bytes, 0, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		/* first decode the native AUX reply field */
		switch (ack & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_ch native defer\n");
			udelay(400);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}

		/* then the i2c reply field carried inside it */
		switch (ack & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ)
				*read_byte = reply[0];
			return ret;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(400);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}
	}

	DRM_DEBUG_KMS("aux i2c too many retries, giving up\n");
	return -EREMOTEIO;
}
325 
326 /***** general DP utility functions *****/
327 
328 #define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_1200
329 #define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPHASIS_9_5
330 
331 static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
332 				int lane_count,
333 				u8 train_set[4])
334 {
335 	u8 v = 0;
336 	u8 p = 0;
337 	int lane;
338 
339 	for (lane = 0; lane < lane_count; lane++) {
340 		u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane);
341 		u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
342 
343 		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
344 			  lane,
345 			  voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
346 			  pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
347 
348 		if (this_v > v)
349 			v = this_v;
350 		if (this_p > p)
351 			p = this_p;
352 	}
353 
354 	if (v >= DP_VOLTAGE_MAX)
355 		v |= DP_TRAIN_MAX_SWING_REACHED;
356 
357 	if (p >= DP_PRE_EMPHASIS_MAX)
358 		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
359 
360 	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
361 		  voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
362 		  pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);
363 
364 	for (lane = 0; lane < 4; lane++)
365 		train_set[lane] = v | p;
366 }
367 
/* Convert bits per color channel (from the EDID) to total bits per
 * pixel (3 channels). A bpc of 0 means "unknown" and defaults to
 * 24 bpp. */
static int convert_bpc_to_bpp(int bpc)
{
	return (bpc == 0) ? 24 : bpc * 3;
}
377 
/* Max pixel clock supported by a link of the given rate and width.
 * The factor of 8 converts the per-lane symbol rate into payload
 * bits before dividing by the bits per pixel. */
static int dp_get_max_dp_pix_clock(int link_rate,
				   int lane_num,
				   int bpp)
{
	int link_bandwidth = link_rate * lane_num * 8;

	return link_bandwidth / bpp;
}
385 
386 /***** radeon specific DP functions *****/
387 
388 static int radeon_dp_get_max_link_rate(struct drm_connector *connector,
389 				       u8 dpcd[DP_DPCD_SIZE])
390 {
391 	int max_link_rate;
392 
393 	if (radeon_connector_is_dp12_capable(connector))
394 		max_link_rate = min(drm_dp_max_link_rate(dpcd), 540000);
395 	else
396 		max_link_rate = min(drm_dp_max_link_rate(dpcd), 270000);
397 
398 	return max_link_rate;
399 }
400 
401 /* First get the min lane# when low rate is used according to pixel clock
402  * (prefer low rate), second check max lane# supported by DP panel,
403  * if the max lane# < low rate lane# then use max lane# instead.
404  */
405 static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
406 					u8 dpcd[DP_DPCD_SIZE],
407 					int pix_clock)
408 {
409 	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
410 	int max_link_rate = radeon_dp_get_max_link_rate(connector, dpcd);
411 	int max_lane_num = drm_dp_max_lane_count(dpcd);
412 	int lane_num;
413 	int max_dp_pix_clock;
414 
415 	for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
416 		max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
417 		if (pix_clock <= max_dp_pix_clock)
418 			break;
419 	}
420 
421 	return lane_num;
422 }
423 
424 static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
425 				       u8 dpcd[DP_DPCD_SIZE],
426 				       int pix_clock)
427 {
428 	int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector));
429 	int lane_num, max_pix_clock;
430 
431 	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
432 	    ENCODER_OBJECT_ID_NUTMEG)
433 		return 270000;
434 
435 	lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
436 	max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
437 	if (pix_clock <= max_pix_clock)
438 		return 162000;
439 	max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
440 	if (pix_clock <= max_pix_clock)
441 		return 270000;
442 	if (radeon_connector_is_dp12_capable(connector)) {
443 		max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
444 		if (pix_clock <= max_pix_clock)
445 			return 540000;
446 	}
447 
448 	return radeon_dp_get_max_link_rate(connector, dpcd);
449 }
450 
/* Execute the atom DPEncoderService command table.
 *
 * @action: ATOM_DP_ACTION_* (sink type query, legacy training control)
 * @dp_clock: link clock; programmed into atom as dp_clock / 10
 * @ucconfig: config byte (an i2c id or an enc_id, depending on action)
 * @lane_num: lane count, or a training pattern code for the
 *	      TRAINING_PATTERN_SEL action
 *
 * Returns the ucStatus byte filled in by atom. */
static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
				    int action, int dp_clock,
				    u8 ucconfig, u8 lane_num)
{
	DP_ENCODER_SERVICE_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);

	memset(&args, 0, sizeof(args));
	args.ucLinkClock = dp_clock / 10;
	args.ucConfig = ucconfig;
	args.ucAction = action;
	args.ucLaneNum = lane_num;
	args.ucStatus = 0;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
	return args.ucStatus;
}
468 
/* Query the sink type on this connector's AUX channel via the
 * DPEncoderService GET_SINK_TYPE action.
 * Returns the raw status byte reported by atom. */
u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	struct drm_device *dev = radeon_connector->base.dev;
	struct radeon_device *rdev = dev->dev_private;

	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
					 dig_connector->dp_i2c_bus->rec.i2c_id, 0);
}
478 
/* Read and log the sink and branch device OUIs, if the sink
 * advertises OUI support. Purely informational: the values are only
 * printed, never stored. */
static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 buf[3];

	if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
		return;

	if (radeon_dp_aux_native_read(radeon_connector, DP_SINK_OUI, buf, 3, 0))
		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
			      buf[0], buf[1], buf[2]);

	if (radeon_dp_aux_native_read(radeon_connector, DP_BRANCH_OUI, buf, 3, 0))
		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
			      buf[0], buf[1], buf[2]);
}
495 
496 bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
497 {
498 	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
499 	u8 msg[DP_DPCD_SIZE];
500 	int ret, i;
501 
502 	ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg,
503 					DP_DPCD_SIZE, 0);
504 	if (ret > 0) {
505 		memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE);
506 		DRM_DEBUG_KMS("DPCD: ");
507 		for (i = 0; i < DP_DPCD_SIZE; i++)
508 			DRM_DEBUG_KMS("%02x ", msg[i]);
509 		DRM_DEBUG_KMS("\n");
510 
511 		radeon_dp_probe_oui(radeon_connector);
512 
513 		return true;
514 	}
515 	dig_connector->dpcd[0] = 0;
516 	return false;
517 }
518 
/* Determine the panel mode (external DP, internal DP1, or internal
 * DP2) the encoder should use, based on any DP bridge chip present
 * and the sink's eDP configuration cap.
 * Pre-DCE4 asics always report external DP mode. */
int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
			     struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
	u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector);
	u8 tmp;

	if (!ASIC_IS_DCE4(rdev))
		return panel_mode;

	if (dp_bridge != ENCODER_OBJECT_ID_NONE) {
		/* DP bridge chips */
		/* bit 0 of the config cap selects the alternate framing mode */
		tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP);
		if (tmp & 1)
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
		else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) ||
			 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS))
			panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
		else
			panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
		/* eDP */
		tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP);
		if (tmp & 1)
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
	}

	return panel_mode;
}
551 
552 void radeon_dp_set_link_config(struct drm_connector *connector,
553 			       const struct drm_display_mode *mode)
554 {
555 	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
556 	struct radeon_connector_atom_dig *dig_connector;
557 
558 	if (!radeon_connector->con_priv)
559 		return;
560 	dig_connector = radeon_connector->con_priv;
561 
562 	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
563 	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
564 		dig_connector->dp_clock =
565 			radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
566 		dig_connector->dp_lane_count =
567 			radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
568 	}
569 }
570 
571 int radeon_dp_mode_valid_helper(struct drm_connector *connector,
572 				struct drm_display_mode *mode)
573 {
574 	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
575 	struct radeon_connector_atom_dig *dig_connector;
576 	int dp_clock;
577 
578 	if (!radeon_connector->con_priv)
579 		return MODE_CLOCK_HIGH;
580 	dig_connector = radeon_connector->con_priv;
581 
582 	dp_clock =
583 		radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
584 
585 	if ((dp_clock == 540000) &&
586 	    (!radeon_connector_is_dp12_capable(connector)))
587 		return MODE_CLOCK_HIGH;
588 
589 	return MODE_OK;
590 }
591 
/* Read the DP link status registers (starting at DP_LANE0_1_STATUS)
 * from the sink over native AUX.
 * Returns true and fills @link_status on success, false if the AUX
 * read failed or returned no data. */
static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
				      u8 link_status[DP_LINK_STATUS_SIZE])
{
	int ret;
	ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
					link_status, DP_LINK_STATUS_SIZE, 100);
	if (ret <= 0) {
		return false;
	}

	DRM_DEBUG_KMS("link status %*ph\n", 6, link_status);
	return true;
}
605 
606 bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
607 {
608 	u8 link_status[DP_LINK_STATUS_SIZE];
609 	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
610 
611 	if (!radeon_dp_get_link_status(radeon_connector, link_status))
612 		return false;
613 	if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count))
614 		return false;
615 	return true;
616 }
617 
/* State shared between the phases of a DP link training sequence. */
struct radeon_dp_link_train_info {
	struct radeon_device *rdev;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	int enc_id;		/* ATOM_DP_CONFIG_* encoder/link selection bits */
	int dp_clock;		/* selected link clock */
	int dp_lane_count;	/* selected lane count */
	bool tp3_supported;	/* sink supports training pattern 3 (DCE5+) */
	u8 dpcd[DP_RECEIVER_CAP_SIZE];	/* cached receiver capabilities */
	u8 train_set[4];	/* per-lane voltage swing / pre-emphasis */
	u8 link_status[DP_LINK_STATUS_SIZE];	/* last status read from the sink */
	u8 tries;		/* retry counter for the current phase */
	bool use_dpencoder;	/* train via the legacy DPEncoderService table */
};
633 
/* Program the current voltage swing / pre-emphasis values from
 * dp_info->train_set into both the transmitter (source side) and
 * the sink's DP_TRAINING_LANE0_SET.. registers. */
static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
{
	/* set the initial vs/emph on the source */
	atombios_dig_transmitter_setup(dp_info->encoder,
				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
				       0, dp_info->train_set[0]); /* sets all lanes at once */

	/* set the vs/emph on the sink */
	radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
				   dp_info->train_set, dp_info->dp_lane_count, 0);
}
645 
/* Select training pattern @tp (DP_TRAINING_PATTERN_1/2/3) on both
 * ends of the link. On DCE4+, or when the DPEncoderService table is
 * unusable, the source pattern goes through the DIG encoder setup;
 * otherwise through the legacy DPEncoderService action, which only
 * has encodings for patterns 1 and 2. */
static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
{
	int rtp = 0;

	/* set training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
			break;
		case DP_TRAINING_PATTERN_3:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
			break;
		}
		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
	} else {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = 0;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = 1;
			break;
		}
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
					  dp_info->dp_clock, dp_info->enc_id, rtp);
	}

	/* enable training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
}
680 
/* Prepare source and sink for link training: power up the sink,
 * configure downspread, eDP panel mode, lane count and link rate,
 * then start training on the source with the training pattern still
 * disabled on the sink. Always returns 0. */
static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
{
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
	u8 tmp;

	/* power up the sink */
	/* DPCD rev 1.1+ has the SET_POWER register */
	if (dp_info->dpcd[0] >= 0x11)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_SET_POWER, DP_SET_POWER_D0);

	/* possibly enable downspread on the sink */
	if (dp_info->dpcd[3] & 0x1)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
	else
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, 0);

	/* tell an eDP sink running in internal DP2 mode about it */
	if ((dp_info->connector->connector_type == DRM_MODE_CONNECTOR_eDP) &&
	    (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)) {
		radeon_write_dpcd_reg(dp_info->radeon_connector, DP_EDP_CONFIGURATION_SET, 1);
	}

	/* set the lane count on the sink */
	tmp = dp_info->dp_lane_count;
	if (dp_info->dpcd[DP_DPCD_REV] >= 0x11 &&
	    dp_info->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)
		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);

	/* set the link rate on the sink */
	tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock);
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);

	/* start training on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	return 0;
}
731 
/* Tear down after training: wait for the last writes to settle,
 * then disable the training pattern on the sink and on the source.
 * Always returns 0. */
static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
{
	udelay(400);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	/* disable the training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	return 0;
}
751 
/* Clock recovery phase: drive training pattern 1 and follow the
 * sink's voltage swing / pre-emphasis adjustment requests until all
 * lanes report clock recovery, every lane hits max swing, or the
 * same voltage has been retried 5 times.
 * Returns 0 on success, -1 on failure. */
static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
{
	bool clock_recovery;
 	u8 voltage;
	int i;

	/* start from the minimum drive settings */
	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
	memset(dp_info->train_set, 0, 4);
	radeon_dp_update_vs_emph(dp_info);

	udelay(400);

	/* clock recovery loop */
	clock_recovery = false;
	dp_info->tries = 0;
	voltage = 0xff;
	while (1) {
		drm_dp_link_train_clock_recovery_delay(dp_info->dpcd);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status)) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			clock_recovery = true;
			break;
		}

		/* give up if every lane is already at maximum swing */
		for (i = 0; i < dp_info->dp_lane_count; i++) {
			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		}
		if (i == dp_info->dp_lane_count) {
			DRM_ERROR("clock recovery reached max voltage\n");
			break;
		}

		/* count retries at an unchanged voltage level */
		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++dp_info->tries;
			if (dp_info->tries == 5) {
				DRM_ERROR("clock recovery tried 5 times\n");
				break;
			}
		} else
			dp_info->tries = 0;

		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
	}
	if (!clock_recovery) {
		DRM_ERROR("clock recovery failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
			  DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}
817 
/* Channel equalization phase: drive training pattern 3 (if the sink
 * supports it) or 2, and keep adjusting the drive levels until all
 * lanes report equalization, giving up after 5 adjustment attempts.
 * Returns 0 on success, -1 on failure. */
static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
{
	bool channel_eq;

	if (dp_info->tp3_supported)
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
	else
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);

	/* channel equalization loop */
	dp_info->tries = 0;
	channel_eq = false;
	while (1) {
		drm_dp_link_train_channel_eq_delay(dp_info->dpcd);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status)) {
			DRM_ERROR("displayport link status failed\n");
			break;
		}

		if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times */
		if (dp_info->tries > 5) {
			DRM_ERROR("channel eq failed: 5 tries\n");
			break;
		}

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
		dp_info->tries++;
	}

	if (!channel_eq) {
		DRM_ERROR("channel eq failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
			  dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			  (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
			  >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}
867 
/* Run the full link training sequence for a DP/eDP connector:
 * init, clock recovery, channel equalization, then finish (which
 * always runs, even when an earlier phase failed). Returns
 * silently when the encoder or connector private data is missing,
 * or the sink is not DP/eDP. */
void radeon_dp_link_train(struct drm_encoder *encoder,
			  struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_encoder_atom_dig *dig;
	struct radeon_connector *radeon_connector;
	struct radeon_connector_atom_dig *dig_connector;
	struct radeon_dp_link_train_info dp_info;
	int index;
	u8 tmp, frev, crev;

	if (!radeon_encoder->enc_priv)
		return;
	dig = radeon_encoder->enc_priv;

	radeon_connector = to_radeon_connector(connector);
	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
		return;

	/* DPEncoderService newer than 1.1 can't program properly the
	 * training pattern. When facing such version use the
	 * DIGXEncoderControl (X== 1 | 2)
	 */
	dp_info.use_dpencoder = true;
	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
		if (crev > 1) {
			dp_info.use_dpencoder = false;
		}
	}

	/* build the encoder/link selection bits for DPEncoderService */
	dp_info.enc_id = 0;
	if (dig->dig_encoder)
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
	if (dig->linkb)
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;

	/* training pattern 3 needs both a DCE5+ source and a TPS3 sink */
	tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
	if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
		dp_info.tp3_supported = true;
	else
		dp_info.tp3_supported = false;

	memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE);
	dp_info.rdev = rdev;
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.radeon_connector = radeon_connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;

	if (radeon_dp_link_train_init(&dp_info))
		goto done;
	if (radeon_dp_link_train_cr(&dp_info))
		goto done;
	if (radeon_dp_link_train_ce(&dp_info))
		goto done;
done:
	/* always tear down, even if a phase above failed */
	if (radeon_dp_link_train_finish(&dp_info))
		return;
}
940