/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
22 * 23 * Authors: Dave Airlie 24 * Alex Deucher 25 * Jerome Glisse 26 * 27 * $FreeBSD: head/sys/dev/drm2/radeon/atombios_dp.c 254885 2013-08-25 19:37:15Z dumbbell $ 28 */ 29 30 #include <drm/drmP.h> 31 #include <uapi_drm/radeon_drm.h> 32 #include "radeon.h" 33 34 #include "atom.h" 35 #include "atom-bits.h" 36 #include <drm/drm_dp_helper.h> 37 38 /* move these to drm_dp_helper.c/h */ 39 #define DP_LINK_CONFIGURATION_SIZE 9 40 #define DP_DPCD_SIZE DP_RECEIVER_CAP_SIZE 41 42 static char *voltage_names[] = { 43 "0.4V", "0.6V", "0.8V", "1.2V" 44 }; 45 static char *pre_emph_names[] = { 46 "0dB", "3.5dB", "6dB", "9.5dB" 47 }; 48 49 /***** radeon AUX functions *****/ 50 union aux_channel_transaction { 51 PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1; 52 PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2; 53 }; 54 55 static int radeon_process_aux_ch(struct radeon_i2c_chan *chan, 56 u8 *send, int send_bytes, 57 u8 *recv, int recv_size, 58 u8 delay, u8 *ack) 59 { 60 struct drm_device *dev = chan->dev; 61 struct radeon_device *rdev = dev->dev_private; 62 union aux_channel_transaction args; 63 int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction); 64 unsigned char *base; 65 int recv_bytes; 66 67 memset(&args, 0, sizeof(args)); 68 69 base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1); 70 71 memcpy(base, send, send_bytes); 72 73 args.v1.lpAuxRequest = 0 + 4; 74 args.v1.lpDataOut = 16 + 4; 75 args.v1.ucDataOutLen = 0; 76 args.v1.ucChannelID = chan->rec.i2c_id; 77 args.v1.ucDelay = delay / 10; 78 if (ASIC_IS_DCE4(rdev)) 79 args.v2.ucHPD_ID = chan->rec.hpd; 80 81 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 82 83 *ack = args.v1.ucReplyStatus; 84 85 /* timeout */ 86 if (args.v1.ucReplyStatus == 1) { 87 DRM_DEBUG_KMS("dp_aux_ch timeout\n"); 88 return -ETIMEDOUT; 89 } 90 91 /* flags not zero */ 92 if (args.v1.ucReplyStatus == 2) { 93 DRM_DEBUG_KMS("dp_aux_ch flags not zero\n"); 94 return -EBUSY; 95 } 96 97 /* error 
*/ 98 if (args.v1.ucReplyStatus == 3) { 99 DRM_DEBUG_KMS("dp_aux_ch error\n"); 100 return -EIO; 101 } 102 103 recv_bytes = args.v1.ucDataOutLen; 104 if (recv_bytes > recv_size) 105 recv_bytes = recv_size; 106 107 if (recv && recv_size) 108 memcpy(recv, base + 16, recv_bytes); 109 110 return recv_bytes; 111 } 112 113 static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector, 114 u16 address, u8 *send, u8 send_bytes, u8 delay) 115 { 116 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 117 int ret; 118 u8 msg[20]; 119 int msg_bytes = send_bytes + 4; 120 u8 ack; 121 unsigned retry; 122 123 if (send_bytes > 16) 124 return -1; 125 126 msg[0] = address; 127 msg[1] = address >> 8; 128 msg[2] = AUX_NATIVE_WRITE << 4; 129 msg[3] = (msg_bytes << 4) | (send_bytes - 1); 130 memcpy(&msg[4], send, send_bytes); 131 132 for (retry = 0; retry < 4; retry++) { 133 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus, 134 msg, msg_bytes, NULL, 0, delay, &ack); 135 if (ret == -EBUSY) 136 continue; 137 else if (ret < 0) 138 return ret; 139 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) 140 return send_bytes; 141 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER) 142 DRM_UDELAY(400); 143 else 144 return -EIO; 145 } 146 147 return -EIO; 148 } 149 150 static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector, 151 u16 address, u8 *recv, int recv_bytes, u8 delay) 152 { 153 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 154 u8 msg[4]; 155 int msg_bytes = 4; 156 u8 ack; 157 int ret; 158 unsigned retry; 159 160 msg[0] = address; 161 msg[1] = address >> 8; 162 msg[2] = AUX_NATIVE_READ << 4; 163 msg[3] = (msg_bytes << 4) | (recv_bytes - 1); 164 165 for (retry = 0; retry < 4; retry++) { 166 ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus, 167 msg, msg_bytes, recv, recv_bytes, delay, &ack); 168 if (ret == -EBUSY) 169 continue; 170 else if (ret < 0) 171 return ret; 
172 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) 173 return ret; 174 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER) 175 DRM_UDELAY(400); 176 else if (ret == 0) 177 return -EPROTO; 178 else 179 return -EIO; 180 } 181 182 return -EIO; 183 } 184 185 static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector, 186 u16 reg, u8 val) 187 { 188 radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0); 189 } 190 191 static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector, 192 u16 reg) 193 { 194 u8 val = 0; 195 196 radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0); 197 198 return val; 199 } 200 201 int radeon_dp_i2c_aux_ch(device_t dev, int mode, u8 write_byte, u8 *read_byte) 202 { 203 struct iic_dp_aux_data *algo_data = device_get_softc(dev); 204 struct radeon_i2c_chan *auxch = algo_data->priv; 205 u16 address = algo_data->address; 206 u8 msg[5]; 207 u8 reply[2]; 208 unsigned retry; 209 int msg_bytes; 210 int reply_bytes = 1; 211 int ret; 212 u8 ack; 213 214 /* Set up the command byte */ 215 if (mode & MODE_I2C_READ) 216 msg[2] = AUX_I2C_READ << 4; 217 else 218 msg[2] = AUX_I2C_WRITE << 4; 219 220 if (!(mode & MODE_I2C_STOP)) 221 msg[2] |= AUX_I2C_MOT << 4; 222 223 msg[0] = address; 224 msg[1] = address >> 8; 225 226 switch (mode) { 227 case MODE_I2C_WRITE: 228 msg_bytes = 5; 229 msg[3] = msg_bytes << 4; 230 msg[4] = write_byte; 231 break; 232 case MODE_I2C_READ: 233 msg_bytes = 4; 234 msg[3] = msg_bytes << 4; 235 break; 236 default: 237 msg_bytes = 4; 238 msg[3] = 3 << 4; 239 break; 240 } 241 242 for (retry = 0; retry < 4; retry++) { 243 ret = radeon_process_aux_ch(auxch, 244 msg, msg_bytes, reply, reply_bytes, 0, &ack); 245 if (ret == -EBUSY) 246 continue; 247 else if (ret < 0) { 248 DRM_DEBUG_KMS("aux_ch failed %d\n", ret); 249 return ret; 250 } 251 252 switch (ack & AUX_NATIVE_REPLY_MASK) { 253 case AUX_NATIVE_REPLY_ACK: 254 /* I2C-over-AUX Reply field is only valid 255 * when paired with 
AUX ACK. 256 */ 257 break; 258 case AUX_NATIVE_REPLY_NACK: 259 DRM_DEBUG_KMS("aux_ch native nack\n"); 260 return -EREMOTEIO; 261 case AUX_NATIVE_REPLY_DEFER: 262 DRM_DEBUG_KMS("aux_ch native defer\n"); 263 DRM_UDELAY(400); 264 continue; 265 default: 266 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack); 267 return -EREMOTEIO; 268 } 269 270 switch (ack & AUX_I2C_REPLY_MASK) { 271 case AUX_I2C_REPLY_ACK: 272 if (mode == MODE_I2C_READ) 273 *read_byte = reply[0]; 274 return ret; 275 case AUX_I2C_REPLY_NACK: 276 DRM_DEBUG_KMS("aux_i2c nack\n"); 277 return -EREMOTEIO; 278 case AUX_I2C_REPLY_DEFER: 279 DRM_DEBUG_KMS("aux_i2c defer\n"); 280 DRM_UDELAY(400); 281 break; 282 default: 283 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack); 284 return -EREMOTEIO; 285 } 286 } 287 288 DRM_DEBUG_KMS("aux i2c too many retries, giving up\n"); 289 return -EREMOTEIO; 290 } 291 292 /***** general DP utility functions *****/ 293 294 #define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_1200 295 #define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPHASIS_9_5 296 297 static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE], 298 int lane_count, 299 u8 train_set[4]) 300 { 301 u8 v = 0; 302 u8 p = 0; 303 int lane; 304 305 for (lane = 0; lane < lane_count; lane++) { 306 u8 this_v = drm_dp_get_adjust_request_voltage(link_status, lane); 307 u8 this_p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane); 308 309 DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n", 310 lane, 311 voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT], 312 pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]); 313 314 if (this_v > v) 315 v = this_v; 316 if (this_p > p) 317 p = this_p; 318 } 319 320 if (v >= DP_VOLTAGE_MAX) 321 v |= DP_TRAIN_MAX_SWING_REACHED; 322 323 if (p >= DP_PRE_EMPHASIS_MAX) 324 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 325 326 DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n", 327 voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> 
DP_TRAIN_VOLTAGE_SWING_SHIFT], 328 pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]); 329 330 for (lane = 0; lane < 4; lane++) 331 train_set[lane] = v | p; 332 } 333 334 /* convert bits per color to bits per pixel */ 335 /* get bpc from the EDID */ 336 static int convert_bpc_to_bpp(int bpc) 337 { 338 if (bpc == 0) 339 return 24; 340 else 341 return bpc * 3; 342 } 343 344 /* get the max pix clock supported by the link rate and lane num */ 345 static int dp_get_max_dp_pix_clock(int link_rate, 346 int lane_num, 347 int bpp) 348 { 349 return (link_rate * lane_num * 8) / bpp; 350 } 351 352 /***** radeon specific DP functions *****/ 353 354 /* First get the min lane# when low rate is used according to pixel clock 355 * (prefer low rate), second check max lane# supported by DP panel, 356 * if the max lane# < low rate lane# then use max lane# instead. 357 */ 358 static int radeon_dp_get_dp_lane_number(struct drm_connector *connector, 359 u8 dpcd[DP_DPCD_SIZE], 360 int pix_clock) 361 { 362 int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector)); 363 int max_link_rate = drm_dp_max_link_rate(dpcd); 364 int max_lane_num = drm_dp_max_lane_count(dpcd); 365 int lane_num; 366 int max_dp_pix_clock; 367 368 for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) { 369 max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp); 370 if (pix_clock <= max_dp_pix_clock) 371 break; 372 } 373 374 return lane_num; 375 } 376 377 static int radeon_dp_get_dp_link_clock(struct drm_connector *connector, 378 u8 dpcd[DP_DPCD_SIZE], 379 int pix_clock) 380 { 381 int bpp = convert_bpc_to_bpp(radeon_get_monitor_bpc(connector)); 382 int lane_num, max_pix_clock; 383 384 if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) == 385 ENCODER_OBJECT_ID_NUTMEG) 386 return 270000; 387 388 lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock); 389 max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp); 390 if (pix_clock <= 
max_pix_clock) 391 return 162000; 392 max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp); 393 if (pix_clock <= max_pix_clock) 394 return 270000; 395 if (radeon_connector_is_dp12_capable(connector)) { 396 max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp); 397 if (pix_clock <= max_pix_clock) 398 return 540000; 399 } 400 401 return drm_dp_max_link_rate(dpcd); 402 } 403 404 static u8 radeon_dp_encoder_service(struct radeon_device *rdev, 405 int action, int dp_clock, 406 u8 ucconfig, u8 lane_num) 407 { 408 DP_ENCODER_SERVICE_PARAMETERS args; 409 int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService); 410 411 memset(&args, 0, sizeof(args)); 412 args.ucLinkClock = dp_clock / 10; 413 args.ucConfig = ucconfig; 414 args.ucAction = action; 415 args.ucLaneNum = lane_num; 416 args.ucStatus = 0; 417 418 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 419 return args.ucStatus; 420 } 421 422 u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector) 423 { 424 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 425 struct drm_device *dev = radeon_connector->base.dev; 426 struct radeon_device *rdev = dev->dev_private; 427 428 return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0, 429 dig_connector->dp_i2c_bus->rec.i2c_id, 0); 430 } 431 432 static void radeon_dp_probe_oui(struct radeon_connector *radeon_connector) 433 { 434 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 435 u8 buf[3]; 436 437 if (!(dig_connector->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT)) 438 return; 439 440 if (radeon_dp_aux_native_read(radeon_connector, DP_SINK_OUI, buf, 3, 0)) 441 DRM_DEBUG_KMS("Sink OUI: %02hhx%02hhx%02hhx\n", 442 buf[0], buf[1], buf[2]); 443 444 if (radeon_dp_aux_native_read(radeon_connector, DP_BRANCH_OUI, buf, 3, 0)) 445 DRM_DEBUG_KMS("Branch OUI: %02hhx%02hhx%02hhx\n", 446 buf[0], buf[1], buf[2]); 447 } 448 449 bool radeon_dp_getdpcd(struct 
radeon_connector *radeon_connector) 450 { 451 struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv; 452 u8 msg[DP_DPCD_SIZE]; 453 int ret, i; 454 455 ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 456 DP_DPCD_SIZE, 0); 457 if (ret > 0) { 458 memcpy(dig_connector->dpcd, msg, DP_DPCD_SIZE); 459 DRM_DEBUG_KMS("DPCD: "); 460 for (i = 0; i < DP_DPCD_SIZE; i++) 461 DRM_DEBUG_KMS("%02x ", msg[i]); 462 DRM_DEBUG_KMS("\n"); 463 464 radeon_dp_probe_oui(radeon_connector); 465 466 return true; 467 } 468 dig_connector->dpcd[0] = 0; 469 return false; 470 } 471 472 int radeon_dp_get_panel_mode(struct drm_encoder *encoder, 473 struct drm_connector *connector) 474 { 475 struct drm_device *dev = encoder->dev; 476 struct radeon_device *rdev = dev->dev_private; 477 struct radeon_connector *radeon_connector = to_radeon_connector(connector); 478 int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE; 479 u16 dp_bridge = radeon_connector_encoder_get_dp_bridge_encoder_id(connector); 480 u8 tmp; 481 482 if (!ASIC_IS_DCE4(rdev)) 483 return panel_mode; 484 485 if (dp_bridge != ENCODER_OBJECT_ID_NONE) { 486 /* DP bridge chips */ 487 tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP); 488 if (tmp & 1) 489 panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE; 490 else if ((dp_bridge == ENCODER_OBJECT_ID_NUTMEG) || 491 (dp_bridge == ENCODER_OBJECT_ID_TRAVIS)) 492 panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE; 493 else 494 panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE; 495 } else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) { 496 /* eDP */ 497 tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP); 498 if (tmp & 1) 499 panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE; 500 } 501 502 return panel_mode; 503 } 504 505 void radeon_dp_set_link_config(struct drm_connector *connector, 506 const struct drm_display_mode *mode) 507 { 508 struct radeon_connector *radeon_connector = to_radeon_connector(connector); 509 struct 
radeon_connector_atom_dig *dig_connector; 510 511 if (!radeon_connector->con_priv) 512 return; 513 dig_connector = radeon_connector->con_priv; 514 515 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) || 516 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) { 517 dig_connector->dp_clock = 518 radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock); 519 dig_connector->dp_lane_count = 520 radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock); 521 } 522 } 523 524 int radeon_dp_mode_valid_helper(struct drm_connector *connector, 525 struct drm_display_mode *mode) 526 { 527 struct radeon_connector *radeon_connector = to_radeon_connector(connector); 528 struct radeon_connector_atom_dig *dig_connector; 529 int dp_clock; 530 531 if (!radeon_connector->con_priv) 532 return MODE_CLOCK_HIGH; 533 dig_connector = radeon_connector->con_priv; 534 535 dp_clock = 536 radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock); 537 538 if ((dp_clock == 540000) && 539 (!radeon_connector_is_dp12_capable(connector))) 540 return MODE_CLOCK_HIGH; 541 542 return MODE_OK; 543 } 544 545 static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector, 546 u8 link_status[DP_LINK_STATUS_SIZE]) 547 { 548 int ret; 549 ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS, 550 link_status, DP_LINK_STATUS_SIZE, 100); 551 if (ret <= 0) { 552 return false; 553 } 554 555 DRM_DEBUG_KMS("link status %*ph\n", 6, link_status); 556 return true; 557 } 558 559 bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector) 560 { 561 u8 link_status[DP_LINK_STATUS_SIZE]; 562 struct radeon_connector_atom_dig *dig = radeon_connector->con_priv; 563 564 if (!radeon_dp_get_link_status(radeon_connector, link_status)) 565 return false; 566 if (drm_dp_channel_eq_ok(link_status, dig->dp_lane_count)) 567 return false; 568 return true; 569 } 570 571 struct radeon_dp_link_train_info { 572 struct 
radeon_device *rdev; 573 struct drm_encoder *encoder; 574 struct drm_connector *connector; 575 struct radeon_connector *radeon_connector; 576 int enc_id; 577 int dp_clock; 578 int dp_lane_count; 579 bool tp3_supported; 580 u8 dpcd[DP_RECEIVER_CAP_SIZE]; 581 u8 train_set[4]; 582 u8 link_status[DP_LINK_STATUS_SIZE]; 583 u8 tries; 584 bool use_dpencoder; 585 }; 586 587 static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info) 588 { 589 /* set the initial vs/emph on the source */ 590 atombios_dig_transmitter_setup(dp_info->encoder, 591 ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH, 592 0, dp_info->train_set[0]); /* sets all lanes at once */ 593 594 /* set the vs/emph on the sink */ 595 radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET, 596 dp_info->train_set, dp_info->dp_lane_count, 0); 597 } 598 599 static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp) 600 { 601 int rtp = 0; 602 603 /* set training pattern on the source */ 604 if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) { 605 switch (tp) { 606 case DP_TRAINING_PATTERN_1: 607 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1; 608 break; 609 case DP_TRAINING_PATTERN_2: 610 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2; 611 break; 612 case DP_TRAINING_PATTERN_3: 613 rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3; 614 break; 615 } 616 atombios_dig_encoder_setup(dp_info->encoder, rtp, 0); 617 } else { 618 switch (tp) { 619 case DP_TRAINING_PATTERN_1: 620 rtp = 0; 621 break; 622 case DP_TRAINING_PATTERN_2: 623 rtp = 1; 624 break; 625 } 626 radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL, 627 dp_info->dp_clock, dp_info->enc_id, rtp); 628 } 629 630 /* enable training pattern on the sink */ 631 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp); 632 } 633 634 static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info) 635 { 636 struct radeon_encoder *radeon_encoder = 
to_radeon_encoder(dp_info->encoder); 637 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv; 638 u8 tmp; 639 640 /* power up the sink */ 641 if (dp_info->dpcd[0] >= 0x11) 642 radeon_write_dpcd_reg(dp_info->radeon_connector, 643 DP_SET_POWER, DP_SET_POWER_D0); 644 645 /* possibly enable downspread on the sink */ 646 if (dp_info->dpcd[3] & 0x1) 647 radeon_write_dpcd_reg(dp_info->radeon_connector, 648 DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5); 649 else 650 radeon_write_dpcd_reg(dp_info->radeon_connector, 651 DP_DOWNSPREAD_CTRL, 0); 652 653 if ((dp_info->connector->connector_type == DRM_MODE_CONNECTOR_eDP) && 654 (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)) { 655 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_EDP_CONFIGURATION_SET, 1); 656 } 657 658 /* set the lane count on the sink */ 659 tmp = dp_info->dp_lane_count; 660 if (dp_info->dpcd[DP_DPCD_REV] >= 0x11 && 661 dp_info->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP) 662 tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN; 663 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp); 664 665 /* set the link rate on the sink */ 666 tmp = drm_dp_link_rate_to_bw_code(dp_info->dp_clock); 667 radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp); 668 669 /* start training on the source */ 670 if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) 671 atombios_dig_encoder_setup(dp_info->encoder, 672 ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0); 673 else 674 radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START, 675 dp_info->dp_clock, dp_info->enc_id, 0); 676 677 /* disable the training pattern on the sink */ 678 radeon_write_dpcd_reg(dp_info->radeon_connector, 679 DP_TRAINING_PATTERN_SET, 680 DP_TRAINING_PATTERN_DISABLE); 681 682 return 0; 683 } 684 685 static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info) 686 { 687 DRM_UDELAY(400); 688 689 /* disable the training pattern on the sink */ 690 
radeon_write_dpcd_reg(dp_info->radeon_connector, 691 DP_TRAINING_PATTERN_SET, 692 DP_TRAINING_PATTERN_DISABLE); 693 694 /* disable the training pattern on the source */ 695 if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) 696 atombios_dig_encoder_setup(dp_info->encoder, 697 ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0); 698 else 699 radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE, 700 dp_info->dp_clock, dp_info->enc_id, 0); 701 702 return 0; 703 } 704 705 static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info) 706 { 707 bool clock_recovery; 708 u8 voltage; 709 int i; 710 711 radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1); 712 memset(dp_info->train_set, 0, 4); 713 radeon_dp_update_vs_emph(dp_info); 714 715 DRM_UDELAY(400); 716 717 /* clock recovery loop */ 718 clock_recovery = false; 719 dp_info->tries = 0; 720 voltage = 0xff; 721 while (1) { 722 drm_dp_link_train_clock_recovery_delay(dp_info->dpcd); 723 724 if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status)) { 725 DRM_ERROR("displayport link status failed\n"); 726 break; 727 } 728 729 if (drm_dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) { 730 clock_recovery = true; 731 break; 732 } 733 734 for (i = 0; i < dp_info->dp_lane_count; i++) { 735 if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0) 736 break; 737 } 738 if (i == dp_info->dp_lane_count) { 739 DRM_ERROR("clock recovery reached max voltage\n"); 740 break; 741 } 742 743 if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) { 744 ++dp_info->tries; 745 if (dp_info->tries == 5) { 746 DRM_ERROR("clock recovery tried 5 times\n"); 747 break; 748 } 749 } else 750 dp_info->tries = 0; 751 752 voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK; 753 754 /* Compute new train_set as requested by sink */ 755 dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set); 756 757 
radeon_dp_update_vs_emph(dp_info); 758 } 759 if (!clock_recovery) { 760 DRM_ERROR("clock recovery failed\n"); 761 return -1; 762 } else { 763 DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n", 764 dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK, 765 (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >> 766 DP_TRAIN_PRE_EMPHASIS_SHIFT); 767 return 0; 768 } 769 } 770 771 static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info) 772 { 773 bool channel_eq; 774 775 if (dp_info->tp3_supported) 776 radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3); 777 else 778 radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2); 779 780 /* channel equalization loop */ 781 dp_info->tries = 0; 782 channel_eq = false; 783 while (1) { 784 drm_dp_link_train_channel_eq_delay(dp_info->dpcd); 785 786 if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status)) { 787 DRM_ERROR("displayport link status failed\n"); 788 break; 789 } 790 791 if (drm_dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) { 792 channel_eq = true; 793 break; 794 } 795 796 /* Try 5 times */ 797 if (dp_info->tries > 5) { 798 DRM_ERROR("channel eq failed: 5 tries\n"); 799 break; 800 } 801 802 /* Compute new train_set as requested by sink */ 803 dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set); 804 805 radeon_dp_update_vs_emph(dp_info); 806 dp_info->tries++; 807 } 808 809 if (!channel_eq) { 810 DRM_ERROR("channel eq failed\n"); 811 return -1; 812 } else { 813 DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n", 814 dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK, 815 (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) 816 >> DP_TRAIN_PRE_EMPHASIS_SHIFT); 817 return 0; 818 } 819 } 820 821 void radeon_dp_link_train(struct drm_encoder *encoder, 822 struct drm_connector *connector) 823 { 824 struct drm_device *dev = encoder->dev; 825 struct radeon_device *rdev = dev->dev_private; 826 struct radeon_encoder 
*radeon_encoder = to_radeon_encoder(encoder); 827 struct radeon_encoder_atom_dig *dig; 828 struct radeon_connector *radeon_connector; 829 struct radeon_connector_atom_dig *dig_connector; 830 struct radeon_dp_link_train_info dp_info; 831 int index; 832 u8 tmp, frev, crev; 833 834 if (!radeon_encoder->enc_priv) 835 return; 836 dig = radeon_encoder->enc_priv; 837 838 radeon_connector = to_radeon_connector(connector); 839 if (!radeon_connector->con_priv) 840 return; 841 dig_connector = radeon_connector->con_priv; 842 843 if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) && 844 (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP)) 845 return; 846 847 /* DPEncoderService newer than 1.1 can't program properly the 848 * training pattern. When facing such version use the 849 * DIGXEncoderControl (X== 1 | 2) 850 */ 851 dp_info.use_dpencoder = true; 852 index = GetIndexIntoMasterTable(COMMAND, DPEncoderService); 853 if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) { 854 if (crev > 1) { 855 dp_info.use_dpencoder = false; 856 } 857 } 858 859 dp_info.enc_id = 0; 860 if (dig->dig_encoder) 861 dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER; 862 else 863 dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER; 864 if (dig->linkb) 865 dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B; 866 else 867 dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A; 868 869 tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT); 870 if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED)) 871 dp_info.tp3_supported = true; 872 else 873 dp_info.tp3_supported = false; 874 875 memcpy(dp_info.dpcd, dig_connector->dpcd, DP_RECEIVER_CAP_SIZE); 876 dp_info.rdev = rdev; 877 dp_info.encoder = encoder; 878 dp_info.connector = connector; 879 dp_info.radeon_connector = radeon_connector; 880 dp_info.dp_lane_count = dig_connector->dp_lane_count; 881 dp_info.dp_clock = dig_connector->dp_clock; 882 883 if (radeon_dp_link_train_init(&dp_info)) 884 goto done; 885 if 
(radeon_dp_link_train_cr(&dp_info)) 886 goto done; 887 if (radeon_dp_link_train_ce(&dp_info)) 888 goto done; 889 done: 890 if (radeon_dp_link_train_finish(&dp_info)) 891 return; 892 } 893