1 /* 2 * Copyright © 2008 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 * IN THE SOFTWARE. 
22 * 23 * Authors: 24 * Keith Packard <keithp@keithp.com> 25 * 26 * $FreeBSD: src/sys/dev/drm2/i915/intel_dp.c,v 1.1 2012/05/22 11:07:44 kib Exp $ 27 */ 28 29 #include <drm/drmP.h> 30 #include <drm/drm_crtc.h> 31 #include <drm/drm_crtc_helper.h> 32 #include <drm/drm_edid.h> 33 #include "intel_drv.h" 34 #include <drm/i915_drm.h> 35 #include "i915_drv.h" 36 37 #define DP_RECEIVER_CAP_SIZE 0xf 38 #define DP_LINK_STATUS_SIZE 6 39 #define DP_LINK_CHECK_TIMEOUT (10 * 1000) 40 41 #define DP_LINK_CONFIGURATION_SIZE 9 42 43 struct intel_dp { 44 struct intel_encoder base; 45 uint32_t output_reg; 46 uint32_t DP; 47 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE]; 48 bool has_audio; 49 enum hdmi_force_audio force_audio; 50 uint32_t color_range; 51 int dpms_mode; 52 uint8_t link_bw; 53 uint8_t lane_count; 54 uint8_t dpcd[DP_RECEIVER_CAP_SIZE]; 55 device_t dp_iic_bus; 56 device_t adapter; 57 bool is_pch_edp; 58 uint8_t train_set[4]; 59 int panel_power_up_delay; 60 int panel_power_down_delay; 61 int panel_power_cycle_delay; 62 int backlight_on_delay; 63 int backlight_off_delay; 64 struct drm_display_mode *panel_fixed_mode; /* for eDP */ 65 struct timeout_task panel_vdd_task; 66 bool want_panel_vdd; 67 }; 68 69 /** 70 * is_edp - is the given port attached to an eDP panel (either CPU or PCH) 71 * @intel_dp: DP struct 72 * 73 * If a CPU or PCH DP output is attached to an eDP panel, this function 74 * will return true, and false otherwise. 75 */ 76 static bool is_edp(struct intel_dp *intel_dp) 77 { 78 return intel_dp->base.type == INTEL_OUTPUT_EDP; 79 } 80 81 /** 82 * is_pch_edp - is the port on the PCH and attached to an eDP panel? 83 * @intel_dp: DP struct 84 * 85 * Returns true if the given DP struct corresponds to a PCH DP port attached 86 * to an eDP panel, false otherwise. Helpful for determining whether we 87 * may need FDI resources for a given DP output or not. 
88 */ 89 static bool is_pch_edp(struct intel_dp *intel_dp) 90 { 91 return intel_dp->is_pch_edp; 92 } 93 94 /** 95 * is_cpu_edp - is the port on the CPU and attached to an eDP panel? 96 * @intel_dp: DP struct 97 * 98 * Returns true if the given DP struct corresponds to a CPU eDP port. 99 */ 100 static bool is_cpu_edp(struct intel_dp *intel_dp) 101 { 102 return is_edp(intel_dp) && !is_pch_edp(intel_dp); 103 } 104 105 static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder) 106 { 107 return container_of(encoder, struct intel_dp, base.base); 108 } 109 110 static struct intel_dp *intel_attached_dp(struct drm_connector *connector) 111 { 112 return container_of(intel_attached_encoder(connector), 113 struct intel_dp, base); 114 } 115 116 /** 117 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP? 118 * @encoder: DRM encoder 119 * 120 * Return true if @encoder corresponds to a PCH attached eDP panel. Needed 121 * by intel_display.c. 122 */ 123 bool intel_encoder_is_pch_edp(struct drm_encoder *encoder) 124 { 125 struct intel_dp *intel_dp; 126 127 if (!encoder) 128 return false; 129 130 intel_dp = enc_to_intel_dp(encoder); 131 132 return is_pch_edp(intel_dp); 133 } 134 135 static void intel_dp_start_link_train(struct intel_dp *intel_dp); 136 static void intel_dp_complete_link_train(struct intel_dp *intel_dp); 137 static void intel_dp_link_down(struct intel_dp *intel_dp); 138 139 void 140 intel_edp_link_config(struct intel_encoder *intel_encoder, 141 int *lane_num, int *link_bw) 142 { 143 struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base); 144 145 *lane_num = intel_dp->lane_count; 146 if (intel_dp->link_bw == DP_LINK_BW_1_62) 147 *link_bw = 162000; 148 else if (intel_dp->link_bw == DP_LINK_BW_2_7) 149 *link_bw = 270000; 150 } 151 152 static int 153 intel_dp_max_lane_count(struct intel_dp *intel_dp) 154 { 155 int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f; 156 switch (max_lane_count) { 157 case 1: case 2: 
case 4: 158 break; 159 default: 160 max_lane_count = 4; 161 } 162 return max_lane_count; 163 } 164 165 static int 166 intel_dp_max_link_bw(struct intel_dp *intel_dp) 167 { 168 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE]; 169 170 switch (max_link_bw) { 171 case DP_LINK_BW_1_62: 172 case DP_LINK_BW_2_7: 173 break; 174 default: 175 max_link_bw = DP_LINK_BW_1_62; 176 break; 177 } 178 return max_link_bw; 179 } 180 181 static int 182 intel_dp_link_clock(uint8_t link_bw) 183 { 184 if (link_bw == DP_LINK_BW_2_7) 185 return 270000; 186 else 187 return 162000; 188 } 189 190 /* 191 * The units on the numbers in the next two are... bizarre. Examples will 192 * make it clearer; this one parallels an example in the eDP spec. 193 * 194 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as: 195 * 196 * 270000 * 1 * 8 / 10 == 216000 197 * 198 * The actual data capacity of that configuration is 2.16Gbit/s, so the 199 * units are decakilobits. ->clock in a drm_display_mode is in kilohertz - 200 * or equivalently, kilopixels per second - so for 1680x1050R it'd be 201 * 119000. At 18bpp that's 2142000 kilobits per second. 202 * 203 * Thus the strange-looking division by 10 in intel_dp_link_required, to 204 * get the result in decakilobits instead of kilobits. 
205 */ 206 207 static int 208 intel_dp_link_required(int pixel_clock, int bpp) 209 { 210 return (pixel_clock * bpp + 9) / 10; 211 } 212 213 static int 214 intel_dp_max_data_rate(int max_link_clock, int max_lanes) 215 { 216 return (max_link_clock * max_lanes * 8) / 10; 217 } 218 219 static bool 220 intel_dp_adjust_dithering(struct intel_dp *intel_dp, 221 const struct drm_display_mode *mode, 222 struct drm_display_mode *adjusted_mode) 223 { 224 int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp)); 225 int max_lanes = intel_dp_max_lane_count(intel_dp); 226 int max_rate, mode_rate; 227 228 mode_rate = intel_dp_link_required(mode->clock, 24); 229 max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes); 230 231 if (mode_rate > max_rate) { 232 mode_rate = intel_dp_link_required(mode->clock, 18); 233 if (mode_rate > max_rate) 234 return false; 235 236 if (adjusted_mode) 237 adjusted_mode->private_flags 238 |= INTEL_MODE_DP_FORCE_6BPC; 239 240 return true; 241 } 242 243 return true; 244 } 245 246 static int 247 intel_dp_mode_valid(struct drm_connector *connector, 248 struct drm_display_mode *mode) 249 { 250 struct intel_dp *intel_dp = intel_attached_dp(connector); 251 252 if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) { 253 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay) 254 return MODE_PANEL; 255 256 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay) 257 return MODE_PANEL; 258 } 259 260 if (!intel_dp_adjust_dithering(intel_dp, mode, NULL)) 261 return MODE_CLOCK_HIGH; 262 263 if (mode->clock < 10000) 264 return MODE_CLOCK_LOW; 265 266 return MODE_OK; 267 } 268 269 static uint32_t 270 pack_aux(uint8_t *src, int src_bytes) 271 { 272 int i; 273 uint32_t v = 0; 274 275 if (src_bytes > 4) 276 src_bytes = 4; 277 for (i = 0; i < src_bytes; i++) 278 v |= ((uint32_t) src[i]) << ((3-i) * 8); 279 return v; 280 } 281 282 static void 283 unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes) 284 { 285 int i; 286 if (dst_bytes > 4) 287 
dst_bytes = 4; 288 for (i = 0; i < dst_bytes; i++) 289 dst[i] = src >> ((3-i) * 8); 290 } 291 292 /* hrawclock is 1/4 the FSB frequency */ 293 static int 294 intel_hrawclk(struct drm_device *dev) 295 { 296 struct drm_i915_private *dev_priv = dev->dev_private; 297 uint32_t clkcfg; 298 299 clkcfg = I915_READ(CLKCFG); 300 switch (clkcfg & CLKCFG_FSB_MASK) { 301 case CLKCFG_FSB_400: 302 return 100; 303 case CLKCFG_FSB_533: 304 return 133; 305 case CLKCFG_FSB_667: 306 return 166; 307 case CLKCFG_FSB_800: 308 return 200; 309 case CLKCFG_FSB_1067: 310 return 266; 311 case CLKCFG_FSB_1333: 312 return 333; 313 /* these two are just a guess; one of them might be right */ 314 case CLKCFG_FSB_1600: 315 case CLKCFG_FSB_1600_ALT: 316 return 400; 317 default: 318 return 133; 319 } 320 } 321 322 static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp) 323 { 324 struct drm_device *dev = intel_dp->base.base.dev; 325 struct drm_i915_private *dev_priv = dev->dev_private; 326 327 return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0; 328 } 329 330 static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp) 331 { 332 struct drm_device *dev = intel_dp->base.base.dev; 333 struct drm_i915_private *dev_priv = dev->dev_private; 334 335 return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0; 336 } 337 338 static void 339 intel_dp_check_edp(struct intel_dp *intel_dp) 340 { 341 struct drm_device *dev = intel_dp->base.base.dev; 342 struct drm_i915_private *dev_priv = dev->dev_private; 343 344 if (!is_edp(intel_dp)) 345 return; 346 if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) { 347 kprintf("eDP powered off while attempting aux channel communication.\n"); 348 DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n", 349 I915_READ(PCH_PP_STATUS), 350 I915_READ(PCH_PP_CONTROL)); 351 } 352 } 353 354 static int 355 intel_dp_aux_ch(struct intel_dp *intel_dp, 356 uint8_t *send, int send_bytes, 357 uint8_t *recv, int recv_size) 358 { 359 uint32_t output_reg = 
intel_dp->output_reg; 360 struct drm_device *dev = intel_dp->base.base.dev; 361 struct drm_i915_private *dev_priv = dev->dev_private; 362 uint32_t ch_ctl = output_reg + 0x10; 363 uint32_t ch_data = ch_ctl + 4; 364 int i; 365 int recv_bytes; 366 uint32_t status; 367 uint32_t aux_clock_divider; 368 int try, precharge = 5; 369 370 intel_dp_check_edp(intel_dp); 371 /* The clock divider is based off the hrawclk, 372 * and would like to run at 2MHz. So, take the 373 * hrawclk value and divide by 2 and use that 374 * 375 * Note that PCH attached eDP panels should use a 125MHz input 376 * clock divider. 377 */ 378 if (is_cpu_edp(intel_dp)) { 379 if (IS_GEN6(dev) || IS_GEN7(dev)) 380 aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */ 381 else 382 aux_clock_divider = 225; /* eDP input clock at 450Mhz */ 383 } else if (HAS_PCH_SPLIT(dev)) 384 aux_clock_divider = 63; /* IRL input clock fixed at 125Mhz */ 385 else 386 aux_clock_divider = intel_hrawclk(dev) / 2; 387 388 /* Try to wait for any previous AUX channel activity */ 389 for (try = 0; try < 3; try++) { 390 status = I915_READ(ch_ctl); 391 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0) 392 break; 393 DELAY(1000); 394 } 395 396 if (try == 3) { 397 kprintf("dp_aux_ch not started status 0x%08x\n", 398 I915_READ(ch_ctl)); 399 return -EBUSY; 400 } 401 402 /* Must try at least 3 times according to DP spec */ 403 for (try = 0; try < 5; try++) { 404 /* Load the send data into the aux channel data registers */ 405 for (i = 0; i < send_bytes; i += 4) 406 I915_WRITE(ch_data + i, 407 pack_aux(send + i, send_bytes - i)); 408 409 /* Send the command and wait for it to complete */ 410 I915_WRITE(ch_ctl, 411 DP_AUX_CH_CTL_SEND_BUSY | 412 DP_AUX_CH_CTL_TIME_OUT_400us | 413 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) | 414 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) | 415 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) | 416 DP_AUX_CH_CTL_DONE | 417 DP_AUX_CH_CTL_TIME_OUT_ERROR | 418 
DP_AUX_CH_CTL_RECEIVE_ERROR); 419 for (;;) { 420 status = I915_READ(ch_ctl); 421 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0) 422 break; 423 DELAY(100); 424 } 425 426 /* Clear done status and any errors */ 427 I915_WRITE(ch_ctl, 428 status | 429 DP_AUX_CH_CTL_DONE | 430 DP_AUX_CH_CTL_TIME_OUT_ERROR | 431 DP_AUX_CH_CTL_RECEIVE_ERROR); 432 433 if (status & (DP_AUX_CH_CTL_TIME_OUT_ERROR | 434 DP_AUX_CH_CTL_RECEIVE_ERROR)) 435 continue; 436 if (status & DP_AUX_CH_CTL_DONE) 437 break; 438 } 439 440 if ((status & DP_AUX_CH_CTL_DONE) == 0) { 441 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status); 442 return -EBUSY; 443 } 444 445 /* Check for timeout or receive error. 446 * Timeouts occur when the sink is not connected 447 */ 448 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) { 449 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status); 450 return -EIO; 451 } 452 453 /* Timeouts occur when the device isn't connected, so they're 454 * "normal" -- don't fill the kernel log with these */ 455 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) { 456 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status); 457 return -ETIMEDOUT; 458 } 459 460 /* Unload any bytes sent back from the other side */ 461 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >> 462 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT); 463 if (recv_bytes > recv_size) 464 recv_bytes = recv_size; 465 466 for (i = 0; i < recv_bytes; i += 4) 467 unpack_aux(I915_READ(ch_data + i), 468 recv + i, recv_bytes - i); 469 470 return recv_bytes; 471 } 472 473 /* Write data to the aux channel in native mode */ 474 static int 475 intel_dp_aux_native_write(struct intel_dp *intel_dp, 476 uint16_t address, uint8_t *send, int send_bytes) 477 { 478 int ret; 479 uint8_t msg[20]; 480 int msg_bytes; 481 uint8_t ack; 482 483 intel_dp_check_edp(intel_dp); 484 if (send_bytes > 16) 485 return -1; 486 msg[0] = AUX_NATIVE_WRITE << 4; 487 msg[1] = address >> 8; 488 msg[2] = address & 0xff; 489 msg[3] = send_bytes - 1; 490 memcpy(&msg[4], send, 
send_bytes); 491 msg_bytes = send_bytes + 4; 492 for (;;) { 493 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1); 494 if (ret < 0) 495 return ret; 496 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) 497 break; 498 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER) 499 DELAY(100); 500 else 501 return -EIO; 502 } 503 return send_bytes; 504 } 505 506 /* Write a single byte to the aux channel in native mode */ 507 static int 508 intel_dp_aux_native_write_1(struct intel_dp *intel_dp, 509 uint16_t address, uint8_t byte) 510 { 511 return intel_dp_aux_native_write(intel_dp, address, &byte, 1); 512 } 513 514 /* read bytes from a native aux channel */ 515 static int 516 intel_dp_aux_native_read(struct intel_dp *intel_dp, 517 uint16_t address, uint8_t *recv, int recv_bytes) 518 { 519 uint8_t msg[4]; 520 int msg_bytes; 521 uint8_t reply[20]; 522 int reply_bytes; 523 uint8_t ack; 524 int ret; 525 526 intel_dp_check_edp(intel_dp); 527 msg[0] = AUX_NATIVE_READ << 4; 528 msg[1] = address >> 8; 529 msg[2] = address & 0xff; 530 msg[3] = recv_bytes - 1; 531 532 msg_bytes = 4; 533 reply_bytes = recv_bytes + 1; 534 535 for (;;) { 536 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, 537 reply, reply_bytes); 538 if (ret == 0) 539 return -EPROTO; 540 if (ret < 0) 541 return ret; 542 ack = reply[0]; 543 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) { 544 memcpy(recv, reply + 1, ret - 1); 545 return ret - 1; 546 } 547 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER) 548 DELAY(100); 549 else 550 return -EIO; 551 } 552 } 553 554 static int 555 intel_dp_i2c_aux_ch(device_t idev, int mode, uint8_t write_byte, 556 uint8_t *read_byte) 557 { 558 struct iic_dp_aux_data *data; 559 struct intel_dp *intel_dp; 560 uint16_t address; 561 uint8_t msg[5]; 562 uint8_t reply[2]; 563 unsigned retry; 564 int msg_bytes; 565 int reply_bytes; 566 int ret; 567 568 data = device_get_softc(idev); 569 intel_dp = data->priv; 570 address = data->address; 
571 572 intel_dp_check_edp(intel_dp); 573 /* Set up the command byte */ 574 if (mode & MODE_I2C_READ) 575 msg[0] = AUX_I2C_READ << 4; 576 else 577 msg[0] = AUX_I2C_WRITE << 4; 578 579 if (!(mode & MODE_I2C_STOP)) 580 msg[0] |= AUX_I2C_MOT << 4; 581 582 msg[1] = address >> 8; 583 msg[2] = address; 584 585 switch (mode) { 586 case MODE_I2C_WRITE: 587 msg[3] = 0; 588 msg[4] = write_byte; 589 msg_bytes = 5; 590 reply_bytes = 1; 591 break; 592 case MODE_I2C_READ: 593 msg[3] = 0; 594 msg_bytes = 4; 595 reply_bytes = 2; 596 break; 597 default: 598 msg_bytes = 3; 599 reply_bytes = 1; 600 break; 601 } 602 603 for (retry = 0; retry < 5; retry++) { 604 ret = intel_dp_aux_ch(intel_dp, 605 msg, msg_bytes, 606 reply, reply_bytes); 607 if (ret < 0) { 608 DRM_DEBUG_KMS("aux_ch failed %d\n", ret); 609 return (-ret); 610 } 611 612 switch (reply[0] & AUX_NATIVE_REPLY_MASK) { 613 case AUX_NATIVE_REPLY_ACK: 614 /* I2C-over-AUX Reply field is only valid 615 * when paired with AUX ACK. 616 */ 617 break; 618 case AUX_NATIVE_REPLY_NACK: 619 DRM_DEBUG_KMS("aux_ch native nack\n"); 620 return (EREMOTEIO); 621 case AUX_NATIVE_REPLY_DEFER: 622 DELAY(100); 623 continue; 624 default: 625 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", 626 reply[0]); 627 return (EREMOTEIO); 628 } 629 630 switch (reply[0] & AUX_I2C_REPLY_MASK) { 631 case AUX_I2C_REPLY_ACK: 632 if (mode == MODE_I2C_READ) { 633 *read_byte = reply[1]; 634 } 635 return (0/*reply_bytes - 1*/); 636 case AUX_I2C_REPLY_NACK: 637 DRM_DEBUG_KMS("aux_i2c nack\n"); 638 return (EREMOTEIO); 639 case AUX_I2C_REPLY_DEFER: 640 DRM_DEBUG_KMS("aux_i2c defer\n"); 641 DELAY(100); 642 break; 643 default: 644 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]); 645 return (EREMOTEIO); 646 } 647 } 648 649 DRM_ERROR("too many retries, giving up\n"); 650 return (EREMOTEIO); 651 } 652 653 static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp); 654 static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync); 655 656 static int 
657 intel_dp_i2c_init(struct intel_dp *intel_dp, 658 struct intel_connector *intel_connector, const char *name) 659 { 660 int ret; 661 662 DRM_DEBUG_KMS("i2c_init %s\n", name); 663 664 ironlake_edp_panel_vdd_on(intel_dp); 665 ret = iic_dp_aux_add_bus(intel_connector->base.dev->device, name, 666 intel_dp_i2c_aux_ch, intel_dp, &intel_dp->dp_iic_bus, 667 &intel_dp->adapter); 668 ironlake_edp_panel_vdd_off(intel_dp, false); 669 return (ret); 670 } 671 672 static bool 673 intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode, 674 struct drm_display_mode *adjusted_mode) 675 { 676 struct drm_device *dev = encoder->dev; 677 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 678 int lane_count, clock; 679 int max_lane_count = intel_dp_max_lane_count(intel_dp); 680 int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0; 681 int bpp; 682 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 }; 683 684 if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) { 685 intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode); 686 intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN, 687 mode, adjusted_mode); 688 } 689 690 if (!intel_dp_adjust_dithering(intel_dp, adjusted_mode, adjusted_mode)) 691 return false; 692 693 bpp = adjusted_mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 
18 : 24; 694 695 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) { 696 for (clock = 0; clock <= max_clock; clock++) { 697 int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count); 698 699 if (intel_dp_link_required(adjusted_mode->clock, bpp) 700 <= link_avail) { 701 intel_dp->link_bw = bws[clock]; 702 intel_dp->lane_count = lane_count; 703 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw); 704 DRM_DEBUG_KMS("Display port link bw %02x lane " 705 "count %d clock %d\n", 706 intel_dp->link_bw, intel_dp->lane_count, 707 adjusted_mode->clock); 708 return true; 709 } 710 } 711 } 712 713 return false; 714 } 715 716 struct intel_dp_m_n { 717 uint32_t tu; 718 uint32_t gmch_m; 719 uint32_t gmch_n; 720 uint32_t link_m; 721 uint32_t link_n; 722 }; 723 724 static void 725 intel_reduce_ratio(uint32_t *num, uint32_t *den) 726 { 727 while (*num > 0xffffff || *den > 0xffffff) { 728 *num >>= 1; 729 *den >>= 1; 730 } 731 } 732 733 static void 734 intel_dp_compute_m_n(int bpp, 735 int nlanes, 736 int pixel_clock, 737 int link_clock, 738 struct intel_dp_m_n *m_n) 739 { 740 m_n->tu = 64; 741 m_n->gmch_m = (pixel_clock * bpp) >> 3; 742 m_n->gmch_n = link_clock * nlanes; 743 intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n); 744 m_n->link_m = pixel_clock; 745 m_n->link_n = link_clock; 746 intel_reduce_ratio(&m_n->link_m, &m_n->link_n); 747 } 748 749 void 750 intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode, 751 struct drm_display_mode *adjusted_mode) 752 { 753 struct drm_device *dev = crtc->dev; 754 struct drm_mode_config *mode_config = &dev->mode_config; 755 struct drm_encoder *encoder; 756 struct drm_i915_private *dev_priv = dev->dev_private; 757 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 758 int lane_count = 4; 759 struct intel_dp_m_n m_n; 760 int pipe = intel_crtc->pipe; 761 762 /* 763 * Find the lane count in the intel_encoder private 764 */ 765 list_for_each_entry(encoder, 
&mode_config->encoder_list, head) { 766 struct intel_dp *intel_dp; 767 768 if (encoder->crtc != crtc) 769 continue; 770 771 intel_dp = enc_to_intel_dp(encoder); 772 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT || 773 intel_dp->base.type == INTEL_OUTPUT_EDP) 774 { 775 lane_count = intel_dp->lane_count; 776 break; 777 } 778 } 779 780 /* 781 * Compute the GMCH and Link ratios. The '3' here is 782 * the number of bytes_per_pixel post-LUT, which we always 783 * set up for 8-bits of R/G/B, or 3 bytes total. 784 */ 785 intel_dp_compute_m_n(intel_crtc->bpp, lane_count, 786 mode->clock, adjusted_mode->clock, &m_n); 787 788 if (HAS_PCH_SPLIT(dev)) { 789 I915_WRITE(TRANSDATA_M1(pipe), 790 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | 791 m_n.gmch_m); 792 I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n); 793 I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m); 794 I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n); 795 } else { 796 I915_WRITE(PIPE_GMCH_DATA_M(pipe), 797 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | 798 m_n.gmch_m); 799 I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n); 800 I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m); 801 I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n); 802 } 803 } 804 805 static void ironlake_edp_pll_on(struct drm_encoder *encoder); 806 static void ironlake_edp_pll_off(struct drm_encoder *encoder); 807 808 static void 809 intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, 810 struct drm_display_mode *adjusted_mode) 811 { 812 struct drm_device *dev = encoder->dev; 813 struct drm_i915_private *dev_priv = dev->dev_private; 814 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 815 struct drm_crtc *crtc = intel_dp->base.base.crtc; 816 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 817 818 /* Turn on the eDP PLL if needed */ 819 if (is_edp(intel_dp)) { 820 if (!is_pch_edp(intel_dp)) 821 ironlake_edp_pll_on(encoder); 822 else 823 ironlake_edp_pll_off(encoder); 824 } 825 826 /* 827 * There are four kinds of DP registers: 
828 * 829 * IBX PCH 830 * SNB CPU 831 * IVB CPU 832 * CPT PCH 833 * 834 * IBX PCH and CPU are the same for almost everything, 835 * except that the CPU DP PLL is configured in this 836 * register 837 * 838 * CPT PCH is quite different, having many bits moved 839 * to the TRANS_DP_CTL register instead. That 840 * configuration happens (oddly) in ironlake_pch_enable 841 */ 842 843 /* Preserve the BIOS-computed detected bit. This is 844 * supposed to be read-only. 845 */ 846 intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED; 847 intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0; 848 849 /* Handle DP bits in common between all three register formats */ 850 851 intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0; 852 853 switch (intel_dp->lane_count) { 854 case 1: 855 intel_dp->DP |= DP_PORT_WIDTH_1; 856 break; 857 case 2: 858 intel_dp->DP |= DP_PORT_WIDTH_2; 859 break; 860 case 4: 861 intel_dp->DP |= DP_PORT_WIDTH_4; 862 break; 863 } 864 if (intel_dp->has_audio) { 865 DRM_DEBUG_KMS("Enabling DP audio on pipe %c\n", 866 pipe_name(intel_crtc->pipe)); 867 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE; 868 intel_write_eld(encoder, adjusted_mode); 869 } 870 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); 871 intel_dp->link_configuration[0] = intel_dp->link_bw; 872 intel_dp->link_configuration[1] = intel_dp->lane_count; 873 /* 874 * Check for DPCD version > 1.1 and enhanced framing support 875 */ 876 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && 877 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) { 878 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; 879 } 880 881 /* Split out the IBX/CPU vs CPT settings */ 882 883 if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) { 884 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 885 intel_dp->DP |= DP_SYNC_HS_HIGH; 886 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 887 intel_dp->DP |= DP_SYNC_VS_HIGH; 888 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT; 889 890 if (intel_dp->link_configuration[1] & 
DP_LANE_COUNT_ENHANCED_FRAME_EN) 891 intel_dp->DP |= DP_ENHANCED_FRAMING; 892 893 intel_dp->DP |= intel_crtc->pipe << 29; 894 895 /* don't miss out required setting for eDP */ 896 intel_dp->DP |= DP_PLL_ENABLE; 897 if (adjusted_mode->clock < 200000) 898 intel_dp->DP |= DP_PLL_FREQ_160MHZ; 899 else 900 intel_dp->DP |= DP_PLL_FREQ_270MHZ; 901 } else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) { 902 intel_dp->DP |= intel_dp->color_range; 903 904 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 905 intel_dp->DP |= DP_SYNC_HS_HIGH; 906 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 907 intel_dp->DP |= DP_SYNC_VS_HIGH; 908 intel_dp->DP |= DP_LINK_TRAIN_OFF; 909 910 if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN) 911 intel_dp->DP |= DP_ENHANCED_FRAMING; 912 913 if (intel_crtc->pipe == 1) 914 intel_dp->DP |= DP_PIPEB_SELECT; 915 916 if (is_cpu_edp(intel_dp)) { 917 /* don't miss out required setting for eDP */ 918 intel_dp->DP |= DP_PLL_ENABLE; 919 if (adjusted_mode->clock < 200000) 920 intel_dp->DP |= DP_PLL_FREQ_160MHZ; 921 else 922 intel_dp->DP |= DP_PLL_FREQ_270MHZ; 923 } 924 } else { 925 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT; 926 } 927 } 928 929 #define IDLE_ON_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK) 930 #define IDLE_ON_VALUE (PP_ON | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE) 931 932 #define IDLE_OFF_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK) 933 #define IDLE_OFF_VALUE (0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE) 934 935 #define IDLE_CYCLE_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK) 936 #define IDLE_CYCLE_VALUE (0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE) 937 938 static void ironlake_wait_panel_status(struct intel_dp *intel_dp, 939 u32 mask, 940 u32 value) 941 { 942 struct drm_device *dev = intel_dp->base.base.dev; 943 struct drm_i915_private *dev_priv = dev->dev_private; 944 945 DRM_DEBUG_KMS("mask %08x value 
%08x status %08x control %08x\n", 946 mask, value, 947 I915_READ(PCH_PP_STATUS), 948 I915_READ(PCH_PP_CONTROL)); 949 950 if (_intel_wait_for(dev, 951 (I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10, "915iwp")) { 952 DRM_ERROR("Panel status timeout: status %08x control %08x\n", 953 I915_READ(PCH_PP_STATUS), 954 I915_READ(PCH_PP_CONTROL)); 955 } 956 } 957 958 static void ironlake_wait_panel_on(struct intel_dp *intel_dp) 959 { 960 DRM_DEBUG_KMS("Wait for panel power on\n"); 961 ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE); 962 } 963 964 static void ironlake_wait_panel_off(struct intel_dp *intel_dp) 965 { 966 DRM_DEBUG_KMS("Wait for panel power off time\n"); 967 ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE); 968 } 969 970 static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp) 971 { 972 DRM_DEBUG_KMS("Wait for panel power cycle\n"); 973 ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE); 974 } 975 976 977 /* Read the current pp_control value, unlocking the register if it 978 * is locked 979 */ 980 981 static u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv) 982 { 983 u32 control = I915_READ(PCH_PP_CONTROL); 984 985 control &= ~PANEL_UNLOCK_MASK; 986 control |= PANEL_UNLOCK_REGS; 987 return control; 988 } 989 990 static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp) 991 { 992 struct drm_device *dev = intel_dp->base.base.dev; 993 struct drm_i915_private *dev_priv = dev->dev_private; 994 u32 pp; 995 996 if (!is_edp(intel_dp)) 997 return; 998 DRM_DEBUG_KMS("Turn eDP VDD on\n"); 999 1000 if (intel_dp->want_panel_vdd) 1001 kprintf("eDP VDD already requested on\n"); 1002 1003 intel_dp->want_panel_vdd = true; 1004 1005 if (ironlake_edp_have_panel_vdd(intel_dp)) { 1006 DRM_DEBUG_KMS("eDP VDD already on\n"); 1007 return; 1008 } 1009 1010 if (!ironlake_edp_have_panel_power(intel_dp)) 1011 ironlake_wait_panel_power_cycle(intel_dp); 1012 1013 pp = 
ironlake_get_pp_control(dev_priv); 1014 pp |= EDP_FORCE_VDD; 1015 I915_WRITE(PCH_PP_CONTROL, pp); 1016 POSTING_READ(PCH_PP_CONTROL); 1017 DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n", 1018 I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL)); 1019 1020 /* 1021 * If the panel wasn't on, delay before accessing aux channel 1022 */ 1023 if (!ironlake_edp_have_panel_power(intel_dp)) { 1024 DRM_DEBUG_KMS("eDP was not running\n"); 1025 DELAY(intel_dp->panel_power_up_delay * 1000); 1026 } 1027 } 1028 1029 static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp) 1030 { 1031 struct drm_device *dev = intel_dp->base.base.dev; 1032 struct drm_i915_private *dev_priv = dev->dev_private; 1033 u32 pp; 1034 1035 if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) { 1036 pp = ironlake_get_pp_control(dev_priv); 1037 pp &= ~EDP_FORCE_VDD; 1038 I915_WRITE(PCH_PP_CONTROL, pp); 1039 POSTING_READ(PCH_PP_CONTROL); 1040 1041 /* Make sure sequencer is idle before allowing subsequent activity */ 1042 DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n", 1043 I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL)); 1044 1045 DELAY(intel_dp->panel_power_down_delay * 1000); 1046 } 1047 } 1048 1049 static void ironlake_panel_vdd_work(void *arg, int pending __unused) 1050 { 1051 struct intel_dp *intel_dp = arg; 1052 struct drm_device *dev = intel_dp->base.base.dev; 1053 1054 lockmgr(&dev->mode_config.mutex, LK_EXCLUSIVE); 1055 ironlake_panel_vdd_off_sync(intel_dp); 1056 lockmgr(&dev->mode_config.mutex, LK_RELEASE); 1057 } 1058 1059 static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync) 1060 { 1061 if (!is_edp(intel_dp)) 1062 return; 1063 1064 DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd); 1065 if (!intel_dp->want_panel_vdd) 1066 kprintf("eDP VDD not forced on\n"); 1067 1068 intel_dp->want_panel_vdd = false; 1069 1070 if (sync) { 1071 ironlake_panel_vdd_off_sync(intel_dp); 1072 } else { 1073 /* 1074 * 
Queue the timer to fire a long 1075 * time from now (relative to the power down delay) 1076 * to keep the panel power up across a sequence of operations 1077 */ 1078 struct drm_i915_private *dev_priv = intel_dp->base.base.dev->dev_private; 1079 taskqueue_enqueue_timeout(dev_priv->tq, 1080 &intel_dp->panel_vdd_task, 1081 msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5)); 1082 } 1083 } 1084 1085 static void ironlake_edp_panel_on(struct intel_dp *intel_dp) 1086 { 1087 struct drm_device *dev = intel_dp->base.base.dev; 1088 struct drm_i915_private *dev_priv = dev->dev_private; 1089 u32 pp; 1090 1091 if (!is_edp(intel_dp)) 1092 return; 1093 1094 DRM_DEBUG_KMS("Turn eDP power on\n"); 1095 1096 if (ironlake_edp_have_panel_power(intel_dp)) { 1097 DRM_DEBUG_KMS("eDP power already on\n"); 1098 return; 1099 } 1100 1101 ironlake_wait_panel_power_cycle(intel_dp); 1102 1103 pp = ironlake_get_pp_control(dev_priv); 1104 if (IS_GEN5(dev)) { 1105 /* ILK workaround: disable reset around power sequence */ 1106 pp &= ~PANEL_POWER_RESET; 1107 I915_WRITE(PCH_PP_CONTROL, pp); 1108 POSTING_READ(PCH_PP_CONTROL); 1109 } 1110 1111 pp |= POWER_TARGET_ON; 1112 if (!IS_GEN5(dev)) 1113 pp |= PANEL_POWER_RESET; 1114 1115 I915_WRITE(PCH_PP_CONTROL, pp); 1116 POSTING_READ(PCH_PP_CONTROL); 1117 1118 ironlake_wait_panel_on(intel_dp); 1119 1120 if (IS_GEN5(dev)) { 1121 pp |= PANEL_POWER_RESET; /* restore panel reset bit */ 1122 I915_WRITE(PCH_PP_CONTROL, pp); 1123 POSTING_READ(PCH_PP_CONTROL); 1124 } 1125 } 1126 1127 static void ironlake_edp_panel_off(struct intel_dp *intel_dp) 1128 { 1129 struct drm_device *dev = intel_dp->base.base.dev; 1130 struct drm_i915_private *dev_priv = dev->dev_private; 1131 u32 pp; 1132 1133 if (!is_edp(intel_dp)) 1134 return; 1135 1136 DRM_DEBUG_KMS("Turn eDP power off\n"); 1137 1138 if (intel_dp->want_panel_vdd) 1139 kprintf("Cannot turn power off while VDD is on\n"); 1140 1141 pp = ironlake_get_pp_control(dev_priv); 1142 pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | 
PANEL_POWER_RESET | EDP_BLC_ENABLE); 1143 I915_WRITE(PCH_PP_CONTROL, pp); 1144 POSTING_READ(PCH_PP_CONTROL); 1145 1146 ironlake_wait_panel_off(intel_dp); 1147 } 1148 1149 static void ironlake_edp_backlight_on(struct intel_dp *intel_dp) 1150 { 1151 struct drm_device *dev = intel_dp->base.base.dev; 1152 struct drm_i915_private *dev_priv = dev->dev_private; 1153 u32 pp; 1154 1155 if (!is_edp(intel_dp)) 1156 return; 1157 1158 DRM_DEBUG_KMS("\n"); 1159 /* 1160 * If we enable the backlight right away following a panel power 1161 * on, we may see slight flicker as the panel syncs with the eDP 1162 * link. So delay a bit to make sure the image is solid before 1163 * allowing it to appear. 1164 */ 1165 DELAY(intel_dp->backlight_on_delay * 1000); 1166 pp = ironlake_get_pp_control(dev_priv); 1167 pp |= EDP_BLC_ENABLE; 1168 I915_WRITE(PCH_PP_CONTROL, pp); 1169 POSTING_READ(PCH_PP_CONTROL); 1170 } 1171 1172 static void ironlake_edp_backlight_off(struct intel_dp *intel_dp) 1173 { 1174 struct drm_device *dev = intel_dp->base.base.dev; 1175 struct drm_i915_private *dev_priv = dev->dev_private; 1176 u32 pp; 1177 1178 if (!is_edp(intel_dp)) 1179 return; 1180 1181 DRM_DEBUG_KMS("\n"); 1182 pp = ironlake_get_pp_control(dev_priv); 1183 pp &= ~EDP_BLC_ENABLE; 1184 I915_WRITE(PCH_PP_CONTROL, pp); 1185 POSTING_READ(PCH_PP_CONTROL); 1186 DELAY(intel_dp->backlight_off_delay * 1000); 1187 } 1188 1189 static void ironlake_edp_pll_on(struct drm_encoder *encoder) 1190 { 1191 struct drm_device *dev = encoder->dev; 1192 struct drm_i915_private *dev_priv = dev->dev_private; 1193 u32 dpa_ctl; 1194 1195 DRM_DEBUG_KMS("\n"); 1196 dpa_ctl = I915_READ(DP_A); 1197 dpa_ctl |= DP_PLL_ENABLE; 1198 I915_WRITE(DP_A, dpa_ctl); 1199 POSTING_READ(DP_A); 1200 DELAY(200); 1201 } 1202 1203 static void ironlake_edp_pll_off(struct drm_encoder *encoder) 1204 { 1205 struct drm_device *dev = encoder->dev; 1206 struct drm_i915_private *dev_priv = dev->dev_private; 1207 u32 dpa_ctl; 1208 1209 dpa_ctl = I915_READ(DP_A); 
1210 dpa_ctl &= ~DP_PLL_ENABLE; 1211 I915_WRITE(DP_A, dpa_ctl); 1212 POSTING_READ(DP_A); 1213 DELAY(200); 1214 } 1215 1216 /* If the sink supports it, try to set the power state appropriately */ 1217 static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode) 1218 { 1219 int ret, i; 1220 1221 /* Should have a valid DPCD by this point */ 1222 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11) 1223 return; 1224 1225 if (mode != DRM_MODE_DPMS_ON) { 1226 ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER, 1227 DP_SET_POWER_D3); 1228 if (ret != 1) 1229 DRM_DEBUG("failed to write sink power state\n"); 1230 } else { 1231 /* 1232 * When turning on, we need to retry for 1ms to give the sink 1233 * time to wake up. 1234 */ 1235 for (i = 0; i < 3; i++) { 1236 ret = intel_dp_aux_native_write_1(intel_dp, 1237 DP_SET_POWER, 1238 DP_SET_POWER_D0); 1239 if (ret == 1) 1240 break; 1241 DELAY(1000); 1242 } 1243 } 1244 } 1245 1246 static void intel_dp_prepare(struct drm_encoder *encoder) 1247 { 1248 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1249 1250 ironlake_edp_backlight_off(intel_dp); 1251 ironlake_edp_panel_off(intel_dp); 1252 1253 /* Wake up the sink first */ 1254 ironlake_edp_panel_vdd_on(intel_dp); 1255 intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON); 1256 intel_dp_link_down(intel_dp); 1257 ironlake_edp_panel_vdd_off(intel_dp, false); 1258 1259 /* Make sure the panel is off before trying to 1260 * change the mode 1261 */ 1262 } 1263 1264 static void intel_dp_commit(struct drm_encoder *encoder) 1265 { 1266 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1267 struct drm_device *dev = encoder->dev; 1268 struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc); 1269 1270 ironlake_edp_panel_vdd_on(intel_dp); 1271 intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON); 1272 intel_dp_start_link_train(intel_dp); 1273 ironlake_edp_panel_on(intel_dp); 1274 ironlake_edp_panel_vdd_off(intel_dp, true); 1275 intel_dp_complete_link_train(intel_dp); 1276 
ironlake_edp_backlight_on(intel_dp); 1277 1278 intel_dp->dpms_mode = DRM_MODE_DPMS_ON; 1279 1280 if (HAS_PCH_CPT(dev)) 1281 intel_cpt_verify_modeset(dev, intel_crtc->pipe); 1282 } 1283 1284 static void 1285 intel_dp_dpms(struct drm_encoder *encoder, int mode) 1286 { 1287 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1288 struct drm_device *dev = encoder->dev; 1289 struct drm_i915_private *dev_priv = dev->dev_private; 1290 uint32_t dp_reg = I915_READ(intel_dp->output_reg); 1291 1292 if (mode != DRM_MODE_DPMS_ON) { 1293 ironlake_edp_backlight_off(intel_dp); 1294 ironlake_edp_panel_off(intel_dp); 1295 1296 ironlake_edp_panel_vdd_on(intel_dp); 1297 intel_dp_sink_dpms(intel_dp, mode); 1298 intel_dp_link_down(intel_dp); 1299 ironlake_edp_panel_vdd_off(intel_dp, false); 1300 1301 if (is_cpu_edp(intel_dp)) 1302 ironlake_edp_pll_off(encoder); 1303 } else { 1304 if (is_cpu_edp(intel_dp)) 1305 ironlake_edp_pll_on(encoder); 1306 1307 ironlake_edp_panel_vdd_on(intel_dp); 1308 intel_dp_sink_dpms(intel_dp, mode); 1309 if (!(dp_reg & DP_PORT_EN)) { 1310 intel_dp_start_link_train(intel_dp); 1311 ironlake_edp_panel_on(intel_dp); 1312 ironlake_edp_panel_vdd_off(intel_dp, true); 1313 intel_dp_complete_link_train(intel_dp); 1314 } else 1315 ironlake_edp_panel_vdd_off(intel_dp, false); 1316 ironlake_edp_backlight_on(intel_dp); 1317 } 1318 intel_dp->dpms_mode = mode; 1319 } 1320 /* 1321 * Native read with retry for link status and receiver capability reads for 1322 * cases where the sink may still be asleep. 1323 */ 1324 static bool 1325 intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address, 1326 uint8_t *recv, int recv_bytes) 1327 { 1328 int ret, i; 1329 1330 /* 1331 * Sinks are *supposed* to come up within 1ms from an off state, 1332 * but we're also supposed to retry 3 times per the spec. 
1333 */ 1334 for (i = 0; i < 3; i++) { 1335 ret = intel_dp_aux_native_read(intel_dp, address, recv, 1336 recv_bytes); 1337 if (ret == recv_bytes) 1338 return true; 1339 DELAY(1000); 1340 } 1341 1342 return false; 1343 } 1344 1345 /* 1346 * Fetch AUX CH registers 0x202 - 0x207 which contain 1347 * link status information 1348 */ 1349 static bool 1350 intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE]) 1351 { 1352 return intel_dp_aux_native_read_retry(intel_dp, 1353 DP_LANE0_1_STATUS, 1354 link_status, 1355 DP_LINK_STATUS_SIZE); 1356 } 1357 1358 static uint8_t 1359 intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1360 int r) 1361 { 1362 return link_status[r - DP_LANE0_1_STATUS]; 1363 } 1364 1365 static uint8_t 1366 intel_get_adjust_request_voltage(uint8_t adjust_request[2], 1367 int lane) 1368 { 1369 int s = ((lane & 1) ? 1370 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : 1371 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT); 1372 uint8_t l = adjust_request[lane>>1]; 1373 1374 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT; 1375 } 1376 1377 static uint8_t 1378 intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2], 1379 int lane) 1380 { 1381 int s = ((lane & 1) ? 
1382 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT : 1383 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT); 1384 uint8_t l = adjust_request[lane>>1]; 1385 1386 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT; 1387 } 1388 1389 1390 #if 0 1391 static char *voltage_names[] = { 1392 "0.4V", "0.6V", "0.8V", "1.2V" 1393 }; 1394 static char *pre_emph_names[] = { 1395 "0dB", "3.5dB", "6dB", "9.5dB" 1396 }; 1397 static char *link_train_names[] = { 1398 "pattern 1", "pattern 2", "idle", "off" 1399 }; 1400 #endif 1401 1402 /* 1403 * These are source-specific values; current Intel hardware supports 1404 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB 1405 */ 1406 1407 static uint8_t 1408 intel_dp_voltage_max(struct intel_dp *intel_dp) 1409 { 1410 struct drm_device *dev = intel_dp->base.base.dev; 1411 1412 if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) 1413 return DP_TRAIN_VOLTAGE_SWING_800; 1414 else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp)) 1415 return DP_TRAIN_VOLTAGE_SWING_1200; 1416 else 1417 return DP_TRAIN_VOLTAGE_SWING_800; 1418 } 1419 1420 static uint8_t 1421 intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing) 1422 { 1423 struct drm_device *dev = intel_dp->base.base.dev; 1424 1425 if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) { 1426 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) { 1427 case DP_TRAIN_VOLTAGE_SWING_400: 1428 return DP_TRAIN_PRE_EMPHASIS_6; 1429 case DP_TRAIN_VOLTAGE_SWING_600: 1430 case DP_TRAIN_VOLTAGE_SWING_800: 1431 return DP_TRAIN_PRE_EMPHASIS_3_5; 1432 default: 1433 return DP_TRAIN_PRE_EMPHASIS_0; 1434 } 1435 } else { 1436 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) { 1437 case DP_TRAIN_VOLTAGE_SWING_400: 1438 return DP_TRAIN_PRE_EMPHASIS_6; 1439 case DP_TRAIN_VOLTAGE_SWING_600: 1440 return DP_TRAIN_PRE_EMPHASIS_6; 1441 case DP_TRAIN_VOLTAGE_SWING_800: 1442 return DP_TRAIN_PRE_EMPHASIS_3_5; 1443 case DP_TRAIN_VOLTAGE_SWING_1200: 1444 default: 1445 return DP_TRAIN_PRE_EMPHASIS_0; 1446 } 1447 } 1448 } 1449 1450 static 
void 1451 intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE]) 1452 { 1453 uint8_t v = 0; 1454 uint8_t p = 0; 1455 int lane; 1456 uint8_t *adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS); 1457 uint8_t voltage_max; 1458 uint8_t preemph_max; 1459 1460 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1461 uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane); 1462 uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane); 1463 1464 if (this_v > v) 1465 v = this_v; 1466 if (this_p > p) 1467 p = this_p; 1468 } 1469 1470 voltage_max = intel_dp_voltage_max(intel_dp); 1471 if (v >= voltage_max) 1472 v = voltage_max | DP_TRAIN_MAX_SWING_REACHED; 1473 1474 preemph_max = intel_dp_pre_emphasis_max(intel_dp, v); 1475 if (p >= preemph_max) 1476 p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 1477 1478 for (lane = 0; lane < 4; lane++) 1479 intel_dp->train_set[lane] = v | p; 1480 } 1481 1482 static uint32_t 1483 intel_dp_signal_levels(uint8_t train_set) 1484 { 1485 uint32_t signal_levels = 0; 1486 1487 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 1488 case DP_TRAIN_VOLTAGE_SWING_400: 1489 default: 1490 signal_levels |= DP_VOLTAGE_0_4; 1491 break; 1492 case DP_TRAIN_VOLTAGE_SWING_600: 1493 signal_levels |= DP_VOLTAGE_0_6; 1494 break; 1495 case DP_TRAIN_VOLTAGE_SWING_800: 1496 signal_levels |= DP_VOLTAGE_0_8; 1497 break; 1498 case DP_TRAIN_VOLTAGE_SWING_1200: 1499 signal_levels |= DP_VOLTAGE_1_2; 1500 break; 1501 } 1502 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) { 1503 case DP_TRAIN_PRE_EMPHASIS_0: 1504 default: 1505 signal_levels |= DP_PRE_EMPHASIS_0; 1506 break; 1507 case DP_TRAIN_PRE_EMPHASIS_3_5: 1508 signal_levels |= DP_PRE_EMPHASIS_3_5; 1509 break; 1510 case DP_TRAIN_PRE_EMPHASIS_6: 1511 signal_levels |= DP_PRE_EMPHASIS_6; 1512 break; 1513 case DP_TRAIN_PRE_EMPHASIS_9_5: 1514 signal_levels |= DP_PRE_EMPHASIS_9_5; 1515 break; 1516 } 1517 return 
signal_levels; 1518 } 1519 1520 /* Gen6's DP voltage swing and pre-emphasis control */ 1521 static uint32_t 1522 intel_gen6_edp_signal_levels(uint8_t train_set) 1523 { 1524 int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 1525 DP_TRAIN_PRE_EMPHASIS_MASK); 1526 switch (signal_levels) { 1527 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0: 1528 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0: 1529 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B; 1530 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5: 1531 return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B; 1532 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6: 1533 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6: 1534 return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B; 1535 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5: 1536 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5: 1537 return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B; 1538 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0: 1539 case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0: 1540 return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B; 1541 default: 1542 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:" 1543 "0x%x\n", signal_levels); 1544 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B; 1545 } 1546 } 1547 1548 /* Gen7's DP voltage swing and pre-emphasis control */ 1549 static uint32_t 1550 intel_gen7_edp_signal_levels(uint8_t train_set) 1551 { 1552 int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 1553 DP_TRAIN_PRE_EMPHASIS_MASK); 1554 switch (signal_levels) { 1555 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0: 1556 return EDP_LINK_TRAIN_400MV_0DB_IVB; 1557 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5: 1558 return EDP_LINK_TRAIN_400MV_3_5DB_IVB; 1559 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6: 1560 return EDP_LINK_TRAIN_400MV_6DB_IVB; 1561 1562 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0: 1563 return 
EDP_LINK_TRAIN_600MV_0DB_IVB; 1564 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5: 1565 return EDP_LINK_TRAIN_600MV_3_5DB_IVB; 1566 1567 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0: 1568 return EDP_LINK_TRAIN_800MV_0DB_IVB; 1569 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5: 1570 return EDP_LINK_TRAIN_800MV_3_5DB_IVB; 1571 1572 default: 1573 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:" 1574 "0x%x\n", signal_levels); 1575 return EDP_LINK_TRAIN_500MV_0DB_IVB; 1576 } 1577 } 1578 1579 static uint8_t 1580 intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1581 int lane) 1582 { 1583 int s = (lane & 1) * 4; 1584 uint8_t l = link_status[lane>>1]; 1585 1586 return (l >> s) & 0xf; 1587 } 1588 1589 /* Check for clock recovery is done on all channels */ 1590 static bool 1591 intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count) 1592 { 1593 int lane; 1594 uint8_t lane_status; 1595 1596 for (lane = 0; lane < lane_count; lane++) { 1597 lane_status = intel_get_lane_status(link_status, lane); 1598 if ((lane_status & DP_LANE_CR_DONE) == 0) 1599 return false; 1600 } 1601 return true; 1602 } 1603 1604 /* Check to see if channel eq is done on all channels */ 1605 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\ 1606 DP_LANE_CHANNEL_EQ_DONE|\ 1607 DP_LANE_SYMBOL_LOCKED) 1608 static bool 1609 intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE]) 1610 { 1611 uint8_t lane_align; 1612 uint8_t lane_status; 1613 int lane; 1614 1615 lane_align = intel_dp_link_status(link_status, 1616 DP_LANE_ALIGN_STATUS_UPDATED); 1617 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0) 1618 return false; 1619 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1620 lane_status = intel_get_lane_status(link_status, lane); 1621 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS) 1622 return false; 1623 } 1624 return true; 1625 } 1626 1627 static bool 1628 
intel_dp_set_link_train(struct intel_dp *intel_dp, 1629 uint32_t dp_reg_value, 1630 uint8_t dp_train_pat) 1631 { 1632 struct drm_device *dev = intel_dp->base.base.dev; 1633 struct drm_i915_private *dev_priv = dev->dev_private; 1634 int ret; 1635 1636 I915_WRITE(intel_dp->output_reg, dp_reg_value); 1637 POSTING_READ(intel_dp->output_reg); 1638 1639 intel_dp_aux_native_write_1(intel_dp, 1640 DP_TRAINING_PATTERN_SET, 1641 dp_train_pat); 1642 1643 ret = intel_dp_aux_native_write(intel_dp, 1644 DP_TRAINING_LANE0_SET, 1645 intel_dp->train_set, 1646 intel_dp->lane_count); 1647 if (ret != intel_dp->lane_count) 1648 return false; 1649 1650 return true; 1651 } 1652 1653 /* Enable corresponding port and start training pattern 1 */ 1654 static void 1655 intel_dp_start_link_train(struct intel_dp *intel_dp) 1656 { 1657 struct drm_device *dev = intel_dp->base.base.dev; 1658 struct drm_i915_private *dev_priv = dev->dev_private; 1659 struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc); 1660 int i; 1661 uint8_t voltage; 1662 bool clock_recovery = false; 1663 int voltage_tries, loop_tries; 1664 u32 reg; 1665 uint32_t DP = intel_dp->DP; 1666 1667 /* Enable output, wait for it to become active */ 1668 I915_WRITE(intel_dp->output_reg, intel_dp->DP); 1669 POSTING_READ(intel_dp->output_reg); 1670 intel_wait_for_vblank(dev, intel_crtc->pipe); 1671 1672 /* Write the link configuration data */ 1673 intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET, 1674 intel_dp->link_configuration, 1675 DP_LINK_CONFIGURATION_SIZE); 1676 1677 DP |= DP_PORT_EN; 1678 1679 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) 1680 DP &= ~DP_LINK_TRAIN_MASK_CPT; 1681 else 1682 DP &= ~DP_LINK_TRAIN_MASK; 1683 memset(intel_dp->train_set, 0, 4); 1684 voltage = 0xff; 1685 voltage_tries = 0; 1686 loop_tries = 0; 1687 clock_recovery = false; 1688 for (;;) { 1689 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */ 1690 uint8_t link_status[DP_LINK_STATUS_SIZE]; 
1691 uint32_t signal_levels; 1692 1693 1694 if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) { 1695 signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]); 1696 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels; 1697 } else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) { 1698 signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]); 1699 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels; 1700 } else { 1701 signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]); 1702 DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels); 1703 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels; 1704 } 1705 1706 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) 1707 reg = DP | DP_LINK_TRAIN_PAT_1_CPT; 1708 else 1709 reg = DP | DP_LINK_TRAIN_PAT_1; 1710 1711 if (!intel_dp_set_link_train(intel_dp, reg, 1712 DP_TRAINING_PATTERN_1)) 1713 break; 1714 /* Set training pattern 1 */ 1715 1716 DELAY(100); 1717 if (!intel_dp_get_link_status(intel_dp, link_status)) { 1718 DRM_ERROR("failed to get link status\n"); 1719 break; 1720 } 1721 1722 if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) { 1723 DRM_DEBUG_KMS("clock recovery OK\n"); 1724 clock_recovery = true; 1725 break; 1726 } 1727 1728 /* Check to see if we've tried the max voltage */ 1729 for (i = 0; i < intel_dp->lane_count; i++) 1730 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0) 1731 break; 1732 if (i == intel_dp->lane_count) { 1733 ++loop_tries; 1734 if (loop_tries == 5) { 1735 DRM_DEBUG_KMS("too many full retries, give up\n"); 1736 break; 1737 } 1738 memset(intel_dp->train_set, 0, 4); 1739 voltage_tries = 0; 1740 continue; 1741 } 1742 1743 /* Check to see if we've tried the same voltage 5 times */ 1744 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) { 1745 ++voltage_tries; 1746 if (voltage_tries == 5) { 1747 DRM_DEBUG_KMS("too many voltage retries, give up\n"); 1748 break; 1749 } 1750 } 
else 1751 voltage_tries = 0; 1752 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK; 1753 1754 /* Compute new intel_dp->train_set as requested by target */ 1755 intel_get_adjust_train(intel_dp, link_status); 1756 } 1757 1758 intel_dp->DP = DP; 1759 } 1760 1761 static void 1762 intel_dp_complete_link_train(struct intel_dp *intel_dp) 1763 { 1764 struct drm_device *dev = intel_dp->base.base.dev; 1765 struct drm_i915_private *dev_priv = dev->dev_private; 1766 bool channel_eq = false; 1767 int tries, cr_tries; 1768 u32 reg; 1769 uint32_t DP = intel_dp->DP; 1770 1771 /* channel equalization */ 1772 tries = 0; 1773 cr_tries = 0; 1774 channel_eq = false; 1775 for (;;) { 1776 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */ 1777 uint32_t signal_levels; 1778 uint8_t link_status[DP_LINK_STATUS_SIZE]; 1779 1780 if (cr_tries > 5) { 1781 DRM_ERROR("failed to train DP, aborting\n"); 1782 intel_dp_link_down(intel_dp); 1783 break; 1784 } 1785 1786 if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) { 1787 signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]); 1788 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels; 1789 } else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) { 1790 signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]); 1791 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels; 1792 } else { 1793 signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]); 1794 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels; 1795 } 1796 1797 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) 1798 reg = DP | DP_LINK_TRAIN_PAT_2_CPT; 1799 else 1800 reg = DP | DP_LINK_TRAIN_PAT_2; 1801 1802 /* channel eq pattern */ 1803 if (!intel_dp_set_link_train(intel_dp, reg, 1804 DP_TRAINING_PATTERN_2)) 1805 break; 1806 1807 DELAY(400); 1808 if (!intel_dp_get_link_status(intel_dp, link_status)) 1809 break; 1810 1811 /* Make sure clock is still ok */ 1812 if 
(!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) { 1813 intel_dp_start_link_train(intel_dp); 1814 cr_tries++; 1815 continue; 1816 } 1817 1818 if (intel_channel_eq_ok(intel_dp, link_status)) { 1819 channel_eq = true; 1820 break; 1821 } 1822 1823 /* Try 5 times, then try clock recovery if that fails */ 1824 if (tries > 5) { 1825 intel_dp_link_down(intel_dp); 1826 intel_dp_start_link_train(intel_dp); 1827 tries = 0; 1828 cr_tries++; 1829 continue; 1830 } 1831 1832 /* Compute new intel_dp->train_set as requested by target */ 1833 intel_get_adjust_train(intel_dp, link_status); 1834 ++tries; 1835 } 1836 1837 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) 1838 reg = DP | DP_LINK_TRAIN_OFF_CPT; 1839 else 1840 reg = DP | DP_LINK_TRAIN_OFF; 1841 1842 I915_WRITE(intel_dp->output_reg, reg); 1843 POSTING_READ(intel_dp->output_reg); 1844 intel_dp_aux_native_write_1(intel_dp, 1845 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE); 1846 } 1847 1848 static void 1849 intel_dp_link_down(struct intel_dp *intel_dp) 1850 { 1851 struct drm_device *dev = intel_dp->base.base.dev; 1852 struct drm_i915_private *dev_priv = dev->dev_private; 1853 uint32_t DP = intel_dp->DP; 1854 1855 if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0) 1856 return; 1857 1858 DRM_DEBUG_KMS("\n"); 1859 1860 if (is_edp(intel_dp)) { 1861 DP &= ~DP_PLL_ENABLE; 1862 I915_WRITE(intel_dp->output_reg, DP); 1863 POSTING_READ(intel_dp->output_reg); 1864 DELAY(100); 1865 } 1866 1867 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) { 1868 DP &= ~DP_LINK_TRAIN_MASK_CPT; 1869 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT); 1870 } else { 1871 DP &= ~DP_LINK_TRAIN_MASK; 1872 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE); 1873 } 1874 POSTING_READ(intel_dp->output_reg); 1875 1876 DELAY(17*1000); 1877 1878 if (is_edp(intel_dp)) { 1879 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) 1880 DP |= DP_LINK_TRAIN_OFF_CPT; 1881 else 
1882 DP |= DP_LINK_TRAIN_OFF; 1883 } 1884 1885 1886 if (!HAS_PCH_CPT(dev) && 1887 I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) { 1888 struct drm_crtc *crtc = intel_dp->base.base.crtc; 1889 1890 /* Hardware workaround: leaving our transcoder select 1891 * set to transcoder B while it's off will prevent the 1892 * corresponding HDMI output on transcoder A. 1893 * 1894 * Combine this with another hardware workaround: 1895 * transcoder select bit can only be cleared while the 1896 * port is enabled. 1897 */ 1898 DP &= ~DP_PIPEB_SELECT; 1899 I915_WRITE(intel_dp->output_reg, DP); 1900 1901 /* Changes to enable or select take place the vblank 1902 * after being written. 1903 */ 1904 if (crtc == NULL) { 1905 /* We can arrive here never having been attached 1906 * to a CRTC, for instance, due to inheriting 1907 * random state from the BIOS. 1908 * 1909 * If the pipe is not running, play safe and 1910 * wait for the clocks to stabilise before 1911 * continuing. 1912 */ 1913 POSTING_READ(intel_dp->output_reg); 1914 DELAY(50 * 1000); 1915 } else 1916 intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe); 1917 } 1918 1919 DP &= ~DP_AUDIO_OUTPUT_ENABLE; 1920 I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN); 1921 POSTING_READ(intel_dp->output_reg); 1922 DELAY(intel_dp->panel_power_down_delay * 1000); 1923 } 1924 1925 static bool 1926 intel_dp_get_dpcd(struct intel_dp *intel_dp) 1927 { 1928 if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd, 1929 sizeof(intel_dp->dpcd)) && 1930 (intel_dp->dpcd[DP_DPCD_REV] != 0)) { 1931 return true; 1932 } 1933 1934 return false; 1935 } 1936 1937 static bool 1938 intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector) 1939 { 1940 int ret; 1941 1942 ret = intel_dp_aux_native_read_retry(intel_dp, 1943 DP_DEVICE_SERVICE_IRQ_VECTOR, 1944 sink_irq_vector, 1); 1945 if (!ret) 1946 return false; 1947 1948 return true; 1949 } 1950 1951 static void 1952 intel_dp_handle_test_request(struct intel_dp *intel_dp) 1953 { 1954 
/* NAK by default */ 1955 intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK); 1956 } 1957 1958 /* 1959 * According to DP spec 1960 * 5.1.2: 1961 * 1. Read DPCD 1962 * 2. Configure link according to Receiver Capabilities 1963 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3 1964 * 4. Check link status on receipt of hot-plug interrupt 1965 */ 1966 1967 static void 1968 intel_dp_check_link_status(struct intel_dp *intel_dp) 1969 { 1970 u8 sink_irq_vector; 1971 u8 link_status[DP_LINK_STATUS_SIZE]; 1972 1973 if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON) 1974 return; 1975 1976 if (!intel_dp->base.base.crtc) 1977 return; 1978 1979 /* Try to read receiver status if the link appears to be up */ 1980 if (!intel_dp_get_link_status(intel_dp, link_status)) { 1981 intel_dp_link_down(intel_dp); 1982 return; 1983 } 1984 1985 /* Now read the DPCD to see if it's actually running */ 1986 if (!intel_dp_get_dpcd(intel_dp)) { 1987 intel_dp_link_down(intel_dp); 1988 return; 1989 } 1990 1991 /* Try to read the source of the interrupt */ 1992 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && 1993 intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) { 1994 /* Clear interrupt source */ 1995 intel_dp_aux_native_write_1(intel_dp, 1996 DP_DEVICE_SERVICE_IRQ_VECTOR, 1997 sink_irq_vector); 1998 1999 if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST) 2000 intel_dp_handle_test_request(intel_dp); 2001 if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ)) 2002 DRM_DEBUG_KMS("CP or sink specific irq unhandled\n"); 2003 } 2004 2005 if (!intel_channel_eq_ok(intel_dp, link_status)) { 2006 DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n", 2007 drm_get_encoder_name(&intel_dp->base.base)); 2008 intel_dp_start_link_train(intel_dp); 2009 intel_dp_complete_link_train(intel_dp); 2010 } 2011 } 2012 2013 static enum drm_connector_status 2014 intel_dp_detect_dpcd(struct intel_dp *intel_dp) 2015 { 2016 if (intel_dp_get_dpcd(intel_dp)) 2017 return connector_status_connected; 2018 return 
connector_status_disconnected; 2019 } 2020 2021 static enum drm_connector_status 2022 ironlake_dp_detect(struct intel_dp *intel_dp) 2023 { 2024 enum drm_connector_status status; 2025 2026 /* Can't disconnect eDP, but you can close the lid... */ 2027 if (is_edp(intel_dp)) { 2028 status = intel_panel_detect(intel_dp->base.base.dev); 2029 if (status == connector_status_unknown) 2030 status = connector_status_connected; 2031 return status; 2032 } 2033 2034 return intel_dp_detect_dpcd(intel_dp); 2035 } 2036 2037 static enum drm_connector_status 2038 g4x_dp_detect(struct intel_dp *intel_dp) 2039 { 2040 struct drm_device *dev = intel_dp->base.base.dev; 2041 struct drm_i915_private *dev_priv = dev->dev_private; 2042 uint32_t temp, bit; 2043 2044 switch (intel_dp->output_reg) { 2045 case DP_B: 2046 bit = DPB_HOTPLUG_INT_STATUS; 2047 break; 2048 case DP_C: 2049 bit = DPC_HOTPLUG_INT_STATUS; 2050 break; 2051 case DP_D: 2052 bit = DPD_HOTPLUG_INT_STATUS; 2053 break; 2054 default: 2055 return connector_status_unknown; 2056 } 2057 2058 temp = I915_READ(PORT_HOTPLUG_STAT); 2059 2060 if ((temp & bit) == 0) 2061 return connector_status_disconnected; 2062 2063 return intel_dp_detect_dpcd(intel_dp); 2064 } 2065 2066 static struct edid * 2067 intel_dp_get_edid(struct drm_connector *connector, device_t adapter) 2068 { 2069 struct intel_dp *intel_dp = intel_attached_dp(connector); 2070 struct edid *edid; 2071 2072 ironlake_edp_panel_vdd_on(intel_dp); 2073 edid = drm_get_edid(connector, adapter); 2074 ironlake_edp_panel_vdd_off(intel_dp, false); 2075 return edid; 2076 } 2077 2078 static int 2079 intel_dp_get_edid_modes(struct drm_connector *connector, device_t adapter) 2080 { 2081 struct intel_dp *intel_dp = intel_attached_dp(connector); 2082 int ret; 2083 2084 ironlake_edp_panel_vdd_on(intel_dp); 2085 ret = intel_ddc_get_modes(connector, adapter); 2086 ironlake_edp_panel_vdd_off(intel_dp, false); 2087 return ret; 2088 } 2089 2090 2091 /** 2092 * Uses CRT_HOTPLUG_EN and CRT_HOTPLUG_STAT 
to detect DP connection.
 *
 * \return true if DP port is connected.
 * \return false if DP port is disconnected.
 */
static enum drm_connector_status
intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	enum drm_connector_status status;
	struct edid *edid = NULL;

	intel_dp->has_audio = false;

	/* PCH-split (Ironlake+) and G4X-class hardware use different
	 * hotplug/DPCD probing paths. */
	if (HAS_PCH_SPLIT(dev))
		status = ironlake_dp_detect(intel_dp);
	else
		status = g4x_dp_detect(intel_dp);
	if (status != connector_status_connected)
		return status;

	/* Audio: honor an explicit user override; otherwise consult the
	 * sink's EDID for an audio-capable monitor. */
	if (intel_dp->force_audio != HDMI_AUDIO_AUTO) {
		intel_dp->has_audio = (intel_dp->force_audio == HDMI_AUDIO_ON);
	} else {
		edid = intel_dp_get_edid(connector, intel_dp->adapter);
		if (edid) {
			intel_dp->has_audio = drm_detect_monitor_audio(edid);
			drm_free(edid, DRM_MEM_KMS);
		}
	}

	return connector_status_connected;
}

/*
 * Populate connector->probed_modes for this DP/eDP output.
 *
 * Prefers modes from the sink's EDID; for eDP, caches the preferred EDID
 * mode as the fixed panel mode the first time it is seen.  If EDID yields
 * nothing and this is eDP, falls back to the fixed panel mode from the
 * VBT (if any).
 *
 * Returns the number of modes added.
 */
static int intel_dp_get_modes(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	/* We should parse the EDID data and find out if it has an audio sink
	 */

	ret = intel_dp_get_edid_modes(connector, intel_dp->adapter);
	if (ret) {
		/* Remember the panel's preferred EDID mode so later mode-sets
		 * can fall back to it even without re-reading EDID. */
		if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
			struct drm_display_mode *newmode;
			list_for_each_entry(newmode, &connector->probed_modes,
					    head) {
				if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
					intel_dp->panel_fixed_mode =
						drm_mode_duplicate(dev, newmode);
					break;
				}
			}
		}
		return ret;
	}

	/* if eDP has no EDID, try to use fixed panel mode from VBT */
	if (is_edp(intel_dp)) {
		/* initialize panel mode from VBT if available for eDP */
		if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		if (intel_dp->panel_fixed_mode) {
			struct drm_display_mode *mode;
			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}
	return 0;
}

/*
 * Re-read the sink's EDID and report whether it advertises audio support.
 * Used when the force_audio property flips back to AUTO.
 */
static bool
intel_dp_detect_audio(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct edid *edid;
	bool has_audio = false;

	edid = intel_dp_get_edid(connector, intel_dp->adapter);
	if (edid) {
		has_audio = drm_detect_monitor_audio(edid);

		drm_free(edid, DRM_MEM_KMS);
	}

	return has_audio;
}

/*
 * drm_connector_funcs.set_property handler.
 *
 * Supports the force_audio and broadcast_rgb (color range) properties.
 * Returns 0 on success (including the no-change case), -EINVAL for an
 * unknown property, or the error from storing the property value.
 * A real change triggers a full modeset on the attached CRTC so the new
 * setting takes effect.
 */
static int
intel_dp_set_property(struct drm_connector *connector,
		      struct drm_property *property,
		      uint64_t val)
{
	struct drm_i915_private *dev_priv = connector->dev->dev_private;
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	ret = drm_connector_property_set_value(connector, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == HDMI_AUDIO_AUTO)
			has_audio = intel_dp_detect_audio(connector);
		else
			has_audio = (i == HDMI_AUDIO_ON);

		/* No modeset needed if the effective audio state is
		 * unchanged. */
		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	if (intel_dp->base.base.crtc) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;
		/* Re-run the current mode so the property change reaches the
		 * hardware. */
		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->fb);
	}

	return 0;
}

/*
 * drm_connector_funcs.destroy handler: tear down the connector and, when
 * the device has an eDP panel on DP-D, the panel backlight as well.
 */
static void
intel_dp_destroy(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;

	if (intel_dpd_is_edp(dev))
		intel_panel_destroy_backlight(dev);

#if 0
	drm_sysfs_connector_remove(connector);
#endif
	drm_connector_cleanup(connector);
	drm_free(connector, DRM_MEM_KMS);
}

/*
 * drm_encoder_funcs.destroy handler: delete the DDC/AUX iic children,
 * clean up the encoder, and for eDP cancel and drain the deferred
 * panel-VDD-off task before forcing VDD off synchronously, so no task
 * can run against freed memory.
 */
static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	struct drm_device *dev;
	struct intel_dp *intel_dp;

	intel_dp = enc_to_intel_dp(encoder);
	dev = encoder->dev;

	/* The adapter device is a child of dp_iic_bus; delete child first. */
	if (intel_dp->dp_iic_bus != NULL) {
		if (intel_dp->adapter != NULL) {
			device_delete_child(intel_dp->dp_iic_bus,
			    intel_dp->adapter);
		}
		device_delete_child(dev->device, intel_dp->dp_iic_bus);
	}
	drm_encoder_cleanup(encoder);
	if (is_edp(intel_dp)) {
		struct drm_i915_private *dev_priv = intel_dp->base.base.dev->dev_private;

		/* cancel, then drain: the task must not be running (or able
		 * to run) once intel_dp is freed below. */
		taskqueue_cancel_timeout(dev_priv->tq,
		    &intel_dp->panel_vdd_task, NULL);
		taskqueue_drain_timeout(dev_priv->tq,
		    &intel_dp->panel_vdd_task);
		ironlake_panel_vdd_off_sync(intel_dp);
	}
	drm_free(intel_dp, DRM_MEM_KMS);
}

static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
	.dpms = intel_dp_dpms,
	.mode_fixup = intel_dp_mode_fixup,
	.prepare = intel_dp_prepare,
	.mode_set = intel_dp_mode_set,
	.commit = intel_dp_commit,
};

static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = intel_dp_set_property,
	.destroy = intel_dp_destroy,
};

static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.best_encoder = intel_best_encoder,
};

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.destroy = intel_dp_encoder_destroy,
};

/* Hotplug interrupt handler: revalidate link training state. */
static void
intel_dp_hot_plug(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	intel_dp_check_link_status(intel_dp);
}

/* Return which DP Port should be selected for Transcoder DP control */
int
intel_trans_dp_port_sel(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;

	/* Find the DP/eDP encoder currently driving this CRTC and return
	 * its output register; -1 if none is attached. */
	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_dp = enc_to_intel_dp(encoder);
		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
			return intel_dp->output_reg;
	}

	return -1;
}

/* check the VBT to see whether the eDP is on DP-D port */
bool intel_dpd_is_edp(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPD &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}

/* Attach the user-visible force_audio and broadcast_rgb properties. */
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	intel_attach_force_audio_property(connector);
	intel_attach_broadcast_rgb_property(connector);
}

/*
 * Create and register the encoder/connector pair for one DP output.
 *
 * @dev: drm device
 * @output_reg: port control register (DP_A..DP_D / PCH_DP_B..PCH_DP_D);
 *              determines port identity, eDP-ness and DDC bus name.
 *
 * For eDP this also reads the panel power sequencing delays, primes the
 * DPCD cache (under panel VDD), and sets up the backlight.  If the eDP
 * DPCD read fails the output is assumed to be a ghost and everything is
 * torn down again.
 */
void
intel_dp_init(struct drm_device *dev, int output_reg)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_connector *connector;
	struct intel_dp *intel_dp;
	struct intel_encoder *intel_encoder;
	struct intel_connector *intel_connector;
	const char *name = NULL;
	int type;

	intel_dp = kmalloc(sizeof(struct intel_dp), DRM_MEM_KMS,
	    M_WAITOK | M_ZERO);

	intel_dp->output_reg = output_reg;
	/* -1 == dpms state unknown until first dpms call */
	intel_dp->dpms_mode = -1;

	intel_connector = kmalloc(sizeof(struct intel_connector), DRM_MEM_KMS,
	    M_WAITOK | M_ZERO);
	intel_encoder = &intel_dp->base;

	/* On PCH-split parts, DP-D may actually carry the eDP panel per
	 * the VBT. */
	if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
		if (intel_dpd_is_edp(dev))
			intel_dp->is_pch_edp = true;

	if (output_reg == DP_A || is_pch_edp(intel_dp)) {
		type = DRM_MODE_CONNECTOR_eDP;
		intel_encoder->type = INTEL_OUTPUT_EDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	}

	connector = &intel_connector->base;
	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;

	if (output_reg == DP_B || output_reg == PCH_DP_B)
		intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
	else if (output_reg == DP_C || output_reg == PCH_DP_C)
		intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
	else if (output_reg == DP_D || output_reg == PCH_DP_D)
		intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);

	if (is_edp(intel_dp)) {
		/* eDP overrides the per-port clone mask set above. */
		intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
		TIMEOUT_TASK_INIT(dev_priv->tq, &intel_dp->panel_vdd_task, 0,
		    ironlake_panel_vdd_work, intel_dp);
	}

	intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
	connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
	    DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);

	intel_connector_attach_encoder(intel_connector, intel_encoder);
#if 0
	drm_sysfs_connector_add(connector);
#endif

	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_A:
		name = "DPDDC-A";
		break;
	case DP_B:
	case PCH_DP_B:
		dev_priv->hotplug_supported_mask |=
			HDMIB_HOTPLUG_INT_STATUS;
		name = "DPDDC-B";
		break;
	case DP_C:
	case PCH_DP_C:
		dev_priv->hotplug_supported_mask |=
			HDMIC_HOTPLUG_INT_STATUS;
		name = "DPDDC-C";
		break;
	case DP_D:
	case PCH_DP_D:
		dev_priv->hotplug_supported_mask |=
			HDMID_HOTPLUG_INT_STATUS;
		name = "DPDDC-D";
		break;
	}

	/* Cache some DPCD data in the eDP case */
	if (is_edp(intel_dp)) {
		bool ret;
		struct edp_power_seq cur, vbt;
		u32 pp_on, pp_off, pp_div;

		pp_on = I915_READ(PCH_PP_ON_DELAYS);
		pp_off = I915_READ(PCH_PP_OFF_DELAYS);
		pp_div = I915_READ(PCH_PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		vbt = dev_priv->edp.pps;

		DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);

		/* Take the conservative (longer) of the register and VBT
		 * delays, rounding up; presumably converts from 100us units
		 * to ms — confirm against the PP register definitions. */
#define get_delay(field)	((max(cur.field, vbt.field) + 9) / 10)

		intel_dp->panel_power_up_delay = get_delay(t1_t3);
		intel_dp->backlight_on_delay = get_delay(t8);
		intel_dp->backlight_off_delay = get_delay(t9);
		intel_dp->panel_power_down_delay = get_delay(t10);
		intel_dp->panel_power_cycle_delay = get_delay(t11_t12);

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

		/* DPCD is only readable with panel VDD forced on; drop it
		 * again (asynchronously) once the read is done. */
		ironlake_edp_panel_vdd_on(intel_dp);
		ret = intel_dp_get_dpcd(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (ret) {
			if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
				dev_priv->no_aux_handshake =
					intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
					DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
		} else {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			intel_dp_encoder_destroy(&intel_dp->base.base);
			intel_dp_destroy(&intel_connector->base);
			return;
		}
	}

	intel_dp_i2c_init(intel_dp, intel_connector, name);

	intel_encoder->hot_plug = intel_dp_hot_plug;

	if (is_edp(intel_dp)) {
		dev_priv->int_edp_connector = connector;
		intel_panel_setup_backlight(dev);
	}

	intel_dp_add_properties(intel_dp, connector);

	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
	 * 0xd.  Failure to do so will result in spurious interrupts being
	 * generated on the port when a cable is not attached.
	 */
	if (IS_G4X(dev) && !IS_GM45(dev)) {
		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
	}
}