1 /*
2 * Copyright 2007-11 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
22 *
23 * Authors: Dave Airlie
24 * Alex Deucher
25 */
26 #include <drm/drmP.h>
27 #include <drm/drm_crtc_helper.h>
28 #include <drm/amdgpu_drm.h>
29 #include "amdgpu.h"
30 #include "amdgpu_connectors.h"
31 #include "atom.h"
32 #include "atombios_encoders.h"
33 #include "atombios_dp.h"
34 #include <linux/backlight.h>
35 #include "bif/bif_4_1_d.h"
36
37 u8
amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device * adev)38 amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
39 {
40 u8 backlight_level;
41 u32 bios_2_scratch;
42
43 bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
44
45 backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
46 ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
47
48 return backlight_level;
49 }
50
51 void
amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device * adev,u8 backlight_level)52 amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
53 u8 backlight_level)
54 {
55 u32 bios_2_scratch;
56
57 bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
58
59 bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
60 bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
61 ATOM_S2_CURRENT_BL_LEVEL_MASK);
62
63 WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
64 }
65
66 u8
amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder * amdgpu_encoder)67 amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
68 {
69 struct drm_device *dev = amdgpu_encoder->base.dev;
70 struct amdgpu_device *adev = dev->dev_private;
71
72 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
73 return 0;
74
75 return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
76 }
77
78 void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder * amdgpu_encoder,u8 level)79 amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
80 u8 level)
81 {
82 struct drm_encoder *encoder = &amdgpu_encoder->base;
83 struct drm_device *dev = amdgpu_encoder->base.dev;
84 struct amdgpu_device *adev = dev->dev_private;
85 struct amdgpu_encoder_atom_dig *dig;
86
87 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
88 return;
89
90 if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
91 amdgpu_encoder->enc_priv) {
92 dig = amdgpu_encoder->enc_priv;
93 dig->backlight_level = level;
94 amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
95
96 switch (amdgpu_encoder->encoder_id) {
97 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
98 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
99 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
100 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
101 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
102 if (dig->backlight_level == 0)
103 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
104 ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
105 else {
106 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
107 ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
108 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
109 ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
110 }
111 break;
112 default:
113 break;
114 }
115 }
116 }
117
118 #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
119
120 #if 0
121 static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
122 {
123 u8 level;
124
125 /* Convert brightness to hardware level */
126 if (bd->props.brightness < 0)
127 level = 0;
128 else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
129 level = AMDGPU_MAX_BL_LEVEL;
130 else
131 level = bd->props.brightness;
132
133 return level;
134 }
135
136 static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
137 {
138 struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
139 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
140
141 amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
142 amdgpu_atombios_encoder_backlight_level(bd));
143
144 return 0;
145 }
146
147 static int
148 amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
149 {
150 struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
151 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
152 struct drm_device *dev = amdgpu_encoder->base.dev;
153 struct amdgpu_device *adev = dev->dev_private;
154
155 return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
156 }
157
/* Backlight class hooks (currently compiled out along with the rest of
 * the Linux backlight-device path in this port). */
static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status = amdgpu_atombios_encoder_update_backlight_status,
};
162 #endif
163
164 /*
165 * Read max backlight level
166 */
167 static int
sysctl_backlight_max(SYSCTL_HANDLER_ARGS)168 sysctl_backlight_max(SYSCTL_HANDLER_ARGS)
169 {
170 int err, val;
171
172 val = AMDGPU_MAX_BL_LEVEL;
173 err = sysctl_handle_int(oidp, &val, 0, req);
174 return(err);
175 }
176
177 /*
178 * Read/write backlight level
179 */
180 static int
sysctl_backlight_handler(SYSCTL_HANDLER_ARGS)181 sysctl_backlight_handler(SYSCTL_HANDLER_ARGS)
182 {
183 struct amdgpu_encoder *encoder;
184 struct amdgpu_encoder_atom_dig *dig;
185 int err, val;
186
187 encoder = (struct amdgpu_encoder *)arg1;
188 dig = encoder->enc_priv;
189 val = dig->backlight_level;
190
191 err = sysctl_handle_int(oidp, &val, 0, req);
192 if (err != 0 || req->newptr == NULL) {
193 return(err);
194 }
195 if (dig->backlight_level != val && val >= 0 &&
196 val <= AMDGPU_MAX_BL_LEVEL) {
197 amdgpu_atombios_encoder_set_backlight_level(encoder, val);
198 }
199
200 return(err);
201 }
202
/*
 * Register backlight control for a GPU-controlled panel backlight.
 * The Linux backlight-class path is compiled out (#if 0) in this port;
 * on DragonFly, two sysctl nodes (hw.backlight_max / hw.backlight_level)
 * are registered instead, backed by the handlers above.
 */
void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
					    struct drm_connector *drm_connector)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
#if 0
	struct backlight_device *bd;
	struct backlight_properties props;
	struct amdgpu_backlight_privdata *pdata;
#endif
	struct amdgpu_encoder_atom_dig *dig;
	u8 backlight_level;
#if 0
	char bl_name[16];
#endif

	/* Mac laptops with multiple GPUs use the gmux driver for backlight
	 * so don't register a backlight device
	 */
	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
	    (adev->pdev->device == 0x6741))
		return;

	/* Bail if this encoder has no DIG private data to store the level. */
	if (!amdgpu_encoder->enc_priv)
		return;

	/* Bail unless the VBIOS says the GPU owns the backlight. */
	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

#if 0
	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), M_DRM, GFP_KERNEL);
	if (!pdata) {
		DRM_ERROR("Memory allocation failed\n");
		goto error;
	}

	memset(&props, 0, sizeof(props));
	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
	props.type = BACKLIGHT_RAW;
	snprintf(bl_name, sizeof(bl_name),
		 "amdgpu_bl%d", dev->primary->index);
	bd = backlight_device_register(bl_name, drm_connector->kdev,
				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
	if (IS_ERR(bd)) {
		DRM_ERROR("Backlight registration failed\n");
		goto error;
	}

	pdata->encoder = amdgpu_encoder;
#endif

	/* Seed the cached level from what the VBIOS last programmed. */
	backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);

	dig = amdgpu_encoder->enc_priv;
#if 0
	dig->bl_dev = bd;

	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
	bd->props.power = FB_BLANK_UNBLANK;
	backlight_update_status(bd);
#endif

	DRM_INFO("amdgpu atom DIG backlight initialized\n");

#ifdef __DragonFly__
	dig->backlight_level = backlight_level;

	/* Remember which encoder owns the backlight for later lookups. */
	adev->mode_info.bl_encoder = amdgpu_encoder;

	SYSCTL_ADD_PROC(&drm_connector->dev->sysctl->ctx, &sysctl__hw_children,
			OID_AUTO, "backlight_max",
			CTLTYPE_INT | CTLFLAG_RD | CTLFLAG_ANYBODY,
			amdgpu_encoder, sizeof(int),
			sysctl_backlight_max,
			"I", "Max backlight level");
	SYSCTL_ADD_PROC(&drm_connector->dev->sysctl->ctx, &sysctl__hw_children,
			OID_AUTO, "backlight_level",
			CTLTYPE_INT | CTLFLAG_RW | CTLFLAG_ANYBODY,
			amdgpu_encoder, sizeof(int),
			sysctl_backlight_handler,
			"I", "Backlight level");
#endif

	return;

#if 0
error:
	kfree(pdata);
	return;
#endif
}
294
/*
 * Tear down the backlight device registered by
 * amdgpu_atombios_encoder_init_backlight().  The whole body is compiled
 * out (#if 0) along with the backlight-class registration path in this
 * port, so this is currently a no-op.
 */
void
amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
{
#if 0
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct backlight_device *bd = NULL;
	struct amdgpu_encoder_atom_dig *dig;

	if (!amdgpu_encoder->enc_priv)
		return;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	dig = amdgpu_encoder->enc_priv;
	bd = dig->bl_dev;
	dig->bl_dev = NULL;

	if (bd) {
		struct amdgpu_legacy_backlight_privdata *pdata;

		pdata = bl_get_data(bd);
		backlight_device_unregister(bd);
		kfree(pdata);

		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
	}
#endif
}
325
326 #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
327
/*
 * Stub used when no backlight class support is built in.
 *
 * NOTE(review): the signature must match the CONFIG_BACKLIGHT_CLASS_DEVICE
 * variant above, which also takes the connector; the previous one-argument
 * form conflicted with the shared prototype and its callers.  Confirm
 * against the declaration in atombios_encoders.h.
 */
void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder,
					    struct drm_connector *drm_connector)
{
}
331
/* Stub used when no backlight class support is built in. */
void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
{
}
335
336 #endif
337
amdgpu_atombios_encoder_is_digital(struct drm_encoder * encoder)338 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
339 {
340 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
341 switch (amdgpu_encoder->encoder_id) {
342 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
343 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
344 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
345 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
346 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
347 return true;
348 default:
349 return false;
350 }
351 }
352
amdgpu_atombios_encoder_mode_fixup(struct drm_encoder * encoder,const struct drm_display_mode * mode,struct drm_display_mode * adjusted_mode)353 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
354 const struct drm_display_mode *mode,
355 struct drm_display_mode *adjusted_mode)
356 {
357 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
358
359 /* set the active encoder to connector routing */
360 amdgpu_encoder_set_active_device(encoder);
361 drm_mode_set_crtcinfo(adjusted_mode, 0);
362
363 /* hw bug */
364 if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
365 && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
366 adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
367
368 /* vertical FP must be at least 1 */
369 if (mode->crtc_vsync_start == mode->crtc_vdisplay)
370 adjusted_mode->crtc_vsync_start++;
371
372 /* get the native mode for scaling */
373 if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
374 amdgpu_panel_mode_fixup(encoder, adjusted_mode);
375 else if (amdgpu_encoder->rmx_type != RMX_OFF)
376 amdgpu_panel_mode_fixup(encoder, adjusted_mode);
377
378 if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
379 (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
380 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
381 amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
382 }
383
384 return true;
385 }
386
387 static void
amdgpu_atombios_encoder_setup_dac(struct drm_encoder * encoder,int action)388 amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
389 {
390 struct drm_device *dev = encoder->dev;
391 struct amdgpu_device *adev = dev->dev_private;
392 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
393 DAC_ENCODER_CONTROL_PS_ALLOCATION args;
394 int index = 0;
395
396 memset(&args, 0, sizeof(args));
397
398 switch (amdgpu_encoder->encoder_id) {
399 case ENCODER_OBJECT_ID_INTERNAL_DAC1:
400 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
401 index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
402 break;
403 case ENCODER_OBJECT_ID_INTERNAL_DAC2:
404 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
405 index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
406 break;
407 }
408
409 args.ucAction = action;
410 args.ucDacStandard = ATOM_DAC1_PS2;
411 args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
412
413 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
414
415 }
416
amdgpu_atombios_encoder_get_bpc(struct drm_encoder * encoder)417 static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
418 {
419 int bpc = 8;
420
421 if (encoder->crtc) {
422 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
423 bpc = amdgpu_crtc->bpc;
424 }
425
426 switch (bpc) {
427 case 0:
428 return PANEL_BPC_UNDEFINE;
429 case 6:
430 return PANEL_6BIT_PER_COLOR;
431 case 8:
432 default:
433 return PANEL_8BIT_PER_COLOR;
434 case 10:
435 return PANEL_10BIT_PER_COLOR;
436 case 12:
437 return PANEL_12BIT_PER_COLOR;
438 case 16:
439 return PANEL_16BIT_PER_COLOR;
440 }
441 }
442
/* Per-revision parameter blocks for the DVOEncoderControl atom table;
 * the member used is selected by the crev in setup_dvo() below. */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;	/* crev 1 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;			/* crev 2 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;		/* crev 3 */
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;		/* crev 4 */
};
449
/*
 * Program the DVO encoder via the DVOEncoderControl atom table.  The
 * parameter layout depends on the table revision reported by the VBIOS,
 * so parse the command header first and fill the matching union member.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	/* Ask the VBIOS which revision of the table it carries. */
	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			/* High pixel clocks need both links of the dual-link
			 * TMDS encoder. */
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	/* Note: executed even for unknown revisions, with zeroed args. */
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}
512
/*
 * Decide which ATOM_ENCODER_MODE_* the encoder should run in, based on
 * the encoder type, the attached connector's type, and — for DVI/HDMI
 * capable connectors — the audio policy (amdgpu_audio module option,
 * per-connector audio setting, and EDID HDMI detection).
 */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		/* digital + audio -> HDMI, digital only -> DVI, else analog */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		/* always digital: HDMI when audio is wanted/detected, else DVI */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
		break;
	case DRM_MODE_CONNECTOR_DisplayPort:
		/* a DP connector may still carry a DVI/HDMI sink via a
		 * passive adapter; check the detected sink type */
		dig_connector = amdgpu_connector->con_priv;
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
		break;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
		/*return ATOM_ENCODER_MODE_CV;*/
		break;
	}
}
607
608 /*
609 * DIG Encoder/Transmitter Setup
610 *
611 * DCE 6.0
612 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
613 * Supports up to 6 digital outputs
614 * - 6 DIG encoder blocks.
615 * - DIG to PHY mapping is hardcoded
616 * DIG1 drives UNIPHY0 link A, A+B
617 * DIG2 drives UNIPHY0 link B
618 * DIG3 drives UNIPHY1 link A, A+B
619 * DIG4 drives UNIPHY1 link B
620 * DIG5 drives UNIPHY2 link A, A+B
621 * DIG6 drives UNIPHY2 link B
622 *
623 * Routing
624 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
625 * Examples:
626 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
627 * crtc1 -> dig1 -> UNIPHY0 link B -> DP
628 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
629 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
630 */
631
/* Per-revision parameter blocks for the DIGxEncoderControl atom table;
 * the member used is selected by frev/crev in setup_dig_encoder(). */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
};
639
/*
 * Program a DIG encoder block via the DIGxEncoderControl atom table.
 *
 * @encoder:    the drm encoder being configured
 * @action:     ATOM_ENCODER_CMD_* / ATOM_*_ACTION value for the table
 * @panel_mode: panel mode byte, used only with
 *              ATOM_ENCODER_CMD_SETUP_PANEL_MODE
 *
 * The parameter layout depends on the frev/crev the VBIOS reports, so
 * the command header is parsed first and the matching union member is
 * filled in before executing the table.
 */
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	/* Pull DP link parameters and the hpd pin from the connector. */
	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* NOTE(review): writes v3.ucPanelMode while filling a
			 * crev-1 table — the union members alias the same
			 * storage; this matches the upstream driver, confirm
			 * against the atombios.h layouts before changing. */
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			/* DP uses the trained lane count; TMDS uses 8 lanes
			 * for dual-link clocks, 4 otherwise. */
			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			/* select the transmitter this DIG feeds (plain
			 * assignment: overwrites the link-rate bit set above
			 * — matches the upstream driver) */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			/* NOTE(review): v1.ucConfig aliases the matching v3
			 * config byte in the union — matches upstream. */
			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			/* map the DP link clock (kHz) to a link-rate config */
			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			/* table uses 0 for "no hpd", hpd pins are 1-based */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		case 5:
			/* v5 splits the table into per-action parameter sets */
			switch (action) {
			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
				args.v5.asDPPanelModeParam.ucAction = action;
				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
				break;
			case ATOM_ENCODER_CMD_STREAM_SETUP:
				args.v5.asStreamParam.ucAction = action;
				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
				args.v5.asStreamParam.ucDigMode =
					amdgpu_atombios_encoder_get_encoder_mode(encoder);
				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
				else if (amdgpu_dig_monitor_is_duallink(encoder,
									amdgpu_encoder->pixel_clock))
					args.v5.asStreamParam.ucLaneNum = 8;
				else
					args.v5.asStreamParam.ucLaneNum = 4;
				args.v5.asStreamParam.ulPixelClock =
					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
				args.v5.asStreamParam.ucBitPerColor =
					amdgpu_atombios_encoder_get_bpc(encoder);
				/* link rate expressed in 270 MHz units */
				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
				break;
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
				args.v5.asCmdParam.ucAction = action;
				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
				break;
			default:
				DRM_ERROR("Unsupported action 0x%x\n", action);
				break;
			}
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}
819
/* Per-revision parameter blocks for the DIG transmitter control tables
 * (UNIPHY/LVTMA); the member used is selected by frev/crev in
 * setup_dig_transmitter(). */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
};
828
829 void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder * encoder,int action,uint8_t lane_num,uint8_t lane_set)830 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
831 uint8_t lane_num, uint8_t lane_set)
832 {
833 struct drm_device *dev = encoder->dev;
834 struct amdgpu_device *adev = dev->dev_private;
835 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
836 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
837 struct drm_connector *connector;
838 union dig_transmitter_control args;
839 int index = 0;
840 uint8_t frev, crev;
841 bool is_dp = false;
842 int pll_id = 0;
843 int dp_clock = 0;
844 int dp_lane_count = 0;
845 int connector_object_id = 0;
846 int igp_lane_info = 0;
847 int dig_encoder = dig->dig_encoder;
848 int hpd_id = AMDGPU_HPD_NONE;
849
850 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
851 connector = amdgpu_get_connector_for_encoder_init(encoder);
852 /* just needed to avoid bailing in the encoder check. the encoder
853 * isn't used for init
854 */
855 dig_encoder = 0;
856 } else
857 connector = amdgpu_get_connector_for_encoder(encoder);
858
859 if (connector) {
860 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
861 struct amdgpu_connector_atom_dig *dig_connector =
862 amdgpu_connector->con_priv;
863
864 hpd_id = amdgpu_connector->hpd.hpd;
865 dp_clock = dig_connector->dp_clock;
866 dp_lane_count = dig_connector->dp_lane_count;
867 connector_object_id =
868 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
869 }
870
871 if (encoder->crtc) {
872 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
873 pll_id = amdgpu_crtc->pll_id;
874 }
875
876 /* no dig encoder assigned */
877 if (dig_encoder == -1)
878 return;
879
880 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
881 is_dp = true;
882
883 memset(&args, 0, sizeof(args));
884
885 switch (amdgpu_encoder->encoder_id) {
886 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
887 index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
888 break;
889 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
890 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
891 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
892 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
893 index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
894 break;
895 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
896 index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
897 break;
898 }
899
900 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
901 return;
902
903 switch (frev) {
904 case 1:
905 switch (crev) {
906 case 1:
907 args.v1.ucAction = action;
908 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
909 args.v1.usInitInfo = cpu_to_le16(connector_object_id);
910 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
911 args.v1.asMode.ucLaneSel = lane_num;
912 args.v1.asMode.ucLaneSet = lane_set;
913 } else {
914 if (is_dp)
915 args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
916 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
917 args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
918 else
919 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
920 }
921
922 args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
923
924 if (dig_encoder)
925 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
926 else
927 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
928
929 if ((adev->flags & AMD_IS_APU) &&
930 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
931 if (is_dp ||
932 !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
933 if (igp_lane_info & 0x1)
934 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
935 else if (igp_lane_info & 0x2)
936 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
937 else if (igp_lane_info & 0x4)
938 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
939 else if (igp_lane_info & 0x8)
940 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
941 } else {
942 if (igp_lane_info & 0x3)
943 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
944 else if (igp_lane_info & 0xc)
945 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
946 }
947 }
948
949 if (dig->linkb)
950 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
951 else
952 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
953
954 if (is_dp)
955 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
956 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
957 if (dig->coherent_mode)
958 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
959 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
960 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
961 }
962 break;
963 case 2:
964 args.v2.ucAction = action;
965 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
966 args.v2.usInitInfo = cpu_to_le16(connector_object_id);
967 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
968 args.v2.asMode.ucLaneSel = lane_num;
969 args.v2.asMode.ucLaneSet = lane_set;
970 } else {
971 if (is_dp)
972 args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
973 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
974 args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
975 else
976 args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
977 }
978
979 args.v2.acConfig.ucEncoderSel = dig_encoder;
980 if (dig->linkb)
981 args.v2.acConfig.ucLinkSel = 1;
982
983 switch (amdgpu_encoder->encoder_id) {
984 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
985 args.v2.acConfig.ucTransmitterSel = 0;
986 break;
987 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
988 args.v2.acConfig.ucTransmitterSel = 1;
989 break;
990 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
991 args.v2.acConfig.ucTransmitterSel = 2;
992 break;
993 }
994
995 if (is_dp) {
996 args.v2.acConfig.fCoherentMode = 1;
997 args.v2.acConfig.fDPConnector = 1;
998 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
999 if (dig->coherent_mode)
1000 args.v2.acConfig.fCoherentMode = 1;
1001 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1002 args.v2.acConfig.fDualLinkConnector = 1;
1003 }
1004 break;
1005 case 3:
1006 args.v3.ucAction = action;
1007 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
1008 args.v3.usInitInfo = cpu_to_le16(connector_object_id);
1009 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
1010 args.v3.asMode.ucLaneSel = lane_num;
1011 args.v3.asMode.ucLaneSet = lane_set;
1012 } else {
1013 if (is_dp)
1014 args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
1015 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1016 args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
1017 else
1018 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1019 }
1020
1021 if (is_dp)
1022 args.v3.ucLaneNum = dp_lane_count;
1023 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1024 args.v3.ucLaneNum = 8;
1025 else
1026 args.v3.ucLaneNum = 4;
1027
1028 if (dig->linkb)
1029 args.v3.acConfig.ucLinkSel = 1;
1030 if (dig_encoder & 1)
1031 args.v3.acConfig.ucEncoderSel = 1;
1032
1033 /* Select the PLL for the PHY
1034 * DP PHY should be clocked from external src if there is
1035 * one.
1036 */
1037 /* On DCE4, if there is an external clock, it generates the DP ref clock */
1038 if (is_dp && adev->clock.dp_extclk)
1039 args.v3.acConfig.ucRefClkSource = 2; /* external src */
1040 else
1041 args.v3.acConfig.ucRefClkSource = pll_id;
1042
1043 switch (amdgpu_encoder->encoder_id) {
1044 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1045 args.v3.acConfig.ucTransmitterSel = 0;
1046 break;
1047 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1048 args.v3.acConfig.ucTransmitterSel = 1;
1049 break;
1050 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1051 args.v3.acConfig.ucTransmitterSel = 2;
1052 break;
1053 }
1054
1055 if (is_dp)
1056 args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
1057 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1058 if (dig->coherent_mode)
1059 args.v3.acConfig.fCoherentMode = 1;
1060 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1061 args.v3.acConfig.fDualLinkConnector = 1;
1062 }
1063 break;
1064 case 4:
1065 args.v4.ucAction = action;
1066 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
1067 args.v4.usInitInfo = cpu_to_le16(connector_object_id);
1068 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
1069 args.v4.asMode.ucLaneSel = lane_num;
1070 args.v4.asMode.ucLaneSet = lane_set;
1071 } else {
1072 if (is_dp)
1073 args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
1074 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1075 args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
1076 else
1077 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1078 }
1079
1080 if (is_dp)
1081 args.v4.ucLaneNum = dp_lane_count;
1082 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1083 args.v4.ucLaneNum = 8;
1084 else
1085 args.v4.ucLaneNum = 4;
1086
1087 if (dig->linkb)
1088 args.v4.acConfig.ucLinkSel = 1;
1089 if (dig_encoder & 1)
1090 args.v4.acConfig.ucEncoderSel = 1;
1091
1092 /* Select the PLL for the PHY
1093 * DP PHY should be clocked from external src if there is
1094 * one.
1095 */
1096 /* On DCE5 DCPLL usually generates the DP ref clock */
1097 if (is_dp) {
1098 if (adev->clock.dp_extclk)
1099 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
1100 else
1101 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1102 } else
1103 args.v4.acConfig.ucRefClkSource = pll_id;
1104
1105 switch (amdgpu_encoder->encoder_id) {
1106 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1107 args.v4.acConfig.ucTransmitterSel = 0;
1108 break;
1109 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1110 args.v4.acConfig.ucTransmitterSel = 1;
1111 break;
1112 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1113 args.v4.acConfig.ucTransmitterSel = 2;
1114 break;
1115 }
1116
1117 if (is_dp)
1118 args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1119 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1120 if (dig->coherent_mode)
1121 args.v4.acConfig.fCoherentMode = 1;
1122 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1123 args.v4.acConfig.fDualLinkConnector = 1;
1124 }
1125 break;
1126 case 5:
1127 args.v5.ucAction = action;
1128 if (is_dp)
1129 args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1130 else
1131 args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1132
1133 switch (amdgpu_encoder->encoder_id) {
1134 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1135 if (dig->linkb)
1136 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1137 else
1138 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1139 break;
1140 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1141 if (dig->linkb)
1142 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1143 else
1144 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1145 break;
1146 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1147 if (dig->linkb)
1148 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1149 else
1150 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1151 break;
1152 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1153 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1154 break;
1155 }
1156 if (is_dp)
1157 args.v5.ucLaneNum = dp_lane_count;
1158 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1159 args.v5.ucLaneNum = 8;
1160 else
1161 args.v5.ucLaneNum = 4;
1162 args.v5.ucConnObjId = connector_object_id;
1163 args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1164
1165 if (is_dp && adev->clock.dp_extclk)
1166 args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1167 else
1168 args.v5.asConfig.ucPhyClkSrcId = pll_id;
1169
1170 if (is_dp)
1171 args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1172 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1173 if (dig->coherent_mode)
1174 args.v5.asConfig.ucCoherentMode = 1;
1175 }
1176 if (hpd_id == AMDGPU_HPD_NONE)
1177 args.v5.asConfig.ucHPDSel = 0;
1178 else
1179 args.v5.asConfig.ucHPDSel = hpd_id + 1;
1180 args.v5.ucDigEncoderSel = 1 << dig_encoder;
1181 args.v5.ucDPLaneSet = lane_set;
1182 break;
1183 case 6:
1184 args.v6.ucAction = action;
1185 if (is_dp)
1186 args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1187 else
1188 args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1189
1190 switch (amdgpu_encoder->encoder_id) {
1191 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1192 if (dig->linkb)
1193 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1194 else
1195 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1196 break;
1197 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1198 if (dig->linkb)
1199 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1200 else
1201 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1202 break;
1203 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1204 if (dig->linkb)
1205 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1206 else
1207 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1208 break;
1209 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1210 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1211 break;
1212 }
1213 if (is_dp)
1214 args.v6.ucLaneNum = dp_lane_count;
1215 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1216 args.v6.ucLaneNum = 8;
1217 else
1218 args.v6.ucLaneNum = 4;
1219 args.v6.ucConnObjId = connector_object_id;
1220 if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1221 args.v6.ucDPLaneSet = lane_set;
1222 else
1223 args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1224
1225 if (hpd_id == AMDGPU_HPD_NONE)
1226 args.v6.ucHPDSel = 0;
1227 else
1228 args.v6.ucHPDSel = hpd_id + 1;
1229 args.v6.ucDigEncoderSel = 1 << dig_encoder;
1230 break;
1231 default:
1232 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1233 break;
1234 }
1235 break;
1236 default:
1237 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1238 break;
1239 }
1240
1241 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1242 }
1243
1244 bool
amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector * connector,int action)1245 amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1246 int action)
1247 {
1248 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1249 struct drm_device *dev = amdgpu_connector->base.dev;
1250 struct amdgpu_device *adev = dev->dev_private;
1251 union dig_transmitter_control args;
1252 int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1253 uint8_t frev, crev;
1254
1255 if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1256 goto done;
1257
1258 if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1259 (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1260 goto done;
1261
1262 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1263 goto done;
1264
1265 memset(&args, 0, sizeof(args));
1266
1267 args.v1.ucAction = action;
1268
1269 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1270
1271 /* wait for the panel to power up */
1272 if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1273 int i;
1274
1275 for (i = 0; i < 300; i++) {
1276 if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1277 return true;
1278 mdelay(1);
1279 }
1280 return false;
1281 }
1282 done:
1283 return true;
1284 }
1285
/* Parameter block for the ExternalEncoderControl atom command table.
 * The member used depends on the table revision reported by the VBIOS
 * (frev/crev from amdgpu_atom_parse_cmd_header): v1 for frev 2
 * crev 1/2, v3 for frev 2 crev 3.
 */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};
1290
/* Program an external encoder (e.g. a DP bridge chip) through the
 * ExternalEncoderControl atom table.  @action selects the operation
 * (init/setup/enable/disable, EXTERNAL_ENCODER_ACTION_V3_*); the
 * parameter layout written depends on the table revision reported by
 * the VBIOS.  DP clock/lane parameters are taken from the connector
 * currently bound to @encoder, when one exists.
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	/* during encoder init the connector may not be bound yet, so a
	 * different lookup is used */
	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			/* atom tables take clocks in 10 kHz units */
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			/* INIT passes the connector id instead of a clock */
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;
			/* select which external encoder instance to address */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}
1395
/* Full enable/disable sequence for a DIG encoder + transmitter pair.
 * @action is ATOM_ENABLE or ATOM_DISABLE.  The ordering of the calls
 * below is the hardware contract: on enable, the encoder is set up
 * first, eDP panel power comes on before the transmitter, and DP link
 * training runs only after the transmitter is enabled; disable walks
 * the sequence in reverse.  Any attached external encoder (bridge) is
 * sequenced alongside.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		/* no bound connector: assume an external DP bridge drives
		 * the panel mode */
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* eDP panel must be powered before the link comes up */
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		/* restore the LCD backlight to its last known level */
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		/* blank the LCD backlight before cutting the transmitter */
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);

		/* put the DP sink into D3 while the link is still up */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}
1474
1475 void
amdgpu_atombios_encoder_dpms(struct drm_encoder * encoder,int mode)1476 amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1477 {
1478 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1479
1480 DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1481 amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1482 amdgpu_encoder->active_device);
1483 switch (amdgpu_encoder->encoder_id) {
1484 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1485 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1486 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1487 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1488 switch (mode) {
1489 case DRM_MODE_DPMS_ON:
1490 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1491 break;
1492 case DRM_MODE_DPMS_STANDBY:
1493 case DRM_MODE_DPMS_SUSPEND:
1494 case DRM_MODE_DPMS_OFF:
1495 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1496 break;
1497 }
1498 break;
1499 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1500 switch (mode) {
1501 case DRM_MODE_DPMS_ON:
1502 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1503 break;
1504 case DRM_MODE_DPMS_STANDBY:
1505 case DRM_MODE_DPMS_SUSPEND:
1506 case DRM_MODE_DPMS_OFF:
1507 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1508 break;
1509 }
1510 break;
1511 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1512 switch (mode) {
1513 case DRM_MODE_DPMS_ON:
1514 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1515 break;
1516 case DRM_MODE_DPMS_STANDBY:
1517 case DRM_MODE_DPMS_SUSPEND:
1518 case DRM_MODE_DPMS_OFF:
1519 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1520 break;
1521 }
1522 break;
1523 default:
1524 return;
1525 }
1526 }
1527
/* Parameter block for the SelectCRTC_Source atom command table.
 * The member used depends on the table revision reported by the
 * VBIOS: v1 for crev 1, v2 for crev 2, v3 for crev 3.
 */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
1533
1534 void
amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder * encoder)1535 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1536 {
1537 struct drm_device *dev = encoder->dev;
1538 struct amdgpu_device *adev = dev->dev_private;
1539 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1540 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1541 union crtc_source_param args;
1542 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1543 uint8_t frev, crev;
1544 struct amdgpu_encoder_atom_dig *dig;
1545
1546 memset(&args, 0, sizeof(args));
1547
1548 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1549 return;
1550
1551 switch (frev) {
1552 case 1:
1553 switch (crev) {
1554 case 1:
1555 default:
1556 args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1557 switch (amdgpu_encoder->encoder_id) {
1558 case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1559 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1560 args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1561 break;
1562 case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1563 case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1564 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1565 args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1566 else
1567 args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1568 break;
1569 case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1570 case ENCODER_OBJECT_ID_INTERNAL_DDI:
1571 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1572 args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1573 break;
1574 case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1575 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1576 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1577 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1578 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1579 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1580 else
1581 args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1582 break;
1583 case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1584 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1585 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1586 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1587 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1588 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1589 else
1590 args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1591 break;
1592 }
1593 break;
1594 case 2:
1595 args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1596 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1597 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1598
1599 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1600 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1601 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1602 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1603 else
1604 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1605 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1606 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1607 } else {
1608 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1609 }
1610 switch (amdgpu_encoder->encoder_id) {
1611 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1612 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1613 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1614 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1615 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1616 dig = amdgpu_encoder->enc_priv;
1617 switch (dig->dig_encoder) {
1618 case 0:
1619 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1620 break;
1621 case 1:
1622 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1623 break;
1624 case 2:
1625 args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1626 break;
1627 case 3:
1628 args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1629 break;
1630 case 4:
1631 args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1632 break;
1633 case 5:
1634 args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1635 break;
1636 case 6:
1637 args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1638 break;
1639 }
1640 break;
1641 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1642 args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1643 break;
1644 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1645 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1646 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1647 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1648 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1649 else
1650 args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1651 break;
1652 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1653 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1654 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1655 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1656 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1657 else
1658 args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1659 break;
1660 }
1661 break;
1662 case 3:
1663 args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1664 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1665 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1666
1667 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1668 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1669 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1670 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1671 else
1672 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1673 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1674 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1675 } else {
1676 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1677 }
1678 args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1679 switch (amdgpu_encoder->encoder_id) {
1680 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1681 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1682 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1683 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1684 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1685 dig = amdgpu_encoder->enc_priv;
1686 switch (dig->dig_encoder) {
1687 case 0:
1688 args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1689 break;
1690 case 1:
1691 args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1692 break;
1693 case 2:
1694 args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1695 break;
1696 case 3:
1697 args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1698 break;
1699 case 4:
1700 args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1701 break;
1702 case 5:
1703 args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1704 break;
1705 case 6:
1706 args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1707 break;
1708 }
1709 break;
1710 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1711 args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1712 break;
1713 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1714 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1715 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1716 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1717 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1718 else
1719 args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1720 break;
1721 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1722 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1723 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1724 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1725 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1726 else
1727 args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1728 break;
1729 }
1730 break;
1731 }
1732 break;
1733 default:
1734 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1735 return;
1736 }
1737
1738 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1739 }
1740
1741 /* This only needs to be called once at startup */
1742 void
amdgpu_atombios_encoder_init_dig(struct amdgpu_device * adev)1743 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1744 {
1745 struct drm_device *dev = adev->ddev;
1746 struct drm_encoder *encoder;
1747
1748 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1749 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1750 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1751
1752 switch (amdgpu_encoder->encoder_id) {
1753 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1754 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1755 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1756 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1757 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1758 0, 0);
1759 break;
1760 }
1761
1762 if (ext_encoder)
1763 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1764 EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1765 }
1766 }
1767
1768 static bool
amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder * encoder,struct drm_connector * connector)1769 amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1770 struct drm_connector *connector)
1771 {
1772 struct drm_device *dev = encoder->dev;
1773 struct amdgpu_device *adev = dev->dev_private;
1774 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1775 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1776
1777 if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1778 ATOM_DEVICE_CV_SUPPORT |
1779 ATOM_DEVICE_CRT_SUPPORT)) {
1780 DAC_LOAD_DETECTION_PS_ALLOCATION args;
1781 int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1782 uint8_t frev, crev;
1783
1784 memset(&args, 0, sizeof(args));
1785
1786 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1787 return false;
1788
1789 args.sDacload.ucMisc = 0;
1790
1791 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1792 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1793 args.sDacload.ucDacType = ATOM_DAC_A;
1794 else
1795 args.sDacload.ucDacType = ATOM_DAC_B;
1796
1797 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1798 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1799 else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1800 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1801 else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1802 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1803 if (crev >= 3)
1804 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1805 } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1806 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1807 if (crev >= 3)
1808 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1809 }
1810
1811 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1812
1813 return true;
1814 } else
1815 return false;
1816 }
1817
1818 enum drm_connector_status
amdgpu_atombios_encoder_dac_detect(struct drm_encoder * encoder,struct drm_connector * connector)1819 amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1820 struct drm_connector *connector)
1821 {
1822 struct drm_device *dev = encoder->dev;
1823 struct amdgpu_device *adev = dev->dev_private;
1824 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1825 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1826 uint32_t bios_0_scratch;
1827
1828 if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1829 DRM_DEBUG_KMS("detect returned false \n");
1830 return connector_status_unknown;
1831 }
1832
1833 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1834
1835 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1836 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1837 if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1838 return connector_status_connected;
1839 }
1840 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1841 if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1842 return connector_status_connected;
1843 }
1844 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1845 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1846 return connector_status_connected;
1847 }
1848 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1849 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1850 return connector_status_connected; /* CTV */
1851 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1852 return connector_status_connected; /* STV */
1853 }
1854 return connector_status_disconnected;
1855 }
1856
1857 enum drm_connector_status
amdgpu_atombios_encoder_dig_detect(struct drm_encoder * encoder,struct drm_connector * connector)1858 amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1859 struct drm_connector *connector)
1860 {
1861 struct drm_device *dev = encoder->dev;
1862 struct amdgpu_device *adev = dev->dev_private;
1863 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1864 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1865 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1866 u32 bios_0_scratch;
1867
1868 if (!ext_encoder)
1869 return connector_status_unknown;
1870
1871 if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1872 return connector_status_unknown;
1873
1874 /* load detect on the dp bridge */
1875 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1876 EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1877
1878 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1879
1880 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1881 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1882 if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1883 return connector_status_connected;
1884 }
1885 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1886 if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1887 return connector_status_connected;
1888 }
1889 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1890 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1891 return connector_status_connected;
1892 }
1893 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1894 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1895 return connector_status_connected; /* CTV */
1896 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1897 return connector_status_connected; /* STV */
1898 }
1899 return connector_status_disconnected;
1900 }
1901
1902 void
amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder * encoder)1903 amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1904 {
1905 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1906
1907 if (ext_encoder)
1908 /* ddc_setup on the dp bridge */
1909 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1910 EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1911
1912 }
1913
1914 void
amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector * connector,struct drm_encoder * encoder,bool connected)1915 amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1916 struct drm_encoder *encoder,
1917 bool connected)
1918 {
1919 struct drm_device *dev = connector->dev;
1920 struct amdgpu_device *adev = dev->dev_private;
1921 struct amdgpu_connector *amdgpu_connector =
1922 to_amdgpu_connector(connector);
1923 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1924 uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1925
1926 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1927 bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1928 bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1929
1930 if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1931 (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1932 if (connected) {
1933 DRM_DEBUG_KMS("LCD1 connected\n");
1934 bios_0_scratch |= ATOM_S0_LCD1;
1935 bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1936 bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1937 } else {
1938 DRM_DEBUG_KMS("LCD1 disconnected\n");
1939 bios_0_scratch &= ~ATOM_S0_LCD1;
1940 bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1941 bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1942 }
1943 }
1944 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1945 (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1946 if (connected) {
1947 DRM_DEBUG_KMS("CRT1 connected\n");
1948 bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1949 bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1950 bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1951 } else {
1952 DRM_DEBUG_KMS("CRT1 disconnected\n");
1953 bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1954 bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1955 bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1956 }
1957 }
1958 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1959 (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1960 if (connected) {
1961 DRM_DEBUG_KMS("CRT2 connected\n");
1962 bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1963 bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1964 bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1965 } else {
1966 DRM_DEBUG_KMS("CRT2 disconnected\n");
1967 bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1968 bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1969 bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1970 }
1971 }
1972 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1973 (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1974 if (connected) {
1975 DRM_DEBUG_KMS("DFP1 connected\n");
1976 bios_0_scratch |= ATOM_S0_DFP1;
1977 bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1978 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1979 } else {
1980 DRM_DEBUG_KMS("DFP1 disconnected\n");
1981 bios_0_scratch &= ~ATOM_S0_DFP1;
1982 bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1983 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1984 }
1985 }
1986 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1987 (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1988 if (connected) {
1989 DRM_DEBUG_KMS("DFP2 connected\n");
1990 bios_0_scratch |= ATOM_S0_DFP2;
1991 bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1992 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1993 } else {
1994 DRM_DEBUG_KMS("DFP2 disconnected\n");
1995 bios_0_scratch &= ~ATOM_S0_DFP2;
1996 bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1997 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1998 }
1999 }
2000 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
2001 (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
2002 if (connected) {
2003 DRM_DEBUG_KMS("DFP3 connected\n");
2004 bios_0_scratch |= ATOM_S0_DFP3;
2005 bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
2006 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
2007 } else {
2008 DRM_DEBUG_KMS("DFP3 disconnected\n");
2009 bios_0_scratch &= ~ATOM_S0_DFP3;
2010 bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
2011 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
2012 }
2013 }
2014 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
2015 (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
2016 if (connected) {
2017 DRM_DEBUG_KMS("DFP4 connected\n");
2018 bios_0_scratch |= ATOM_S0_DFP4;
2019 bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
2020 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
2021 } else {
2022 DRM_DEBUG_KMS("DFP4 disconnected\n");
2023 bios_0_scratch &= ~ATOM_S0_DFP4;
2024 bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
2025 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
2026 }
2027 }
2028 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
2029 (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
2030 if (connected) {
2031 DRM_DEBUG_KMS("DFP5 connected\n");
2032 bios_0_scratch |= ATOM_S0_DFP5;
2033 bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
2034 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
2035 } else {
2036 DRM_DEBUG_KMS("DFP5 disconnected\n");
2037 bios_0_scratch &= ~ATOM_S0_DFP5;
2038 bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
2039 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
2040 }
2041 }
2042 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
2043 (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
2044 if (connected) {
2045 DRM_DEBUG_KMS("DFP6 connected\n");
2046 bios_0_scratch |= ATOM_S0_DFP6;
2047 bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
2048 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
2049 } else {
2050 DRM_DEBUG_KMS("DFP6 disconnected\n");
2051 bios_0_scratch &= ~ATOM_S0_DFP6;
2052 bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
2053 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
2054 }
2055 }
2056
2057 WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
2058 WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
2059 WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
2060 }
2061
/*
 * Overlay of the AtomBIOS LVDS_Info data table revisions so the parser
 * can address either layout at the same BIOS offset.
 */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};
2066
/*
 * Build the LVDS/eDP panel description for an encoder from the VBIOS
 * LVDS_Info data table: native mode timings, panel power delay, misc
 * flags, physical size, and any patch records (fake EDID, panel
 * resolution override).
 *
 * Returns a kzalloc'ed amdgpu_encoder_atom_dig the caller owns and must
 * free, or NULL if the table is absent or allocation fails.
 */
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
	uint16_t data_offset, misc;
	union lvds_info *lvds_info;
	uint8_t frev, crev;
	struct amdgpu_encoder_atom_dig *lvds = NULL;
	/* Enum id within the encoder object; 2 means transmitter link B. */
	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
					  &frev, &crev, &data_offset)) {
		/* Table is read in place out of the BIOS image. */
		lvds_info =
			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
		lvds =
		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

		if (!lvds)
			return NULL;

		/* Timings are stored as display + blank/offset/width deltas;
		 * pixel clock is in 10 kHz units, hence the * 10 to get kHz. */
		lvds->native_mode.clock =
		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
		lvds->native_mode.hdisplay =
		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
		lvds->native_mode.vdisplay =
		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
		lvds->panel_pwr_delay =
		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;

		/* Translate the ATOM mode-misc bits into DRM mode flags. */
		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
		if (misc & ATOM_VSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
		if (misc & ATOM_HSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
		if (misc & ATOM_COMPOSITESYNC)
			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
		if (misc & ATOM_INTERLACE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
		if (misc & ATOM_DOUBLE_CLOCK_MODE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;

		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);

		/* set crtc values */
		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);

		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;

		encoder->native_mode = lvds->native_mode;

		/* Enum id 2 corresponds to transmitter link B. */
		if (encoder_enum == 2)
			lvds->linkb = true;
		else
			lvds->linkb = false;

		/* parse the lcd record table */
		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
			bool bad_record = false;
			u8 *record;

			/* Older tables (frev 1, crev < 2) store an absolute
			 * BIOS offset; newer ones are relative to the table. */
			if ((frev == 1) && (crev < 2))
				/* absolute */
				record = (u8 *)(mode_info->atom_context->bios +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			else
				/* relative */
				record = (u8 *)(mode_info->atom_context->bios +
						data_offset +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			/* Walk variable-length records until the end marker;
			 * each case must advance by that record's exact size. */
			while (*record != ATOM_RECORD_END_TYPE) {
				switch (*record) {
				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
					record += sizeof(ATOM_PATCH_RECORD_MODE);
					break;
				case LCD_RTS_RECORD_TYPE:
					record += sizeof(ATOM_LCD_RTS_RECORD);
					break;
				case LCD_CAP_RECORD_TYPE:
					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
					break;
				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
					if (fake_edid_record->ucFakeEDIDLength) {
						struct edid *edid;
						/* Allocate at least one full EDID block even
						 * if the record claims fewer bytes. */
						int edid_size =
							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
						/* NOTE(review): three-argument kmalloc(size, type,
						 * flags) is the DragonFly/BSD drm-port signature —
						 * confirm against this tree's kmalloc. */
						edid = kmalloc(edid_size, M_DRM, GFP_KERNEL);
						if (edid) {
							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
							       fake_edid_record->ucFakeEDIDLength);

							if (drm_edid_is_valid(edid)) {
								adev->mode_info.bios_hardcoded_edid = edid;
								adev->mode_info.bios_hardcoded_edid_size = edid_size;
							} else
								kfree(edid);
						}
					}
					/* Record is header (2 bytes) + EDID payload when a
					 * length is present; fixed-size struct otherwise. */
					record += fake_edid_record->ucFakeEDIDLength ?
						  fake_edid_record->ucFakeEDIDLength + 2 :
						  sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
					break;
				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
					lvds->native_mode.width_mm = panel_res_record->usHSize;
					lvds->native_mode.height_mm = panel_res_record->usVSize;
					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
					break;
				default:
					/* Unknown record type: size unknown, so we cannot
					 * safely skip it — stop parsing. */
					DRM_ERROR("Bad LCD record %d\n", *record);
					bad_record = true;
					break;
				}
				if (bad_record)
					break;
			}
		}
	}
	return lvds;
}
2206
2207 struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder * amdgpu_encoder)2208 amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2209 {
2210 int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2211 struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2212
2213 if (!dig)
2214 return NULL;
2215
2216 /* coherent mode by default */
2217 dig->coherent_mode = true;
2218 dig->dig_encoder = -1;
2219
2220 if (encoder_enum == 2)
2221 dig->linkb = true;
2222 else
2223 dig->linkb = false;
2224
2225 return dig;
2226 }
2227
2228