// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2022-2023 Qualcomm Innovation Center, Inc. All rights reserved.
 * Copyright (c) 2014-2021 The Linux Foundation. All rights reserved.
 * Copyright (C) 2013 Red Hat
 * Author: Rob Clark <robdclark@gmail.com>
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/sort.h>
#include <linux/debugfs.h>
#include <linux/ktime.h>
#include <linux/bits.h>

#include <drm/drm_atomic.h>
#include <drm/drm_blend.h>
#include <drm/drm_crtc.h>
#include <drm/drm_flip_work.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_mode.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_rect.h>
#include <drm/drm_vblank.h>
#include <drm/drm_self_refresh_helper.h>

#include "dpu_kms.h"
#include "dpu_hw_lm.h"
#include "dpu_hw_ctl.h"
#include "dpu_hw_dspp.h"
#include "dpu_crtc.h"
#include "dpu_plane.h"
#include "dpu_encoder.h"
#include "dpu_vbif.h"
#include "dpu_core_perf.h"
#include "dpu_trace.h"

/* layer mixer index on dpu_crtc */
#define LEFT_MIXER 0
#define RIGHT_MIXER 1

/* timeout in ms waiting for frame done */
#define DPU_CRTC_FRAME_DONE_TIMEOUT_MS	60

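/*
 * DRM CTM coefficients are S31.32 sign-magnitude fixed point; the PCC block
 * takes an 18-bit value with 3 integer and 15 fractional bits. Drop the
 * sign bit, shift out the 17 least significant fractional bits and mask the
 * result to 18 bits, e.g. a CTM entry of 1.0 (1ULL << 32) becomes 0x8000.
 */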
#define CONVERT_S3_15(val) \
	(((((u64)val) & ~BIT_ULL(63)) >> 17) & GENMASK_ULL(17, 0))

static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc)
{
	struct msm_drm_private *priv = crtc->dev->dev_private;

	return to_dpu_kms(priv->kms);
}

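/*
 * Return the first encoder currently attached to this CRTC, or NULL if
 * there is none. Callers here only need a single encoder to query
 * per-interface state such as the line counter or vsync count.
 */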
static struct drm_encoder *get_encoder_from_crtc(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_encoder *encoder;

	drm_for_each_encoder(encoder, dev)
		if (encoder->crtc == crtc)
			return encoder;

	return NULL;
}

static enum dpu_crtc_crc_source dpu_crtc_parse_crc_source(const char *src_name)
{
	if (!src_name ||
	    !strcmp(src_name, "none"))
		return DPU_CRTC_CRC_SOURCE_NONE;
	if (!strcmp(src_name, "auto") ||
	    !strcmp(src_name, "lm"))
		return DPU_CRTC_CRC_SOURCE_LAYER_MIXER;
	if (!strcmp(src_name, "encoder"))
		return DPU_CRTC_CRC_SOURCE_ENCODER;

	return DPU_CRTC_CRC_SOURCE_INVALID;
}

static int dpu_crtc_verify_crc_source(struct drm_crtc *crtc,
		const char *src_name, size_t *values_cnt)
{
	enum dpu_crtc_crc_source source = dpu_crtc_parse_crc_source(src_name);
	struct dpu_crtc_state *crtc_state = to_dpu_crtc_state(crtc->state);

	if (source < 0) {
		DRM_DEBUG_DRIVER("Invalid source %s for CRTC%d\n", src_name, crtc->index);
		return -EINVAL;
	}

	if (source == DPU_CRTC_CRC_SOURCE_LAYER_MIXER) {
		*values_cnt = crtc_state->num_mixers;
	} else if (source == DPU_CRTC_CRC_SOURCE_ENCODER) {
		struct drm_encoder *drm_enc;

		*values_cnt = 0;

		drm_for_each_encoder_mask(drm_enc, crtc->dev, crtc->state->encoder_mask)
			*values_cnt += dpu_encoder_get_crc_values_cnt(drm_enc);
	}

	return 0;
}

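/*
 * Arm MISR collection on each layer mixer owned by this CRTC; the hardware
 * then computes a CRC-like signature per frame, read back later through
 * collect_misr() in dpu_crtc_get_lm_crc().
 */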
static void dpu_crtc_setup_lm_misr(struct dpu_crtc_state *crtc_state)
{
	struct dpu_crtc_mixer *m;
	int i;

	for (i = 0; i < crtc_state->num_mixers; ++i) {
		m = &crtc_state->mixers[i];

		if (!m->hw_lm || !m->hw_lm->ops.setup_misr)
			continue;

		/* Calculate MISR over 1 frame */
		m->hw_lm->ops.setup_misr(m->hw_lm);
	}
}

static void dpu_crtc_setup_encoder_misr(struct drm_crtc *crtc)
{
	struct drm_encoder *drm_enc;

	drm_for_each_encoder_mask(drm_enc, crtc->dev, crtc->state->encoder_mask)
		dpu_encoder_setup_misr(drm_enc);
}

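/*
 * CRCs are collected from the vblank callback (dpu_crtc_get_crc()), so a
 * vblank reference is held for as long as a CRC source is active: taken
 * when CRC generation is first enabled and dropped when it is disabled.
 */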
static int dpu_crtc_set_crc_source(struct drm_crtc *crtc, const char *src_name)
{
	enum dpu_crtc_crc_source source = dpu_crtc_parse_crc_source(src_name);
	enum dpu_crtc_crc_source current_source;
	struct dpu_crtc_state *crtc_state;
	struct drm_device *drm_dev = crtc->dev;

	bool was_enabled;
	bool enable = false;
	int ret = 0;

	if (source < 0) {
		DRM_DEBUG_DRIVER("Invalid CRC source %s for CRTC%d\n", src_name, crtc->index);
		return -EINVAL;
	}

	ret = drm_modeset_lock(&crtc->mutex, NULL);

	if (ret)
		return ret;

	enable = (source != DPU_CRTC_CRC_SOURCE_NONE);
	crtc_state = to_dpu_crtc_state(crtc->state);

	spin_lock_irq(&drm_dev->event_lock);
	current_source = crtc_state->crc_source;
	spin_unlock_irq(&drm_dev->event_lock);

	was_enabled = (current_source != DPU_CRTC_CRC_SOURCE_NONE);

	if (!was_enabled && enable) {
		ret = drm_crtc_vblank_get(crtc);

		if (ret)
			goto cleanup;

	} else if (was_enabled && !enable) {
		drm_crtc_vblank_put(crtc);
	}

	spin_lock_irq(&drm_dev->event_lock);
	crtc_state->crc_source = source;
	spin_unlock_irq(&drm_dev->event_lock);

	crtc_state->crc_frame_skip_count = 0;

	if (source == DPU_CRTC_CRC_SOURCE_LAYER_MIXER)
		dpu_crtc_setup_lm_misr(crtc_state);
	else if (source == DPU_CRTC_CRC_SOURCE_ENCODER)
		dpu_crtc_setup_encoder_misr(crtc);
	else
		ret = -EINVAL;

cleanup:
	drm_modeset_unlock(&crtc->mutex);

	return ret;
}

static u32 dpu_crtc_get_vblank_counter(struct drm_crtc *crtc)
{
	struct drm_encoder *encoder = get_encoder_from_crtc(crtc);

	if (!encoder) {
		DRM_ERROR("no encoder found for crtc %d\n", crtc->index);
		return 0;
	}

	return dpu_encoder_get_vsync_count(encoder);
}

static int dpu_crtc_get_lm_crc(struct drm_crtc *crtc,
		struct dpu_crtc_state *crtc_state)
{
	struct dpu_crtc_mixer *m;
	u32 crcs[CRTC_DUAL_MIXERS];

	int rc = 0;
	int i;

	BUILD_BUG_ON(ARRAY_SIZE(crcs) != ARRAY_SIZE(crtc_state->mixers));

	for (i = 0; i < crtc_state->num_mixers; ++i) {
		m = &crtc_state->mixers[i];

		if (!m->hw_lm || !m->hw_lm->ops.collect_misr)
			continue;

		rc = m->hw_lm->ops.collect_misr(m->hw_lm, &crcs[i]);

		if (rc) {
			if (rc != -ENODATA)
				DRM_DEBUG_DRIVER("MISR read failed\n");
			return rc;
		}
	}

	return drm_crtc_add_crc_entry(crtc, true,
			drm_crtc_accurate_vblank_count(crtc), crcs);
}

static int dpu_crtc_get_encoder_crc(struct drm_crtc *crtc)
{
	struct drm_encoder *drm_enc;
	int rc, pos = 0;
	u32 crcs[INTF_MAX];

	drm_for_each_encoder_mask(drm_enc, crtc->dev, crtc->state->encoder_mask) {
		rc = dpu_encoder_get_crc(drm_enc, crcs, pos);
		if (rc < 0) {
			if (rc != -ENODATA)
				DRM_DEBUG_DRIVER("MISR read failed\n");

			return rc;
		}

		pos += rc;
	}

	return drm_crtc_add_crc_entry(crtc, true,
			drm_crtc_accurate_vblank_count(crtc), crcs);
}

static int dpu_crtc_get_crc(struct drm_crtc *crtc)
{
	struct dpu_crtc_state *crtc_state = to_dpu_crtc_state(crtc->state);

	/* Skip first 2 frames in case of "uncooked" CRCs */
	if (crtc_state->crc_frame_skip_count < 2) {
		crtc_state->crc_frame_skip_count++;
		return 0;
	}

	if (crtc_state->crc_source == DPU_CRTC_CRC_SOURCE_LAYER_MIXER)
		return dpu_crtc_get_lm_crc(crtc, crtc_state);
	else if (crtc_state->crc_source == DPU_CRTC_CRC_SOURCE_ENCODER)
		return dpu_crtc_get_encoder_crc(crtc);

	return -EINVAL;
}

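/*
 * Scanout position follows the DRM helper convention: *vpos is relative to
 * the first active line, so it is negative while the CRTC is in vertical
 * blanking and counts up towards zero at the start of the next frame.
 */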
static bool dpu_crtc_get_scanout_position(struct drm_crtc *crtc,
					  bool in_vblank_irq,
					  int *vpos, int *hpos,
					  ktime_t *stime, ktime_t *etime,
					  const struct drm_display_mode *mode)
{
	unsigned int pipe = crtc->index;
	struct drm_encoder *encoder;
	int line, vsw, vbp, vactive_start, vactive_end, vfp_end;

	encoder = get_encoder_from_crtc(crtc);
	if (!encoder) {
		DRM_ERROR("no encoder found for crtc %d\n", pipe);
		return false;
	}

	vsw = mode->crtc_vsync_end - mode->crtc_vsync_start;
	vbp = mode->crtc_vtotal - mode->crtc_vsync_end;

	/*
	 * The line counter is 1 at the start of the VSYNC pulse and VTOTAL at
	 * the end of VFP. Translate the porch values relative to the line
	 * counter positions.
	 */
	vactive_start = vsw + vbp + 1;
	vactive_end = vactive_start + mode->crtc_vdisplay;

	/* last scan line before VSYNC */
	vfp_end = mode->crtc_vtotal;

	if (stime)
		*stime = ktime_get();

	line = dpu_encoder_get_linecount(encoder);

	if (line < vactive_start)
		line -= vactive_start;
	else if (line > vactive_end)
		line = line - vfp_end - vactive_start;
	else
		line -= vactive_start;

	*vpos = line;
	*hpos = 0;

	if (etime)
		*etime = ktime_get();

	return true;
}

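/*
 * Translate the plane's DRM pixel blend mode and constant alpha into DPU
 * mixer blend-stage flags: "None" blends with constant FG/BG alpha
 * (opaque), "Pre-multiplied" keeps the FG at constant alpha and derives the
 * BG factor from the FG pixel alpha, and "Coverage" modulates both FG and
 * BG by the pixel alpha.
 */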
static void _dpu_crtc_setup_blend_cfg(struct dpu_crtc_mixer *mixer,
		struct dpu_plane_state *pstate, const struct msm_format *format)
{
	struct dpu_hw_mixer *lm = mixer->hw_lm;
	uint32_t blend_op;
	uint32_t fg_alpha, bg_alpha;

	fg_alpha = pstate->base.alpha >> 8;
	bg_alpha = 0xff - fg_alpha;

	/* default to opaque blending */
	if (pstate->base.pixel_blend_mode == DRM_MODE_BLEND_PIXEL_NONE ||
	    !format->alpha_enable) {
		blend_op = DPU_BLEND_FG_ALPHA_FG_CONST |
			DPU_BLEND_BG_ALPHA_BG_CONST;
	} else if (pstate->base.pixel_blend_mode == DRM_MODE_BLEND_PREMULTI) {
		blend_op = DPU_BLEND_FG_ALPHA_FG_CONST |
			DPU_BLEND_BG_ALPHA_FG_PIXEL;
		if (fg_alpha != 0xff) {
			bg_alpha = fg_alpha;
			blend_op |= DPU_BLEND_BG_MOD_ALPHA |
				    DPU_BLEND_BG_INV_MOD_ALPHA;
		} else {
			blend_op |= DPU_BLEND_BG_INV_ALPHA;
		}
	} else {
		/* coverage blending */
		blend_op = DPU_BLEND_FG_ALPHA_FG_PIXEL |
			DPU_BLEND_BG_ALPHA_FG_PIXEL;
		if (fg_alpha != 0xff) {
			bg_alpha = fg_alpha;
			blend_op |= DPU_BLEND_FG_MOD_ALPHA |
				    DPU_BLEND_FG_INV_MOD_ALPHA |
				    DPU_BLEND_BG_MOD_ALPHA |
				    DPU_BLEND_BG_INV_MOD_ALPHA;
		} else {
			blend_op |= DPU_BLEND_BG_INV_ALPHA;
		}
	}

	lm->ops.setup_blend_config(lm, pstate->stage,
				fg_alpha, bg_alpha, blend_op);

	DRM_DEBUG_ATOMIC("format:%p4cc, alpha_en:%u blend_op:0x%x\n",
			 &format->pixel_format, format->alpha_enable, blend_op);
}

static void _dpu_crtc_program_lm_output_roi(struct drm_crtc *crtc)
{
	struct dpu_crtc_state *crtc_state;
	int lm_idx, lm_horiz_position;

	crtc_state = to_dpu_crtc_state(crtc->state);

	lm_horiz_position = 0;
	for (lm_idx = 0; lm_idx < crtc_state->num_mixers; lm_idx++) {
		const struct drm_rect *lm_roi = &crtc_state->lm_bounds[lm_idx];
		struct dpu_hw_mixer *hw_lm = crtc_state->mixers[lm_idx].hw_lm;
		struct dpu_hw_mixer_cfg cfg;

		if (!lm_roi || !drm_rect_visible(lm_roi))
			continue;

		cfg.out_width = drm_rect_width(lm_roi);
		cfg.out_height = drm_rect_height(lm_roi);
		cfg.right_mixer = lm_horiz_position++;
		cfg.flags = 0;
		hw_lm->ops.setup_mixer_out(hw_lm, &cfg);
	}
}

static void _dpu_crtc_blend_setup_pipe(struct drm_crtc *crtc,
				       struct drm_plane *plane,
				       struct dpu_crtc_mixer *mixer,
				       u32 num_mixers,
				       enum dpu_stage stage,
				       const struct msm_format *format,
				       uint64_t modifier,
				       struct dpu_sw_pipe *pipe,
				       unsigned int stage_idx,
				       struct dpu_hw_stage_cfg *stage_cfg)
{
	uint32_t lm_idx;
	enum dpu_sspp sspp_idx;
	struct drm_plane_state *state;

	sspp_idx = pipe->sspp->idx;

	state = plane->state;

	trace_dpu_crtc_setup_mixer(DRMID(crtc), DRMID(plane),
				   state, to_dpu_plane_state(state), stage_idx,
				   format->pixel_format,
				   modifier);

	DRM_DEBUG_ATOMIC("crtc %d stage:%d - plane %d sspp %d fb %d multirect_idx %d\n",
			 crtc->base.id,
			 stage,
			 plane->base.id,
			 sspp_idx - SSPP_NONE,
			 state->fb ? state->fb->base.id : -1,
			 pipe->multirect_index);

	stage_cfg->stage[stage][stage_idx] = sspp_idx;
	stage_cfg->multirect_index[stage][stage_idx] = pipe->multirect_index;

	/* blend config update */
	for (lm_idx = 0; lm_idx < num_mixers; lm_idx++)
		mixer[lm_idx].lm_ctl->ops.update_pending_flush_sspp(mixer[lm_idx].lm_ctl, sspp_idx);
}

static void _dpu_crtc_blend_setup_mixer(struct drm_crtc *crtc,
	struct dpu_crtc *dpu_crtc, struct dpu_crtc_mixer *mixer,
	struct dpu_hw_stage_cfg *stage_cfg)
{
	struct drm_plane *plane;
	struct drm_framebuffer *fb;
	struct drm_plane_state *state;
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);
	struct dpu_plane_state *pstate = NULL;
	const struct msm_format *format;
	struct dpu_hw_ctl *ctl = mixer->lm_ctl;

	uint32_t lm_idx;
	bool bg_alpha_enable = false;
	DECLARE_BITMAP(fetch_active, SSPP_MAX);

	memset(fetch_active, 0, sizeof(fetch_active));
	drm_atomic_crtc_for_each_plane(plane, crtc) {
		state = plane->state;
		if (!state)
			continue;

		if (!state->visible)
			continue;

		pstate = to_dpu_plane_state(state);
		fb = state->fb;

		format = msm_framebuffer_format(pstate->base.fb);

		if (pstate->stage == DPU_STAGE_BASE && format->alpha_enable)
			bg_alpha_enable = true;

		set_bit(pstate->pipe.sspp->idx, fetch_active);
		_dpu_crtc_blend_setup_pipe(crtc, plane,
					   mixer, cstate->num_mixers,
					   pstate->stage,
					   format, fb ? fb->modifier : 0,
					   &pstate->pipe, 0, stage_cfg);

		if (pstate->r_pipe.sspp) {
			set_bit(pstate->r_pipe.sspp->idx, fetch_active);
			_dpu_crtc_blend_setup_pipe(crtc, plane,
						   mixer, cstate->num_mixers,
						   pstate->stage,
						   format, fb ? fb->modifier : 0,
						   &pstate->r_pipe, 1, stage_cfg);
		}

		/* blend config update */
		for (lm_idx = 0; lm_idx < cstate->num_mixers; lm_idx++) {
			_dpu_crtc_setup_blend_cfg(mixer + lm_idx, pstate, format);

			if (bg_alpha_enable && !format->alpha_enable)
				mixer[lm_idx].mixer_op_mode = 0;
			else
				mixer[lm_idx].mixer_op_mode |=
						1 << pstate->stage;
		}
	}

	if (ctl->ops.set_active_pipes)
		ctl->ops.set_active_pipes(ctl, fetch_active);

	_dpu_crtc_program_lm_output_roi(crtc);
}

/**
 * _dpu_crtc_blend_setup - configure crtc mixers
 * @crtc: Pointer to drm crtc structure
 */
static void _dpu_crtc_blend_setup(struct drm_crtc *crtc)
{
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);
	struct dpu_crtc_mixer *mixer = cstate->mixers;
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_mixer *lm;
	struct dpu_hw_stage_cfg stage_cfg;
	int i;

	DRM_DEBUG_ATOMIC("%s\n", dpu_crtc->name);

	for (i = 0; i < cstate->num_mixers; i++) {
		mixer[i].mixer_op_mode = 0;
		if (mixer[i].lm_ctl->ops.clear_all_blendstages)
			mixer[i].lm_ctl->ops.clear_all_blendstages(
					mixer[i].lm_ctl);
	}

	/* initialize stage cfg */
	memset(&stage_cfg, 0, sizeof(struct dpu_hw_stage_cfg));

	_dpu_crtc_blend_setup_mixer(crtc, dpu_crtc, mixer, &stage_cfg);

	for (i = 0; i < cstate->num_mixers; i++) {
		ctl = mixer[i].lm_ctl;
		lm = mixer[i].hw_lm;

		lm->ops.setup_alpha_out(lm, mixer[i].mixer_op_mode);

		/* stage config flush mask */
		ctl->ops.update_pending_flush_mixer(ctl,
						    mixer[i].hw_lm->idx);

		DRM_DEBUG_ATOMIC("lm %d, op_mode 0x%X, ctl %d\n",
			mixer[i].hw_lm->idx - LM_0,
			mixer[i].mixer_op_mode,
			ctl->idx - CTL_0);

		ctl->ops.setup_blendstage(ctl, mixer[i].hw_lm->idx,
			&stage_cfg);
	}
}

/**
 * _dpu_crtc_complete_flip - signal pending page_flip events
 * Any pending vblank events are added to the vblank_event_list
 * so that the next vblank interrupt shall signal them.
 * However PAGE_FLIP events are not handled through the vblank_event_list.
 * This API signals any pending PAGE_FLIP events requested through
 * DRM_IOCTL_MODE_PAGE_FLIP and cached in dpu_crtc->event.
 * @crtc: Pointer to drm crtc structure
 */
static void _dpu_crtc_complete_flip(struct drm_crtc *crtc)
{
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	unsigned long flags;

	spin_lock_irqsave(&dev->event_lock, flags);
	if (dpu_crtc->event) {
		DRM_DEBUG_VBL("%s: send event: %pK\n", dpu_crtc->name,
			      dpu_crtc->event);
		trace_dpu_crtc_complete_flip(DRMID(crtc));
		drm_crtc_send_vblank_event(crtc, dpu_crtc->event);
		dpu_crtc->event = NULL;
	}
	spin_unlock_irqrestore(&dev->event_lock, flags);
}

enum dpu_intf_mode dpu_crtc_get_intf_mode(struct drm_crtc *crtc)
{
	struct drm_encoder *encoder;

	/*
	 * TODO: This function is called from dpu debugfs and as part of atomic
	 * check. When called from debugfs, the crtc->mutex must be held to
	 * read crtc->state. However reading crtc->state from atomic check isn't
	 * allowed (unless you have a good reason, a big comment, and a deep
	 * understanding of how the atomic/modeset locks work (<- and this is
	 * probably not possible)). So we'll keep the WARN_ON here for now, but
	 * really we need to figure out a better way to track our operating mode
	 */
	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));

	/* TODO: Returns the first INTF_MODE, could there be multiple values? */
	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask)
		return dpu_encoder_get_intf_mode(encoder);

	return INTF_MODE_NONE;
}

void dpu_crtc_vblank_callback(struct drm_crtc *crtc)
{
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);

	/* keep statistics on vblank callback - with auto reset via debugfs */
	if (ktime_compare(dpu_crtc->vblank_cb_time, ktime_set(0, 0)) == 0)
		dpu_crtc->vblank_cb_time = ktime_get();
	else
		dpu_crtc->vblank_cb_count++;

	dpu_crtc_get_crc(crtc);

	drm_crtc_handle_vblank(crtc);
	trace_dpu_crtc_vblank_cb(DRMID(crtc));
}

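/*
 * Deferred frame-event handler, run on the per-CRTC event thread: drops
 * the bandwidth reservation once the last pending frame completes, wakes
 * waiters in _dpu_crtc_wait_for_frame_done() and returns the event to the
 * free pool.
 */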
static void dpu_crtc_frame_event_work(struct kthread_work *work)
{
	struct dpu_crtc_frame_event *fevent = container_of(work,
			struct dpu_crtc_frame_event, work);
	struct drm_crtc *crtc = fevent->crtc;
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	unsigned long flags;
	bool frame_done = false;

	DPU_ATRACE_BEGIN("crtc_frame_event");

	DRM_DEBUG_ATOMIC("crtc%d event:%u ts:%lld\n", crtc->base.id, fevent->event,
			ktime_to_ns(fevent->ts));

	if (fevent->event & (DPU_ENCODER_FRAME_EVENT_DONE
				| DPU_ENCODER_FRAME_EVENT_ERROR
				| DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (atomic_read(&dpu_crtc->frame_pending) < 1) {
			/* ignore vblank when not pending */
		} else if (atomic_dec_return(&dpu_crtc->frame_pending) == 0) {
			/* release bandwidth and other resources */
			trace_dpu_crtc_frame_event_done(DRMID(crtc),
							fevent->event);
			dpu_core_perf_crtc_release_bw(crtc);
		} else {
			trace_dpu_crtc_frame_event_more_pending(DRMID(crtc),
								fevent->event);
		}

		if (fevent->event & (DPU_ENCODER_FRAME_EVENT_DONE
					| DPU_ENCODER_FRAME_EVENT_ERROR))
			frame_done = true;
	}

	if (fevent->event & DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)
		DPU_ERROR("crtc%d ts:%lld received panel dead event\n",
				crtc->base.id, ktime_to_ns(fevent->ts));

	if (frame_done)
		complete_all(&dpu_crtc->frame_done_comp);

	spin_lock_irqsave(&dpu_crtc->spin_lock, flags);
	list_add_tail(&fevent->list, &dpu_crtc->frame_event_list);
	spin_unlock_irqrestore(&dpu_crtc->spin_lock, flags);
	DPU_ATRACE_END("crtc_frame_event");
}

/*
 * dpu_crtc_frame_event_cb - crtc frame event callback API. CRTC module
 * registers this API to encoder for all frame event callbacks like
 * frame_error, frame_done, idle_timeout, etc. Encoder may call different
 * events from different contexts - IRQ, user thread, commit_thread, etc.
 * Each event should be carefully reviewed and should be processed in the
 * proper task context to avoid scheduling delay or to properly manage the
 * irq context's bottom half processing.
 */
static void dpu_crtc_frame_event_cb(void *data, u32 event)
{
	struct drm_crtc *crtc = (struct drm_crtc *)data;
	struct dpu_crtc *dpu_crtc;
	struct msm_drm_private *priv;
	struct dpu_crtc_frame_event *fevent;
	unsigned long flags;
	u32 crtc_id;

	/* Nothing to do on idle event */
	if (event & DPU_ENCODER_FRAME_EVENT_IDLE)
		return;

	dpu_crtc = to_dpu_crtc(crtc);
	priv = crtc->dev->dev_private;
	crtc_id = drm_crtc_index(crtc);

	trace_dpu_crtc_frame_event_cb(DRMID(crtc), event);

	spin_lock_irqsave(&dpu_crtc->spin_lock, flags);
	fevent = list_first_entry_or_null(&dpu_crtc->frame_event_list,
			struct dpu_crtc_frame_event, list);
	if (fevent)
		list_del_init(&fevent->list);
	spin_unlock_irqrestore(&dpu_crtc->spin_lock, flags);

	if (!fevent) {
		DRM_ERROR_RATELIMITED("crtc%d event %d overflow\n", crtc->base.id, event);
		return;
	}

	fevent->event = event;
	fevent->crtc = crtc;
	fevent->ts = ktime_get();
	kthread_queue_work(priv->event_thread[crtc_id].worker, &fevent->work);
}

void dpu_crtc_complete_commit(struct drm_crtc *crtc)
{
	trace_dpu_crtc_complete_commit(DRMID(crtc));
	dpu_core_perf_crtc_update(crtc, 0);
	_dpu_crtc_complete_flip(crtc);
}

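/*
 * Split the adjusted mode into equal-width vertical strips, one per layer
 * mixer: with two mixers, each one scans out half of hdisplay.
 */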
static void _dpu_crtc_setup_lm_bounds(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(state);
	struct drm_display_mode *adj_mode = &state->adjusted_mode;
	u32 crtc_split_width = adj_mode->hdisplay / cstate->num_mixers;
	int i;

	for (i = 0; i < cstate->num_mixers; i++) {
		struct drm_rect *r = &cstate->lm_bounds[i];
		r->x1 = crtc_split_width * i;
		r->y1 = 0;
		r->x2 = r->x1 + crtc_split_width;
		r->y2 = adj_mode->vdisplay;

		trace_dpu_crtc_setup_lm_bounds(DRMID(crtc), i, r);
	}
}

static void _dpu_crtc_get_pcc_coeff(struct drm_crtc_state *state,
		struct dpu_hw_pcc_cfg *cfg)
{
	struct drm_color_ctm *ctm;

	memset(cfg, 0, sizeof(struct dpu_hw_pcc_cfg));

	ctm = (struct drm_color_ctm *)state->ctm->data;

	if (!ctm)
		return;

	cfg->r.r = CONVERT_S3_15(ctm->matrix[0]);
	cfg->g.r = CONVERT_S3_15(ctm->matrix[1]);
	cfg->b.r = CONVERT_S3_15(ctm->matrix[2]);

	cfg->r.g = CONVERT_S3_15(ctm->matrix[3]);
	cfg->g.g = CONVERT_S3_15(ctm->matrix[4]);
	cfg->b.g = CONVERT_S3_15(ctm->matrix[5]);

	cfg->r.b = CONVERT_S3_15(ctm->matrix[6]);
	cfg->g.b = CONVERT_S3_15(ctm->matrix[7]);
	cfg->b.b = CONVERT_S3_15(ctm->matrix[8]);
}

static void _dpu_crtc_setup_cp_blocks(struct drm_crtc *crtc)
{
	struct drm_crtc_state *state = crtc->state;
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);
	struct dpu_crtc_mixer *mixer = cstate->mixers;
	struct dpu_hw_pcc_cfg cfg;
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_dspp *dspp;
	int i;

	if (!state->color_mgmt_changed && !drm_atomic_crtc_needs_modeset(state))
		return;

	for (i = 0; i < cstate->num_mixers; i++) {
		ctl = mixer[i].lm_ctl;
		dspp = mixer[i].hw_dspp;

		if (!dspp || !dspp->ops.setup_pcc)
			continue;

		if (!state->ctm) {
			dspp->ops.setup_pcc(dspp, NULL);
		} else {
			_dpu_crtc_get_pcc_coeff(state, &cfg);
			dspp->ops.setup_pcc(dspp, &cfg);
		}

		/* stage config flush mask */
		ctl->ops.update_pending_flush_dspp(ctl,
				mixer[i].hw_dspp->idx, DPU_DSPP_PCC);
	}
}

static void dpu_crtc_atomic_begin(struct drm_crtc *crtc,
		struct drm_atomic_state *state)
{
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);
	struct drm_encoder *encoder;

	if (!crtc->state->enable) {
		DRM_DEBUG_ATOMIC("crtc%d -> enable %d, skip atomic_begin\n",
				crtc->base.id, crtc->state->enable);
		return;
	}

	DRM_DEBUG_ATOMIC("crtc%d\n", crtc->base.id);

	_dpu_crtc_setup_lm_bounds(crtc, crtc->state);

	/* encoder will trigger pending mask now */
	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask)
		dpu_encoder_trigger_kickoff_pending(encoder);

	/*
	 * If no mixers have been allocated in dpu_crtc_atomic_check(),
	 * it means we are trying to flush a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!cstate->num_mixers))
		return;

	_dpu_crtc_blend_setup(crtc);

	_dpu_crtc_setup_cp_blocks(crtc);

	/*
	 * PP_DONE irq is only used by command mode for now.
	 * It is better to request pending before FLUSH and START trigger
	 * to make sure no pp_done irq is missed.
	 * This is safe because no pp_done will happen before SW trigger
	 * in command mode.
	 */
}

static void dpu_crtc_atomic_flush(struct drm_crtc *crtc,
		struct drm_atomic_state *state)
{
	struct dpu_crtc *dpu_crtc;
	struct drm_device *dev;
	struct drm_plane *plane;
	struct msm_drm_private *priv;
	unsigned long flags;
	struct dpu_crtc_state *cstate;

	if (!crtc->state->enable) {
		DRM_DEBUG_ATOMIC("crtc%d -> enable %d, skip atomic_flush\n",
				crtc->base.id, crtc->state->enable);
		return;
	}

	DRM_DEBUG_ATOMIC("crtc%d\n", crtc->base.id);

	dpu_crtc = to_dpu_crtc(crtc);
	cstate = to_dpu_crtc_state(crtc->state);
	dev = crtc->dev;
	priv = dev->dev_private;

	if (crtc->index >= ARRAY_SIZE(priv->event_thread)) {
		DPU_ERROR("invalid crtc index[%d]\n", crtc->index);
		return;
	}

	WARN_ON(dpu_crtc->event);
	spin_lock_irqsave(&dev->event_lock, flags);
	dpu_crtc->event = crtc->state->event;
	crtc->state->event = NULL;
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/*
	 * If no mixers have been allocated in dpu_crtc_atomic_check(),
	 * it means we are trying to flush a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!cstate->num_mixers))
		return;

	/* update performance setting before crtc kickoff */
	dpu_core_perf_crtc_update(crtc, 1);

	/*
	 * Final plane updates: Give each plane a chance to complete all
	 * required writes/flushing before the crtc's "flush everything"
	 * call below.
	 */
	drm_atomic_crtc_for_each_plane(plane, crtc) {
		if (dpu_crtc->smmu_state.transition_error)
			dpu_plane_set_error(plane, true);
		dpu_plane_flush(plane);
	}

	/* Kickoff will be scheduled by outer layer */
}

/**
 * dpu_crtc_destroy_state - state destroy hook
 * @crtc: drm CRTC
 * @state: CRTC state object to release
 */
static void dpu_crtc_destroy_state(struct drm_crtc *crtc,
		struct drm_crtc_state *state)
{
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(state);

	DRM_DEBUG_ATOMIC("crtc%d\n", crtc->base.id);

	__drm_atomic_helper_crtc_destroy_state(state);

	kfree(cstate);
}

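/*
 * Wait for the encoder to signal frame-done for the last kickoff, bounded
 * by DPU_CRTC_FRAME_DONE_TIMEOUT_MS. Returns 0 if nothing is pending or
 * the frame completed in time, -ETIMEDOUT otherwise.
 */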
static int _dpu_crtc_wait_for_frame_done(struct drm_crtc *crtc)
{
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	int ret, rc = 0;

	if (!atomic_read(&dpu_crtc->frame_pending)) {
		DRM_DEBUG_ATOMIC("no frames pending\n");
		return 0;
	}

	DPU_ATRACE_BEGIN("frame done completion wait");
	ret = wait_for_completion_timeout(&dpu_crtc->frame_done_comp,
			msecs_to_jiffies(DPU_CRTC_FRAME_DONE_TIMEOUT_MS));
	if (!ret) {
		DRM_ERROR("frame done wait timed out, ret:%d\n", ret);
		rc = -ETIMEDOUT;
	}
	DPU_ATRACE_END("frame done completion wait");

	return rc;
}

void dpu_crtc_commit_kickoff(struct drm_crtc *crtc)
{
	struct drm_encoder *encoder;
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct dpu_kms *dpu_kms = _dpu_crtc_get_kms(crtc);
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);

	/*
	 * If no mixers have been allocated in dpu_crtc_atomic_check(),
	 * it means we are trying to start a CRTC whose state is disabled:
	 * nothing else needs to be done.
	 */
	if (unlikely(!cstate->num_mixers))
		return;

	DPU_ATRACE_BEGIN("crtc_commit");

	drm_for_each_encoder_mask(encoder, crtc->dev,
				  crtc->state->encoder_mask) {
		if (!dpu_encoder_is_valid_for_commit(encoder)) {
			DRM_DEBUG_ATOMIC("invalid FB not kicking off crtc\n");
			goto end;
		}
	}
	/*
	 * Encoder will flush/start now, unless it has a tx pending. If so, it
	 * may delay and flush at an irq event (e.g. ppdone)
	 */
	drm_for_each_encoder_mask(encoder, crtc->dev,
				  crtc->state->encoder_mask)
		dpu_encoder_prepare_for_kickoff(encoder);

	if (atomic_inc_return(&dpu_crtc->frame_pending) == 1) {
		/* acquire bandwidth and other resources */
		DRM_DEBUG_ATOMIC("crtc%d first commit\n", crtc->base.id);
	} else {
		DRM_DEBUG_ATOMIC("crtc%d commit\n", crtc->base.id);
	}

	dpu_crtc->play_count++;

	dpu_vbif_clear_errors(dpu_kms);

	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask)
		dpu_encoder_kickoff(encoder);

	reinit_completion(&dpu_crtc->frame_done_comp);

end:
	DPU_ATRACE_END("crtc_commit");
}

static void dpu_crtc_reset(struct drm_crtc *crtc)
{
	struct dpu_crtc_state *cstate = kzalloc(sizeof(*cstate), GFP_KERNEL);

	if (crtc->state)
		dpu_crtc_destroy_state(crtc, crtc->state);

	if (cstate)
		__drm_atomic_helper_crtc_reset(crtc, &cstate->base);
	else
		__drm_atomic_helper_crtc_reset(crtc, NULL);
}

/**
 * dpu_crtc_duplicate_state - state duplicate hook
 * @crtc: Pointer to drm crtc structure
 */
static struct drm_crtc_state *dpu_crtc_duplicate_state(struct drm_crtc *crtc)
{
	struct dpu_crtc_state *cstate, *old_cstate = to_dpu_crtc_state(crtc->state);

	cstate = kmemdup(old_cstate, sizeof(*old_cstate), GFP_KERNEL);
	if (!cstate) {
		DPU_ERROR("failed to allocate state\n");
		return NULL;
	}

	/* duplicate base helper */
	__drm_atomic_helper_crtc_duplicate_state(crtc, &cstate->base);

	return &cstate->base;
}

static void dpu_crtc_atomic_print_state(struct drm_printer *p,
					const struct drm_crtc_state *state)
{
	const struct dpu_crtc_state *cstate = to_dpu_crtc_state(state);
	int i;

	for (i = 0; i < cstate->num_mixers; i++) {
		drm_printf(p, "\tlm[%d]=%d\n", i, cstate->mixers[i].hw_lm->idx - LM_0);
		drm_printf(p, "\tctl[%d]=%d\n", i, cstate->mixers[i].lm_ctl->idx - CTL_0);
		if (cstate->mixers[i].hw_dspp)
			drm_printf(p, "\tdspp[%d]=%d\n", i, cstate->mixers[i].hw_dspp->idx - DSPP_0);
	}
}

static void dpu_crtc_disable(struct drm_crtc *crtc,
			     struct drm_atomic_state *state)
{
	struct drm_crtc_state *old_crtc_state = drm_atomic_get_old_crtc_state(state,
									      crtc);
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc->state);
	struct drm_encoder *encoder;
	unsigned long flags;
	bool release_bandwidth = false;

	DRM_DEBUG_KMS("crtc%d\n", crtc->base.id);

	/*
	 * If disable is triggered while in self-refresh mode, reset the
	 * encoder software state so that enable won't trigger a warning
	 * when assigning the crtc.
	 */
	if (old_crtc_state->self_refresh_active) {
		drm_for_each_encoder_mask(encoder, crtc->dev,
					  old_crtc_state->encoder_mask) {
			dpu_encoder_assign_crtc(encoder, NULL);
		}
		return;
	}

	/* Disable/save vblank irq handling */
	drm_crtc_vblank_off(crtc);

	drm_for_each_encoder_mask(encoder, crtc->dev,
				  old_crtc_state->encoder_mask) {
		/*
		 * In video mode, we hold an extra bandwidth reference
		 * as we cannot drop bandwidth at frame-done if any
		 * crtc is being used in video mode.
		 */
		if (dpu_encoder_get_intf_mode(encoder) == INTF_MODE_VIDEO)
			release_bandwidth = true;

		/*
		 * If disable is triggered while PSR is active (e.g. screen
		 * dim in PSR), the encoder->crtc connection is needed to
		 * process the device sleep, so preserve it during the PSR
		 * sequence.
		 */
		if (!crtc->state->self_refresh_active)
			dpu_encoder_assign_crtc(encoder, NULL);
	}

	/* wait for frame_event_done completion */
	if (_dpu_crtc_wait_for_frame_done(crtc))
		DPU_ERROR("crtc%d wait for frame done failed;frame_pending%d\n",
				crtc->base.id,
				atomic_read(&dpu_crtc->frame_pending));

	trace_dpu_crtc_disable(DRMID(crtc), false, dpu_crtc);
	dpu_crtc->enabled = false;

	if (atomic_read(&dpu_crtc->frame_pending)) {
		trace_dpu_crtc_disable_frame_pending(DRMID(crtc),
				atomic_read(&dpu_crtc->frame_pending));
		if (release_bandwidth)
			dpu_core_perf_crtc_release_bw(crtc);
		atomic_set(&dpu_crtc->frame_pending, 0);
	}

	dpu_core_perf_crtc_update(crtc, 0);

	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask)
		dpu_encoder_register_frame_event_callback(encoder, NULL, NULL);

	memset(cstate->mixers, 0, sizeof(cstate->mixers));
	cstate->num_mixers = 0;

	/* disable clk & bw control until clk & bw properties are set */
	cstate->bw_control = false;
	cstate->bw_split_vote = false;

	if (crtc->state->event && !crtc->state->active) {
		spin_lock_irqsave(&crtc->dev->event_lock, flags);
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
		spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
	}

	pm_runtime_put_sync(crtc->dev->dev);
}

static void dpu_crtc_enable(struct drm_crtc *crtc,
			    struct drm_atomic_state *state)
{
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct drm_encoder *encoder;
	bool request_bandwidth = false;
	struct drm_crtc_state *old_crtc_state;

	old_crtc_state = drm_atomic_get_old_crtc_state(state, crtc);

	pm_runtime_get_sync(crtc->dev->dev);

	DRM_DEBUG_KMS("crtc%d\n", crtc->base.id);

	drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask) {
		/*
		 * In video mode, we hold an extra bandwidth reference
		 * as we cannot drop bandwidth at frame-done if any
		 * crtc is being used in video mode.
		 */
		if (dpu_encoder_get_intf_mode(encoder) == INTF_MODE_VIDEO)
			request_bandwidth = true;
		dpu_encoder_register_frame_event_callback(encoder,
				dpu_crtc_frame_event_cb, (void *)crtc);
	}

	if (request_bandwidth)
		atomic_inc(&_dpu_crtc_get_kms(crtc)->bandwidth_ref);

	trace_dpu_crtc_enable(DRMID(crtc), true, dpu_crtc);
	dpu_crtc->enabled = true;

	if (!old_crtc_state->self_refresh_active) {
		drm_for_each_encoder_mask(encoder, crtc->dev, crtc->state->encoder_mask)
			dpu_encoder_assign_crtc(encoder, crtc);
	}

	/* Enable/restore vblank irq handling */
	drm_crtc_vblank_on(crtc);
}

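/*
 * Command-mode interfaces and CRTCs in self-refresh only push a new frame
 * to the panel on an explicit flush, so planes on such CRTCs need the
 * dirtyfb flag for damage to reach the display.
 */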
static bool dpu_crtc_needs_dirtyfb(struct drm_crtc_state *cstate)
{
	struct drm_crtc *crtc = cstate->crtc;
	struct drm_encoder *encoder;

	if (cstate->self_refresh_active)
		return true;

	drm_for_each_encoder_mask(encoder, crtc->dev, cstate->encoder_mask) {
		if (dpu_encoder_get_intf_mode(encoder) == INTF_MODE_CMD)
			return true;
	}

	return false;
}

static int dpu_crtc_atomic_check(struct drm_crtc *crtc,
		struct drm_atomic_state *state)
{
	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
									  crtc);
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct dpu_crtc_state *cstate = to_dpu_crtc_state(crtc_state);

	const struct drm_plane_state *pstate;
	struct drm_plane *plane;

	int rc = 0;

	bool needs_dirtyfb = dpu_crtc_needs_dirtyfb(crtc_state);

	if (!crtc_state->enable || !drm_atomic_crtc_effectively_active(crtc_state)) {
		DRM_DEBUG_ATOMIC("crtc%d -> enable %d, active %d, skip atomic_check\n",
				crtc->base.id, crtc_state->enable,
				crtc_state->active);
		memset(&cstate->new_perf, 0, sizeof(cstate->new_perf));
		return 0;
	}

	DRM_DEBUG_ATOMIC("%s: check\n", dpu_crtc->name);

	/* force a full mode set if active state changed */
	if (crtc_state->active_changed)
		crtc_state->mode_changed = true;

	if (cstate->num_mixers)
		_dpu_crtc_setup_lm_bounds(crtc, crtc_state);

	/* FIXME: move this to dpu_plane_atomic_check? */
	drm_atomic_crtc_state_for_each_plane_state(plane, pstate, crtc_state) {
		struct dpu_plane_state *dpu_pstate = to_dpu_plane_state(pstate);

		if (IS_ERR_OR_NULL(pstate)) {
			rc = PTR_ERR(pstate);
			DPU_ERROR("%s: failed to get plane%d state, %d\n",
					dpu_crtc->name, plane->base.id, rc);
			return rc;
		}

		if (!pstate->visible)
			continue;

		dpu_pstate->needs_dirtyfb = needs_dirtyfb;
	}

	atomic_inc(&_dpu_crtc_get_kms(crtc)->bandwidth_ref);

	rc = dpu_core_perf_crtc_check(crtc, crtc_state);
	if (rc) {
		DPU_ERROR("crtc%d failed performance check %d\n",
				crtc->base.id, rc);
		return rc;
	}

	return 0;
}

int dpu_crtc_vblank(struct drm_crtc *crtc, bool en)
{
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
	struct drm_encoder *enc;

	trace_dpu_crtc_vblank(DRMID(&dpu_crtc->base), en, dpu_crtc);

	/*
	 * Normally we would iterate through encoder_mask in crtc state to find
	 * attached encoders. In this case, we might be disabling vblank _after_
	 * encoder_mask has been cleared.
	 *
	 * Instead, we "assign" a crtc to the encoder in enable and clear it in
	 * disable (which is also after encoder_mask is cleared). So instead of
	 * using encoder mask, we'll ask the encoder to toggle itself iff it's
	 * currently assigned to our crtc.
	 *
	 * Note also that this function cannot be called while crtc is disabled
	 * since we use drm_crtc_vblank_on/off. So we don't need to worry
	 * about the assigned crtcs being inconsistent with the current state
	 * (which means no need to worry about modeset locks).
	 */
	list_for_each_entry(enc, &crtc->dev->mode_config.encoder_list, head) {
		trace_dpu_crtc_vblank_enable(DRMID(crtc), DRMID(enc), en,
					     dpu_crtc);

		dpu_encoder_toggle_vblank_for_crtc(enc, crtc, en);
	}

	return 0;
}

#ifdef CONFIG_DEBUG_FS
static int _dpu_debugfs_status_show(struct seq_file *s, void *data)
{
	struct dpu_crtc *dpu_crtc;
	struct dpu_plane_state *pstate = NULL;
	struct dpu_crtc_mixer *m;

	struct drm_crtc *crtc;
	struct drm_plane *plane;
	struct drm_display_mode *mode;
	struct drm_framebuffer *fb;
	struct drm_plane_state *state;
	struct dpu_crtc_state *cstate;

	int i, out_width;

	dpu_crtc = s->private;
	crtc = &dpu_crtc->base;

	drm_modeset_lock_all(crtc->dev);
	cstate = to_dpu_crtc_state(crtc->state);

	mode = &crtc->state->adjusted_mode;
	out_width = mode->hdisplay / cstate->num_mixers;

	seq_printf(s, "crtc:%d width:%d height:%d\n", crtc->base.id,
				mode->hdisplay, mode->vdisplay);

	seq_puts(s, "\n");

	for (i = 0; i < cstate->num_mixers; ++i) {
		m = &cstate->mixers[i];
		seq_printf(s, "\tmixer:%d ctl:%d width:%d height:%d\n",
			m->hw_lm->idx - LM_0, m->lm_ctl->idx - CTL_0,
			out_width, mode->vdisplay);
	}

	seq_puts(s, "\n");

	drm_atomic_crtc_for_each_plane(plane, crtc) {
		pstate = to_dpu_plane_state(plane->state);
		state = plane->state;

		if (!pstate || !state)
			continue;

		seq_printf(s, "\tplane:%u stage:%d\n", plane->base.id,
			pstate->stage);

		if (plane->state->fb) {
			fb = plane->state->fb;

			seq_printf(s, "\tfb:%d image format:%4.4s wxh:%ux%u ",
				fb->base.id, (char *) &fb->format->format,
				fb->width, fb->height);
			for (i = 0; i < ARRAY_SIZE(fb->format->cpp); ++i)
				seq_printf(s, "cpp[%d]:%u ",
					i, fb->format->cpp[i]);
			seq_puts(s, "\n\t");

			seq_printf(s, "modifier:%8llu ", fb->modifier);
			seq_puts(s, "\n");

			seq_puts(s, "\t");
			for (i = 0; i < ARRAY_SIZE(fb->pitches); i++)
				seq_printf(s, "pitches[%d]:%8u ", i,
					fb->pitches[i]);
			seq_puts(s, "\n");

			seq_puts(s, "\t");
			for (i = 0; i < ARRAY_SIZE(fb->offsets); i++)
				seq_printf(s, "offsets[%d]:%8u ", i,
					fb->offsets[i]);
			seq_puts(s, "\n");
		}

		seq_printf(s, "\tsrc_x:%4d src_y:%4d src_w:%4d src_h:%4d\n",
			state->src_x, state->src_y, state->src_w, state->src_h);

		seq_printf(s, "\tdst x:%4d dst_y:%4d dst_w:%4d dst_h:%4d\n",
			state->crtc_x, state->crtc_y, state->crtc_w,
			state->crtc_h);
		seq_printf(s, "\tsspp[0]:%s\n",
			   pstate->pipe.sspp->cap->name);
		seq_printf(s, "\tmultirect[0]: mode: %d index: %d\n",
			pstate->pipe.multirect_mode, pstate->pipe.multirect_index);
		if (pstate->r_pipe.sspp) {
			seq_printf(s, "\tsspp[1]:%s\n",
				   pstate->r_pipe.sspp->cap->name);
			seq_printf(s, "\tmultirect[1]: mode: %d index: %d\n",
				   pstate->r_pipe.multirect_mode,
				   pstate->r_pipe.multirect_index);
		}

		seq_puts(s, "\n");
	}
	if (dpu_crtc->vblank_cb_count) {
		ktime_t diff = ktime_sub(ktime_get(), dpu_crtc->vblank_cb_time);
		s64 diff_ms = ktime_to_ms(diff);
		s64 fps = diff_ms ? div_s64(
				dpu_crtc->vblank_cb_count * 1000, diff_ms) : 0;

		seq_printf(s,
			"vblank fps:%lld count:%u total:%llums total_framecount:%llu\n",
				fps, dpu_crtc->vblank_cb_count,
				ktime_to_ms(diff), dpu_crtc->play_count);

		/* reset time & count for next measurement */
		dpu_crtc->vblank_cb_count = 0;
		dpu_crtc->vblank_cb_time = ktime_set(0, 0);
	}

	drm_modeset_unlock_all(crtc->dev);

	return 0;
}

DEFINE_SHOW_ATTRIBUTE(_dpu_debugfs_status);

static int dpu_crtc_debugfs_state_show(struct seq_file *s, void *v)
{
	struct drm_crtc *crtc = s->private;
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);

	seq_printf(s, "client type: %d\n", dpu_crtc_get_client_type(crtc));
	seq_printf(s, "intf_mode: %d\n", dpu_crtc_get_intf_mode(crtc));
	seq_printf(s, "core_clk_rate: %llu\n",
			dpu_crtc->cur_perf.core_clk_rate);
	seq_printf(s, "bw_ctl: %llu\n", dpu_crtc->cur_perf.bw_ctl);
	seq_printf(s, "max_per_pipe_ib: %llu\n",
			dpu_crtc->cur_perf.max_per_pipe_ib);

	return 0;
}
DEFINE_SHOW_ATTRIBUTE(dpu_crtc_debugfs_state);

static int _dpu_crtc_init_debugfs(struct drm_crtc *crtc)
{
	struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);

	debugfs_create_file("status", 0400,
			crtc->debugfs_entry,
			dpu_crtc, &_dpu_debugfs_status_fops);
	debugfs_create_file("state", 0600,
			crtc->debugfs_entry,
			&dpu_crtc->base,
			&dpu_crtc_debugfs_state_fops);

	return 0;
}
#else
static int _dpu_crtc_init_debugfs(struct drm_crtc *crtc)
{
	return 0;
}
#endif /* CONFIG_DEBUG_FS */

static int dpu_crtc_late_register(struct drm_crtc *crtc)
{
	return _dpu_crtc_init_debugfs(crtc);
}

static const struct drm_crtc_funcs dpu_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.page_flip = drm_atomic_helper_page_flip,
	.reset = dpu_crtc_reset,
	.atomic_duplicate_state = dpu_crtc_duplicate_state,
	.atomic_destroy_state = dpu_crtc_destroy_state,
	.atomic_print_state = dpu_crtc_atomic_print_state,
	.late_register = dpu_crtc_late_register,
	.verify_crc_source = dpu_crtc_verify_crc_source,
	.set_crc_source = dpu_crtc_set_crc_source,
	.enable_vblank = msm_crtc_enable_vblank,
	.disable_vblank = msm_crtc_disable_vblank,
	.get_vblank_timestamp = drm_crtc_vblank_helper_get_vblank_timestamp,
	.get_vblank_counter = dpu_crtc_get_vblank_counter,
};

static const struct drm_crtc_helper_funcs dpu_crtc_helper_funcs = {
	.atomic_disable = dpu_crtc_disable,
	.atomic_enable = dpu_crtc_enable,
	.atomic_check = dpu_crtc_atomic_check,
	.atomic_begin = dpu_crtc_atomic_begin,
	.atomic_flush = dpu_crtc_atomic_flush,
	.get_scanout_position = dpu_crtc_get_scanout_position,
};

/* initialize crtc */
struct drm_crtc *dpu_crtc_init(struct drm_device *dev, struct drm_plane *plane,
			       struct drm_plane *cursor)
{
	struct msm_drm_private *priv = dev->dev_private;
	struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms);
	struct drm_crtc *crtc = NULL;
	struct dpu_crtc *dpu_crtc;
	int i, ret;

	dpu_crtc = drmm_crtc_alloc_with_planes(dev, struct dpu_crtc, base,
					       plane, cursor,
					       &dpu_crtc_funcs,
					       NULL);

	if (IS_ERR(dpu_crtc))
		return ERR_CAST(dpu_crtc);

	crtc = &dpu_crtc->base;
	crtc->dev = dev;

	spin_lock_init(&dpu_crtc->spin_lock);
	atomic_set(&dpu_crtc->frame_pending, 0);

	init_completion(&dpu_crtc->frame_done_comp);

	INIT_LIST_HEAD(&dpu_crtc->frame_event_list);

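	/*
	 * Frame events are preallocated as a fixed pool and recycled via
	 * frame_event_list: dpu_crtc_frame_event_cb() may run in IRQ
	 * context, where it can only pop a free entry, never allocate.
	 */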
	for (i = 0; i < ARRAY_SIZE(dpu_crtc->frame_events); i++) {
		INIT_LIST_HEAD(&dpu_crtc->frame_events[i].list);
		list_add(&dpu_crtc->frame_events[i].list,
				&dpu_crtc->frame_event_list);
		kthread_init_work(&dpu_crtc->frame_events[i].work,
				dpu_crtc_frame_event_work);
	}

	drm_crtc_helper_add(crtc, &dpu_crtc_helper_funcs);

	if (dpu_kms->catalog->dspp_count)
		drm_crtc_enable_color_mgmt(crtc, 0, true, 0);

	/* save user friendly CRTC name for later */
	snprintf(dpu_crtc->name, DPU_CRTC_NAME_SIZE, "crtc%u", crtc->base.id);

	/* initialize event handling */
	spin_lock_init(&dpu_crtc->event_lock);

	ret = drm_self_refresh_helper_init(crtc);
	if (ret) {
		DPU_ERROR("Failed to initialize %s with self-refresh helpers %d\n",
			crtc->name, ret);
		return ERR_PTR(ret);
	}

	DRM_DEBUG_KMS("%s: successfully initialized crtc\n", dpu_crtc->name);
	return crtc;
}