xref: /linux/drivers/gpu/drm/sprd/sprd_dpu.c (revision 0be3ff0c)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2020 Unisoc Inc.
4  */
5 
6 #include <linux/component.h>
7 #include <linux/delay.h>
8 #include <linux/dma-buf.h>
9 #include <linux/io.h>
10 #include <linux/module.h>
11 #include <linux/of.h>
12 #include <linux/of_address.h>
13 #include <linux/of_device.h>
14 #include <linux/of_graph.h>
15 #include <linux/of_irq.h>
16 #include <linux/wait.h>
17 #include <linux/workqueue.h>
18 
19 #include <drm/drm_atomic_helper.h>
20 #include <drm/drm_crtc_helper.h>
21 #include <drm/drm_fb_cma_helper.h>
22 #include <drm/drm_gem_cma_helper.h>
23 #include <drm/drm_gem_framebuffer_helper.h>
24 #include <drm/drm_plane_helper.h>
25 
26 #include "sprd_drm.h"
27 #include "sprd_dpu.h"
28 #include "sprd_dsi.h"
29 
30 /* Global control registers */
31 #define REG_DPU_CTRL	0x04
32 #define REG_DPU_CFG0	0x08
33 #define REG_PANEL_SIZE	0x20
34 #define REG_BLEND_SIZE	0x24
35 #define REG_BG_COLOR	0x2C
36 
37 /* Layer0 control registers */
38 #define REG_LAY_BASE_ADDR0	0x30
39 #define REG_LAY_BASE_ADDR1	0x34
40 #define REG_LAY_BASE_ADDR2	0x38
41 #define REG_LAY_CTRL		0x40
42 #define REG_LAY_SIZE		0x44
43 #define REG_LAY_PITCH		0x48
44 #define REG_LAY_POS		0x4C
45 #define REG_LAY_ALPHA		0x50
46 #define REG_LAY_CROP_START	0x5C
47 
48 /* Interrupt control registers */
49 #define REG_DPU_INT_EN		0x1E0
50 #define REG_DPU_INT_CLR		0x1E4
51 #define REG_DPU_INT_STS		0x1E8
52 
53 /* DPI control registers */
54 #define REG_DPI_CTRL		0x1F0
55 #define REG_DPI_H_TIMING	0x1F4
56 #define REG_DPI_V_TIMING	0x1F8
57 
58 /* MMU control registers */
59 #define REG_MMU_EN			0x800
60 #define REG_MMU_VPN_RANGE		0x80C
61 #define REG_MMU_PPN1			0x83C
62 #define REG_MMU_RANGE1			0x840
63 #define REG_MMU_PPN2			0x844
64 #define REG_MMU_RANGE2			0x848
65 
66 /* Global control bits */
67 #define BIT_DPU_RUN			BIT(0)
68 #define BIT_DPU_STOP			BIT(1)
69 #define BIT_DPU_REG_UPDATE		BIT(2)
70 #define BIT_DPU_IF_EDPI			BIT(0)
71 
72 /* Layer control bits */
73 #define BIT_DPU_LAY_EN				BIT(0)
74 #define BIT_DPU_LAY_LAYER_ALPHA			(0x01 << 2)
75 #define BIT_DPU_LAY_COMBO_ALPHA			(0x02 << 2)
76 #define BIT_DPU_LAY_FORMAT_YUV422_2PLANE		(0x00 << 4)
77 #define BIT_DPU_LAY_FORMAT_YUV420_2PLANE		(0x01 << 4)
78 #define BIT_DPU_LAY_FORMAT_YUV420_3PLANE		(0x02 << 4)
79 #define BIT_DPU_LAY_FORMAT_ARGB8888			(0x03 << 4)
80 #define BIT_DPU_LAY_FORMAT_RGB565			(0x04 << 4)
81 #define BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3		(0x00 << 8)
82 #define BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0		(0x01 << 8)
83 #define BIT_DPU_LAY_NO_SWITCH			(0x00 << 10)
84 #define BIT_DPU_LAY_RB_OR_UV_SWITCH		(0x01 << 10)
85 #define BIT_DPU_LAY_MODE_BLEND_NORMAL		(0x00 << 16)
86 #define BIT_DPU_LAY_MODE_BLEND_PREMULT		(0x01 << 16)
87 #define BIT_DPU_LAY_ROTATION_0		(0x00 << 20)
88 #define BIT_DPU_LAY_ROTATION_90		(0x01 << 20)
89 #define BIT_DPU_LAY_ROTATION_180	(0x02 << 20)
90 #define BIT_DPU_LAY_ROTATION_270	(0x03 << 20)
91 #define BIT_DPU_LAY_ROTATION_0_M	(0x04 << 20)
92 #define BIT_DPU_LAY_ROTATION_90_M	(0x05 << 20)
93 #define BIT_DPU_LAY_ROTATION_180_M	(0x06 << 20)
94 #define BIT_DPU_LAY_ROTATION_270_M	(0x07 << 20)
95 
96 /* Interrupt control & status bits */
97 #define BIT_DPU_INT_DONE		BIT(0)
98 #define BIT_DPU_INT_TE			BIT(1)
99 #define BIT_DPU_INT_ERR			BIT(2)
100 #define BIT_DPU_INT_UPDATE_DONE		BIT(4)
101 #define BIT_DPU_INT_VSYNC		BIT(5)
102 
103 /* DPI control bits */
104 #define BIT_DPU_EDPI_TE_EN		BIT(8)
105 #define BIT_DPU_EDPI_FROM_EXTERNAL_PAD	BIT(10)
106 #define BIT_DPU_DPI_HALT_EN		BIT(16)
107 
/* Pixel formats the DPU layer hardware can scan out directly. */
static const u32 layer_fmts[] = {
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888,
	DRM_FORMAT_RGBA8888,
	DRM_FORMAT_BGRA8888,
	DRM_FORMAT_RGBX8888,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_BGR565,
	DRM_FORMAT_NV12,
	DRM_FORMAT_NV21,
	DRM_FORMAT_NV16,
	DRM_FORMAT_NV61,
	DRM_FORMAT_YUV420,
	DRM_FORMAT_YVU420,
};
125 
/* One DPU hardware layer, exposed to userspace as a DRM plane. */
struct sprd_plane {
	struct drm_plane base;
};
129 
/*
 * Wait (up to 500 ms) for the stop-done event raised by the DPU ISR.
 *
 * Returns 0 if the controller is already stopped or the event arrives
 * in time, -ETIMEDOUT otherwise.
 */
static int dpu_wait_stop_done(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;
	int rc;

	if (ctx->stopped)
		return 0;

	rc = wait_event_interruptible_timeout(ctx->wait_queue, ctx->evt_stop,
					      msecs_to_jiffies(500));
	ctx->evt_stop = false;

	/*
	 * NOTE(review): "stopped" is set even when the wait below timed
	 * out - presumably to keep later callers from re-waiting on a
	 * wedged controller; confirm this is intentional.
	 */
	ctx->stopped = true;

	if (!rc) {
		drm_err(dpu->drm, "dpu wait for stop done time out!\n");
		return -ETIMEDOUT;
	}

	return 0;
}
151 
152 static int dpu_wait_update_done(struct sprd_dpu *dpu)
153 {
154 	struct dpu_context *ctx = &dpu->ctx;
155 	int rc;
156 
157 	ctx->evt_update = false;
158 
159 	rc = wait_event_interruptible_timeout(ctx->wait_queue, ctx->evt_update,
160 					      msecs_to_jiffies(500));
161 
162 	if (!rc) {
163 		drm_err(dpu->drm, "dpu wait for reg update done time out!\n");
164 		return -ETIMEDOUT;
165 	}
166 
167 	return 0;
168 }
169 
/*
 * Translate a framebuffer's DRM fourcc into the DPU layer-control
 * format, endian and RB/UV-switch bits.
 *
 * Returns 0 for fourccs the hardware cannot handle; callers treat that
 * as "unsupported" (see sprd_plane_atomic_check()).
 */
static u32 drm_format_to_dpu(struct drm_framebuffer *fb)
{
	u32 format = 0;

	switch (fb->format->format) {
	case DRM_FORMAT_BGRA8888:
		/* BGRA8888 -> ARGB8888 */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
		break;
	case DRM_FORMAT_RGBX8888:
	case DRM_FORMAT_RGBA8888:
		/* RGBA8888 -> ABGR8888 */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
		fallthrough;
	case DRM_FORMAT_ABGR8888:
		/* RB switch */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		fallthrough;
	case DRM_FORMAT_ARGB8888:
		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
		break;
	case DRM_FORMAT_XBGR8888:
		/* RB switch */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		fallthrough;
	case DRM_FORMAT_XRGB8888:
		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
		break;
	case DRM_FORMAT_BGR565:
		/* RB switch */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		fallthrough;
	case DRM_FORMAT_RGB565:
		format |= BIT_DPU_LAY_FORMAT_RGB565;
		break;
	case DRM_FORMAT_NV12:
		/* 2-Lane: Yuv420 */
		format |= BIT_DPU_LAY_FORMAT_YUV420_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_NO_SWITCH;
		break;
	case DRM_FORMAT_NV21:
		/* 2-Lane: Yuv420 */
		format |= BIT_DPU_LAY_FORMAT_YUV420_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		break;
	case DRM_FORMAT_NV16:
		/* 2-Lane: Yuv422 */
		format |= BIT_DPU_LAY_FORMAT_YUV422_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
		/* UV endian */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		break;
	case DRM_FORMAT_NV61:
		/* 2-Lane: Yuv422 */
		format |= BIT_DPU_LAY_FORMAT_YUV422_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_NO_SWITCH;
		break;
	case DRM_FORMAT_YUV420:
		format |= BIT_DPU_LAY_FORMAT_YUV420_3PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_NO_SWITCH;
		break;
	case DRM_FORMAT_YVU420:
		format |= BIT_DPU_LAY_FORMAT_YUV420_3PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		break;
	default:
		break;
	}

	return format;
}
258 
259 static u32 drm_rotation_to_dpu(struct drm_plane_state *state)
260 {
261 	u32 rotation = 0;
262 
263 	switch (state->rotation) {
264 	default:
265 	case DRM_MODE_ROTATE_0:
266 		rotation = BIT_DPU_LAY_ROTATION_0;
267 		break;
268 	case DRM_MODE_ROTATE_90:
269 		rotation = BIT_DPU_LAY_ROTATION_90;
270 		break;
271 	case DRM_MODE_ROTATE_180:
272 		rotation = BIT_DPU_LAY_ROTATION_180;
273 		break;
274 	case DRM_MODE_ROTATE_270:
275 		rotation = BIT_DPU_LAY_ROTATION_270;
276 		break;
277 	case DRM_MODE_REFLECT_Y:
278 		rotation = BIT_DPU_LAY_ROTATION_180_M;
279 		break;
280 	case (DRM_MODE_REFLECT_Y | DRM_MODE_ROTATE_90):
281 		rotation = BIT_DPU_LAY_ROTATION_90_M;
282 		break;
283 	case DRM_MODE_REFLECT_X:
284 		rotation = BIT_DPU_LAY_ROTATION_0_M;
285 		break;
286 	case (DRM_MODE_REFLECT_X | DRM_MODE_ROTATE_90):
287 		rotation = BIT_DPU_LAY_ROTATION_270_M;
288 		break;
289 	}
290 
291 	return rotation;
292 }
293 
294 static u32 drm_blend_to_dpu(struct drm_plane_state *state)
295 {
296 	u32 blend = 0;
297 
298 	switch (state->pixel_blend_mode) {
299 	case DRM_MODE_BLEND_COVERAGE:
300 		/* alpha mode select - combo alpha */
301 		blend |= BIT_DPU_LAY_COMBO_ALPHA;
302 		/* Normal mode */
303 		blend |= BIT_DPU_LAY_MODE_BLEND_NORMAL;
304 		break;
305 	case DRM_MODE_BLEND_PREMULTI:
306 		/* alpha mode select - combo alpha */
307 		blend |= BIT_DPU_LAY_COMBO_ALPHA;
308 		/* Pre-mult mode */
309 		blend |= BIT_DPU_LAY_MODE_BLEND_PREMULT;
310 		break;
311 	case DRM_MODE_BLEND_PIXEL_NONE:
312 	default:
313 		/* don't do blending, maybe RGBX */
314 		/* alpha mode select - layer alpha */
315 		blend |= BIT_DPU_LAY_LAYER_ALPHA;
316 		break;
317 	}
318 
319 	return blend;
320 }
321 
/*
 * Program one hardware layer (selected by state->zpos) from a committed
 * plane state: plane base addresses, position, size, crop, alpha,
 * pitch, and finally the control word that enables the layer.
 */
static void sprd_dpu_layer(struct sprd_dpu *dpu, struct drm_plane_state *state)
{
	struct dpu_context *ctx = &dpu->ctx;
	struct drm_gem_cma_object *cma_obj;
	struct drm_framebuffer *fb = state->fb;
	u32 addr, size, offset, pitch, blend, format, rotation;
	/* src_* are 16.16 fixed point; drop the fractional part */
	u32 src_x = state->src_x >> 16;
	u32 src_y = state->src_y >> 16;
	u32 src_w = state->src_w >> 16;
	u32 src_h = state->src_h >> 16;
	u32 dst_x = state->crtc_x;
	u32 dst_y = state->crtc_y;
	u32 alpha = state->alpha;
	u32 index = state->zpos;
	int i;

	/* x/width in the low halfword, y/height in the high halfword */
	offset = (dst_x & 0xffff) | (dst_y << 16);
	size = (src_w & 0xffff) | (src_h << 16);

	/* one base address register per plane of the framebuffer */
	for (i = 0; i < fb->format->num_planes; i++) {
		cma_obj = drm_fb_cma_get_gem_obj(fb, i);
		addr = cma_obj->paddr + fb->offsets[i];

		if (i == 0)
			layer_reg_wr(ctx, REG_LAY_BASE_ADDR0, addr, index);
		else if (i == 1)
			layer_reg_wr(ctx, REG_LAY_BASE_ADDR1, addr, index);
		else
			layer_reg_wr(ctx, REG_LAY_BASE_ADDR2, addr, index);
	}

	if (fb->format->num_planes == 3) {
		/* UV pitch is 1/2 of Y pitch */
		/*
		 * NOTE(review): both fields are written with the same
		 * Y-pitch value, with the upper field shifted by 15 -
		 * confirm against the register layout that this indeed
		 * yields half the Y pitch for the UV planes.
		 */
		pitch = (fb->pitches[0] / fb->format->cpp[0]) |
				(fb->pitches[0] / fb->format->cpp[0] << 15);
	} else {
		/* pitch is expressed in pixels, not bytes */
		pitch = fb->pitches[0] / fb->format->cpp[0];
	}

	layer_reg_wr(ctx, REG_LAY_POS, offset, index);
	layer_reg_wr(ctx, REG_LAY_SIZE, size, index);
	layer_reg_wr(ctx, REG_LAY_CROP_START,
		     src_y << 16 | src_x, index);
	layer_reg_wr(ctx, REG_LAY_ALPHA, alpha, index);
	layer_reg_wr(ctx, REG_LAY_PITCH, pitch, index);

	format = drm_format_to_dpu(fb);
	blend = drm_blend_to_dpu(state);
	rotation = drm_rotation_to_dpu(state);

	/* control word last: enables the layer with the config above */
	layer_reg_wr(ctx, REG_LAY_CTRL, BIT_DPU_LAY_EN |
				format |
				blend |
				rotation,
				index);
}
378 
/*
 * Latch the layer configuration programmed by sprd_dpu_layer() into the
 * hardware, using the mechanism appropriate for the active interface:
 * DPI uses the shadow-register update bit, EDPI restarts the DPU.
 */
static void sprd_dpu_flip(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;

	/*
	 * Make sure the dpu is in stop status. DPU has no shadow
	 * registers in EDPI mode. So the config registers can only be
	 * updated in the rising edge of DPU_RUN bit.
	 */
	if (ctx->if_type == SPRD_DPU_IF_EDPI)
		dpu_wait_stop_done(dpu);

	/* update trigger and wait */
	if (ctx->if_type == SPRD_DPU_IF_DPI) {
		if (!ctx->stopped) {
			dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_REG_UPDATE);
			dpu_wait_update_done(dpu);
		}

		/* re-arm the underflow interrupt (the ISR masks it) */
		dpu_reg_set(ctx, REG_DPU_INT_EN, BIT_DPU_INT_ERR);
	} else if (ctx->if_type == SPRD_DPU_IF_EDPI) {
		dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_RUN);

		ctx->stopped = false;
	}
}
405 
/*
 * Bring the DPU to a known state: black background, MMU configured for
 * pass-through over the full address range, interface (DPI vs EDPI)
 * selected, and the interrupt mask for that interface installed.
 */
static void sprd_dpu_init(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;
	u32 int_mask = 0;

	writel(0x00, ctx->base + REG_BG_COLOR);
	writel(0x00, ctx->base + REG_MMU_EN);
	writel(0x00, ctx->base + REG_MMU_PPN1);
	writel(0xffff, ctx->base + REG_MMU_RANGE1);
	writel(0x00, ctx->base + REG_MMU_PPN2);
	writel(0xffff, ctx->base + REG_MMU_RANGE2);
	writel(0x1ffff, ctx->base + REG_MMU_VPN_RANGE);

	if (ctx->if_type == SPRD_DPU_IF_DPI) {
		/* use dpi as interface */
		dpu_reg_clr(ctx, REG_DPU_CFG0, BIT_DPU_IF_EDPI);
		/* disable Halt function for SPRD DSI */
		dpu_reg_clr(ctx, REG_DPI_CTRL, BIT_DPU_DPI_HALT_EN);
		/* select te from external pad */
		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_FROM_EXTERNAL_PAD);

		/* enable dpu update done INT */
		int_mask |= BIT_DPU_INT_UPDATE_DONE;
		/* enable dpu done INT */
		int_mask |= BIT_DPU_INT_DONE;
		/* enable dpu dpi vsync */
		int_mask |= BIT_DPU_INT_VSYNC;
		/* enable dpu TE INT */
		int_mask |= BIT_DPU_INT_TE;
		/* enable underflow err INT */
		int_mask |= BIT_DPU_INT_ERR;
	} else if (ctx->if_type == SPRD_DPU_IF_EDPI) {
		/* use edpi as interface */
		dpu_reg_set(ctx, REG_DPU_CFG0, BIT_DPU_IF_EDPI);
		/* use external te */
		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_FROM_EXTERNAL_PAD);
		/* enable te */
		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_TE_EN);

		/* enable stop done INT */
		int_mask |= BIT_DPU_INT_DONE;
		/* enable TE INT */
		int_mask |= BIT_DPU_INT_TE;
	}

	writel(int_mask, ctx->base + REG_DPU_INT_EN);
}
453 
454 static void sprd_dpu_fini(struct sprd_dpu *dpu)
455 {
456 	struct dpu_context *ctx = &dpu->ctx;
457 
458 	writel(0x00, ctx->base + REG_DPU_INT_EN);
459 	writel(0xff, ctx->base + REG_DPU_INT_CLR);
460 }
461 
462 static void sprd_dpi_init(struct sprd_dpu *dpu)
463 {
464 	struct dpu_context *ctx = &dpu->ctx;
465 	u32 reg_val;
466 	u32 size;
467 
468 	size = (ctx->vm.vactive << 16) | ctx->vm.hactive;
469 	writel(size, ctx->base + REG_PANEL_SIZE);
470 	writel(size, ctx->base + REG_BLEND_SIZE);
471 
472 	if (ctx->if_type == SPRD_DPU_IF_DPI) {
473 		/* set dpi timing */
474 		reg_val = ctx->vm.hsync_len << 0 |
475 			  ctx->vm.hback_porch << 8 |
476 			  ctx->vm.hfront_porch << 20;
477 		writel(reg_val, ctx->base + REG_DPI_H_TIMING);
478 
479 		reg_val = ctx->vm.vsync_len << 0 |
480 			  ctx->vm.vback_porch << 8 |
481 			  ctx->vm.vfront_porch << 20;
482 		writel(reg_val, ctx->base + REG_DPI_V_TIMING);
483 	}
484 }
485 
486 void sprd_dpu_run(struct sprd_dpu *dpu)
487 {
488 	struct dpu_context *ctx = &dpu->ctx;
489 
490 	dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_RUN);
491 
492 	ctx->stopped = false;
493 }
494 
495 void sprd_dpu_stop(struct sprd_dpu *dpu)
496 {
497 	struct dpu_context *ctx = &dpu->ctx;
498 
499 	if (ctx->if_type == SPRD_DPU_IF_DPI)
500 		dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_STOP);
501 
502 	dpu_wait_stop_done(dpu);
503 }
504 
505 static int sprd_plane_atomic_check(struct drm_plane *plane,
506 				   struct drm_atomic_state *state)
507 {
508 	struct drm_plane_state *plane_state = drm_atomic_get_new_plane_state(state,
509 									     plane);
510 	struct drm_crtc_state *crtc_state;
511 	u32 fmt;
512 
513 	if (!plane_state->fb || !plane_state->crtc)
514 		return 0;
515 
516 	fmt = drm_format_to_dpu(plane_state->fb);
517 	if (!fmt)
518 		return -EINVAL;
519 
520 	crtc_state = drm_atomic_get_crtc_state(plane_state->state, plane_state->crtc);
521 	if (IS_ERR(crtc_state))
522 		return PTR_ERR(crtc_state);
523 
524 	return drm_atomic_helper_check_plane_state(plane_state, crtc_state,
525 						  DRM_PLANE_HELPER_NO_SCALING,
526 						  DRM_PLANE_HELPER_NO_SCALING,
527 						  true, true);
528 }
529 
530 static void sprd_plane_atomic_update(struct drm_plane *drm_plane,
531 				     struct drm_atomic_state *state)
532 {
533 	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state,
534 									   drm_plane);
535 	struct sprd_dpu *dpu = to_sprd_crtc(new_state->crtc);
536 
537 	/* start configure dpu layers */
538 	sprd_dpu_layer(dpu, new_state);
539 }
540 
541 static void sprd_plane_atomic_disable(struct drm_plane *drm_plane,
542 				      struct drm_atomic_state *state)
543 {
544 	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
545 									   drm_plane);
546 	struct sprd_dpu *dpu = to_sprd_crtc(old_state->crtc);
547 
548 	layer_reg_wr(&dpu->ctx, REG_LAY_CTRL, 0x00, old_state->zpos);
549 }
550 
551 static void sprd_plane_create_properties(struct sprd_plane *plane, int index)
552 {
553 	unsigned int supported_modes = BIT(DRM_MODE_BLEND_PIXEL_NONE) |
554 				       BIT(DRM_MODE_BLEND_PREMULTI) |
555 				       BIT(DRM_MODE_BLEND_COVERAGE);
556 
557 	/* create rotation property */
558 	drm_plane_create_rotation_property(&plane->base,
559 					   DRM_MODE_ROTATE_0,
560 					   DRM_MODE_ROTATE_MASK |
561 					   DRM_MODE_REFLECT_MASK);
562 
563 	/* create alpha property */
564 	drm_plane_create_alpha_property(&plane->base);
565 
566 	/* create blend mode property */
567 	drm_plane_create_blend_mode_property(&plane->base, supported_modes);
568 
569 	/* create zpos property */
570 	drm_plane_create_zpos_immutable_property(&plane->base, index);
571 }
572 
/* Atomic helper callbacks shared by all DPU planes. */
static const struct drm_plane_helper_funcs sprd_plane_helper_funcs = {
	.atomic_check = sprd_plane_atomic_check,
	.atomic_update = sprd_plane_atomic_update,
	.atomic_disable = sprd_plane_atomic_disable,
};
578 
579 static const struct drm_plane_funcs sprd_plane_funcs = {
580 	.update_plane = drm_atomic_helper_update_plane,
581 	.disable_plane	= drm_atomic_helper_disable_plane,
582 	.destroy = drm_plane_cleanup,
583 	.reset = drm_atomic_helper_plane_reset,
584 	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
585 	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
586 };
587 
588 static struct sprd_plane *sprd_planes_init(struct drm_device *drm)
589 {
590 	struct sprd_plane *plane, *primary;
591 	enum drm_plane_type plane_type;
592 	int i;
593 
594 	for (i = 0; i < 6; i++) {
595 		plane_type = (i == 0) ? DRM_PLANE_TYPE_PRIMARY :
596 					DRM_PLANE_TYPE_OVERLAY;
597 
598 		plane = drmm_universal_plane_alloc(drm, struct sprd_plane, base,
599 						   1, &sprd_plane_funcs,
600 						   layer_fmts, ARRAY_SIZE(layer_fmts),
601 						   NULL, plane_type, NULL);
602 		if (IS_ERR(plane)) {
603 			drm_err(drm, "failed to init drm plane: %d\n", i);
604 			return plane;
605 		}
606 
607 		drm_plane_helper_add(&plane->base, &sprd_plane_helper_funcs);
608 
609 		sprd_plane_create_properties(plane, i);
610 
611 		if (i == 0)
612 			primary = plane;
613 	}
614 
615 	return primary;
616 }
617 
/*
 * Capture the adjusted mode as a videomode, pick DPI vs EDPI based on
 * whether the attached DSI peripheral runs in video mode, and program
 * the DPI timing registers.
 */
static void sprd_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
	struct drm_encoder *encoder;
	struct sprd_dsi *dsi;

	drm_display_mode_to_videomode(mode, &dpu->ctx.vm);

	drm_for_each_encoder_mask(encoder, crtc->dev,
				  crtc->state->encoder_mask) {
		dsi = encoder_to_dsi(encoder);

		/* video-mode DSI panels use DPI, command-mode use EDPI */
		if (dsi->slave->mode_flags & MIPI_DSI_MODE_VIDEO)
			dpu->ctx.if_type = SPRD_DPU_IF_DPI;
		else
			dpu->ctx.if_type = SPRD_DPU_IF_EDPI;
	}

	sprd_dpi_init(dpu);
}
639 
/* Initialize the controller and turn vblank handling on. */
static void sprd_crtc_atomic_enable(struct drm_crtc *crtc,
				    struct drm_atomic_state *state)
{
	sprd_dpu_init(to_sprd_crtc(crtc));

	drm_crtc_vblank_on(crtc);
}
649 
650 static void sprd_crtc_atomic_disable(struct drm_crtc *crtc,
651 				     struct drm_atomic_state *state)
652 {
653 	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
654 	struct drm_device *drm = dpu->base.dev;
655 
656 	drm_crtc_vblank_off(&dpu->base);
657 
658 	sprd_dpu_fini(dpu);
659 
660 	spin_lock_irq(&drm->event_lock);
661 	if (crtc->state->event) {
662 		drm_crtc_send_vblank_event(crtc, crtc->state->event);
663 		crtc->state->event = NULL;
664 	}
665 	spin_unlock_irq(&drm->event_lock);
666 }
667 
668 static void sprd_crtc_atomic_flush(struct drm_crtc *crtc,
669 				   struct drm_atomic_state *state)
670 
671 {
672 	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
673 	struct drm_device *drm = dpu->base.dev;
674 
675 	sprd_dpu_flip(dpu);
676 
677 	spin_lock_irq(&drm->event_lock);
678 	if (crtc->state->event) {
679 		drm_crtc_send_vblank_event(crtc, crtc->state->event);
680 		crtc->state->event = NULL;
681 	}
682 	spin_unlock_irq(&drm->event_lock);
683 }
684 
685 static int sprd_crtc_enable_vblank(struct drm_crtc *crtc)
686 {
687 	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
688 
689 	dpu_reg_set(&dpu->ctx, REG_DPU_INT_EN, BIT_DPU_INT_VSYNC);
690 
691 	return 0;
692 }
693 
694 static void sprd_crtc_disable_vblank(struct drm_crtc *crtc)
695 {
696 	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
697 
698 	dpu_reg_clr(&dpu->ctx, REG_DPU_INT_EN, BIT_DPU_INT_VSYNC);
699 }
700 
/* Atomic helper callbacks for the DPU CRTC. */
static const struct drm_crtc_helper_funcs sprd_crtc_helper_funcs = {
	.mode_set_nofb	= sprd_crtc_mode_set_nofb,
	.atomic_flush	= sprd_crtc_atomic_flush,
	.atomic_enable	= sprd_crtc_atomic_enable,
	.atomic_disable	= sprd_crtc_atomic_disable,
};
707 
/* Core CRTC vtable; mode setting goes through the atomic helpers. */
static const struct drm_crtc_funcs sprd_crtc_funcs = {
	.destroy	= drm_crtc_cleanup,
	.set_config	= drm_atomic_helper_set_config,
	.page_flip	= drm_atomic_helper_page_flip,
	.reset		= drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state	= drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= sprd_crtc_enable_vblank,
	.disable_vblank	= sprd_crtc_disable_vblank,
};
718 
/*
 * Allocate the DPU CRTC (DRM-managed) with the given primary plane and
 * wire up its OF graph port so drm_of_find_possible_crtcs() works.
 *
 * Returns the new sprd_dpu, or an ERR_PTR on failure.
 */
static struct sprd_dpu *sprd_crtc_init(struct drm_device *drm,
				       struct drm_plane *primary, struct device *dev)
{
	struct device_node *port;
	struct sprd_dpu *dpu;

	dpu = drmm_crtc_alloc_with_planes(drm, struct sprd_dpu, base,
					  primary, NULL,
					&sprd_crtc_funcs, NULL);
	if (IS_ERR(dpu)) {
		drm_err(drm, "failed to init crtc\n");
		return dpu;
	}
	drm_crtc_helper_add(&dpu->base, &sprd_crtc_helper_funcs);

	/*
	 * set crtc port so that drm_of_find_possible_crtcs call works
	 */
	port = of_graph_get_port_by_id(dev->of_node, 0);
	if (!port) {
		drm_err(drm, "failed to found crtc output port for %s\n",
			dev->of_node->full_name);
		return ERR_PTR(-EINVAL);
	}
	/*
	 * NOTE(review): the node reference is dropped right after the
	 * pointer is stored; presumably DRM only ever compares this
	 * pointer, but confirm the lifetime assumption.
	 */
	dpu->base.port = port;
	of_node_put(port);

	return dpu;
}
748 
/*
 * DPU interrupt handler: wakes waiters for update-done and stop-done
 * events, forwards vsync to DRM vblank handling, and masks the
 * underflow-error interrupt after warning (sprd_dpu_flip() re-arms it).
 */
static irqreturn_t sprd_dpu_isr(int irq, void *data)
{
	struct sprd_dpu *dpu = data;
	struct dpu_context *ctx = &dpu->ctx;
	u32 reg_val, int_mask = 0;

	reg_val = readl(ctx->base + REG_DPU_INT_STS);

	/* disable err interrupt */
	if (reg_val & BIT_DPU_INT_ERR) {
		int_mask |= BIT_DPU_INT_ERR;
		drm_warn(dpu->drm, "Warning: dpu underflow!\n");
	}

	/* dpu update done isr */
	if (reg_val & BIT_DPU_INT_UPDATE_DONE) {
		ctx->evt_update = true;
		wake_up_interruptible_all(&ctx->wait_queue);
	}

	/* dpu stop done isr */
	if (reg_val & BIT_DPU_INT_DONE) {
		ctx->evt_stop = true;
		wake_up_interruptible_all(&ctx->wait_queue);
	}

	if (reg_val & BIT_DPU_INT_VSYNC)
		drm_crtc_handle_vblank(&dpu->base);

	/* ack everything we saw, then mask what we decided to disable */
	writel(reg_val, ctx->base + REG_DPU_INT_CLR);
	dpu_reg_clr(ctx, REG_DPU_INT_EN, int_mask);

	return IRQ_HANDLED;
}
783 
784 static int sprd_dpu_context_init(struct sprd_dpu *dpu,
785 				 struct device *dev)
786 {
787 	struct platform_device *pdev = to_platform_device(dev);
788 	struct dpu_context *ctx = &dpu->ctx;
789 	struct resource *res;
790 	int ret;
791 
792 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
793 	if (!res) {
794 		dev_err(dev, "failed to get I/O resource\n");
795 		return -EINVAL;
796 	}
797 
798 	ctx->base = devm_ioremap(dev, res->start, resource_size(res));
799 	if (!ctx->base) {
800 		dev_err(dev, "failed to map dpu registers\n");
801 		return -EFAULT;
802 	}
803 
804 	ctx->irq = platform_get_irq(pdev, 0);
805 	if (ctx->irq < 0) {
806 		dev_err(dev, "failed to get dpu irq\n");
807 		return ctx->irq;
808 	}
809 
810 	/* disable and clear interrupts before register dpu IRQ. */
811 	writel(0x00, ctx->base + REG_DPU_INT_EN);
812 	writel(0xff, ctx->base + REG_DPU_INT_CLR);
813 
814 	ret = devm_request_irq(dev, ctx->irq, sprd_dpu_isr,
815 			       IRQF_TRIGGER_NONE, "DPU", dpu);
816 	if (ret) {
817 		dev_err(dev, "failed to register dpu irq handler\n");
818 		return ret;
819 	}
820 
821 	init_waitqueue_head(&ctx->wait_queue);
822 
823 	return 0;
824 }
825 
826 static int sprd_dpu_bind(struct device *dev, struct device *master, void *data)
827 {
828 	struct drm_device *drm = data;
829 	struct sprd_dpu *dpu;
830 	struct sprd_plane *plane;
831 	int ret;
832 
833 	plane = sprd_planes_init(drm);
834 	if (IS_ERR(plane))
835 		return PTR_ERR(plane);
836 
837 	dpu = sprd_crtc_init(drm, &plane->base, dev);
838 	if (IS_ERR(dpu))
839 		return PTR_ERR(dpu);
840 
841 	dpu->drm = drm;
842 	dev_set_drvdata(dev, dpu);
843 
844 	ret = sprd_dpu_context_init(dpu, dev);
845 	if (ret)
846 		return ret;
847 
848 	return 0;
849 }
850 
/* Component framework hooks; no unbind needed (resources are devm/drmm). */
static const struct component_ops dpu_component_ops = {
	.bind = sprd_dpu_bind,
};
854 
/* Devicetree compatibles this driver binds to. */
static const struct of_device_id dpu_match_table[] = {
	{ .compatible = "sprd,sharkl3-dpu" },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, dpu_match_table);
860 
/* Register with the component framework; real setup happens in bind. */
static int sprd_dpu_probe(struct platform_device *pdev)
{
	return component_add(&pdev->dev, &dpu_component_ops);
}
865 
/* Drop the component registration added in probe. */
static int sprd_dpu_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &dpu_component_ops);

	return 0;
}
872 
/* Platform driver, registered by the top-level sprd_drm module. */
struct platform_driver sprd_dpu_driver = {
	.probe = sprd_dpu_probe,
	.remove = sprd_dpu_remove,
	.driver = {
		.name = "sprd-dpu-drv",
		.of_match_table = dpu_match_table,
	},
};
881 
882 MODULE_AUTHOR("Leon He <leon.he@unisoc.com>");
883 MODULE_AUTHOR("Kevin Tang <kevin.tang@unisoc.com>");
884 MODULE_DESCRIPTION("Unisoc Display Controller Driver");
885 MODULE_LICENSE("GPL v2");
886