// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2017 Samsung Electronics Co.Ltd
 * Author:
 *	Andrzej Pietrasiewicz <andrzejtp2010@gmail.com>
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>

#include <drm/drm_blend.h>
#include <drm/drm_fourcc.h>
#include <drm/exynos_drm.h>

#include "exynos_drm_drv.h"
#include "exynos_drm_fb.h"
#include "exynos_drm_ipp.h"
#include "regs-scaler.h"

#define scaler_read(offset)		readl(scaler->regs + (offset))
#define scaler_write(cfg, offset)	writel(cfg, scaler->regs + (offset))
#define SCALER_MAX_CLK			4
#define SCALER_AUTOSUSPEND_DELAY	2000
#define SCALER_RESET_WAIT_RETRIES	100

struct scaler_data {
	const char	*clk_name[SCALER_MAX_CLK];
	unsigned int	num_clk;
	const struct exynos_drm_ipp_formats *formats;
	unsigned int	num_formats;
};

struct scaler_context {
	struct exynos_drm_ipp		ipp;
	struct drm_device		*drm_dev;
	void				*dma_priv;
	struct device			*dev;
	void __iomem			*regs;
	struct clk			*clock[SCALER_MAX_CLK];
	struct exynos_drm_ipp_task	*task;
	const struct scaler_data	*scaler_data;
};

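/*
 * Mapping from a DRM fourcc to the scaler's internal colour format code.
 * chroma_tile_w/h are the per-16 scale factors used to derive the chroma
 * crop offsets from the luma offsets in scaler_set_src_luma_chroma_pos();
 * the names suggest they describe the chroma extent of a 16x16 luma tile.
 */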
struct scaler_format {
	u32	drm_fmt;
	u32	internal_fmt;
	u32	chroma_tile_w;
	u32	chroma_tile_h;
};

static const struct scaler_format scaler_formats[] = {
	{ DRM_FORMAT_NV12, SCALER_YUV420_2P_UV, 8, 8 },
	{ DRM_FORMAT_NV21, SCALER_YUV420_2P_VU, 8, 8 },
	{ DRM_FORMAT_YUV420, SCALER_YUV420_3P, 8, 8 },
	{ DRM_FORMAT_YUYV, SCALER_YUV422_1P_YUYV, 16, 16 },
	{ DRM_FORMAT_UYVY, SCALER_YUV422_1P_UYVY, 16, 16 },
	{ DRM_FORMAT_YVYU, SCALER_YUV422_1P_YVYU, 16, 16 },
	{ DRM_FORMAT_NV16, SCALER_YUV422_2P_UV, 8, 16 },
	{ DRM_FORMAT_NV61, SCALER_YUV422_2P_VU, 8, 16 },
	{ DRM_FORMAT_YUV422, SCALER_YUV422_3P, 8, 16 },
	{ DRM_FORMAT_NV24, SCALER_YUV444_2P_UV, 16, 16 },
	{ DRM_FORMAT_NV42, SCALER_YUV444_2P_VU, 16, 16 },
	{ DRM_FORMAT_YUV444, SCALER_YUV444_3P, 16, 16 },
	{ DRM_FORMAT_RGB565, SCALER_RGB_565, 0, 0 },
	{ DRM_FORMAT_XRGB1555, SCALER_ARGB1555, 0, 0 },
	{ DRM_FORMAT_ARGB1555, SCALER_ARGB1555, 0, 0 },
	{ DRM_FORMAT_XRGB4444, SCALER_ARGB4444, 0, 0 },
	{ DRM_FORMAT_ARGB4444, SCALER_ARGB4444, 0, 0 },
	{ DRM_FORMAT_XRGB8888, SCALER_ARGB8888, 0, 0 },
	{ DRM_FORMAT_ARGB8888, SCALER_ARGB8888, 0, 0 },
	{ DRM_FORMAT_RGBX8888, SCALER_RGBA8888, 0, 0 },
	{ DRM_FORMAT_RGBA8888, SCALER_RGBA8888, 0, 0 },
};

static const struct scaler_format *scaler_get_format(u32 drm_fmt)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(scaler_formats); i++)
		if (scaler_formats[i].drm_fmt == drm_fmt)
			return &scaler_formats[i];

	return NULL;
}

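/*
 * Request a software reset and wait for the SOFT_RESET bit to self-clear,
 * then make sure the block accepts register writes again by writing
 * SCALER_INT_EN and reading the value back. Returns -EIO if this does not
 * complete within SCALER_RESET_WAIT_RETRIES polls.
 */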
static inline int scaler_reset(struct scaler_context *scaler)
{
	int retry = SCALER_RESET_WAIT_RETRIES;

	scaler_write(SCALER_CFG_SOFT_RESET, SCALER_CFG);
	do {
		cpu_relax();
	} while (--retry > 1 &&
		 scaler_read(SCALER_CFG) & SCALER_CFG_SOFT_RESET);
	do {
		cpu_relax();
		scaler_write(1, SCALER_INT_EN);
	} while (--retry > 0 && scaler_read(SCALER_INT_EN) != 1);

	return retry ? 0 : -EIO;
}

static inline void scaler_enable_int(struct scaler_context *scaler)
{
	u32 val;

	val = SCALER_INT_EN_TIMEOUT |
		SCALER_INT_EN_ILLEGAL_BLEND |
		SCALER_INT_EN_ILLEGAL_RATIO |
		SCALER_INT_EN_ILLEGAL_DST_HEIGHT |
		SCALER_INT_EN_ILLEGAL_DST_WIDTH |
		SCALER_INT_EN_ILLEGAL_DST_V_POS |
		SCALER_INT_EN_ILLEGAL_DST_H_POS |
		SCALER_INT_EN_ILLEGAL_DST_C_SPAN |
		SCALER_INT_EN_ILLEGAL_DST_Y_SPAN |
		SCALER_INT_EN_ILLEGAL_DST_CR_BASE |
		SCALER_INT_EN_ILLEGAL_DST_CB_BASE |
		SCALER_INT_EN_ILLEGAL_DST_Y_BASE |
		SCALER_INT_EN_ILLEGAL_DST_COLOR |
		SCALER_INT_EN_ILLEGAL_SRC_HEIGHT |
		SCALER_INT_EN_ILLEGAL_SRC_WIDTH |
		SCALER_INT_EN_ILLEGAL_SRC_CV_POS |
		SCALER_INT_EN_ILLEGAL_SRC_CH_POS |
		SCALER_INT_EN_ILLEGAL_SRC_YV_POS |
		SCALER_INT_EN_ILLEGAL_SRC_YH_POS |
		SCALER_INT_EN_ILLEGAL_DST_SPAN |
		SCALER_INT_EN_ILLEGAL_SRC_Y_SPAN |
		SCALER_INT_EN_ILLEGAL_SRC_CR_BASE |
		SCALER_INT_EN_ILLEGAL_SRC_CB_BASE |
		SCALER_INT_EN_ILLEGAL_SRC_Y_BASE |
		SCALER_INT_EN_ILLEGAL_SRC_COLOR |
		SCALER_INT_EN_FRAME_END;
	scaler_write(val, SCALER_INT_EN);
}

static inline void scaler_set_src_fmt(struct scaler_context *scaler,
	u32 src_fmt, u32 tile)
{
	u32 val;

	val = SCALER_SRC_CFG_SET_COLOR_FORMAT(src_fmt) | (tile << 10);
	scaler_write(val, SCALER_SRC_CFG);
}

static inline void scaler_set_src_base(struct scaler_context *scaler,
	struct exynos_drm_ipp_buffer *src_buf)
{
	static unsigned int bases[] = {
		SCALER_SRC_Y_BASE,
		SCALER_SRC_CB_BASE,
		SCALER_SRC_CR_BASE,
	};
	int i;

	for (i = 0; i < src_buf->format->num_planes; ++i)
		scaler_write(src_buf->dma_addr[i], bases[i]);
}

static inline void scaler_set_src_span(struct scaler_context *scaler,
	struct exynos_drm_ipp_buffer *src_buf)
{
	u32 val;

	val = SCALER_SRC_SPAN_SET_Y_SPAN(src_buf->buf.pitch[0] /
		src_buf->format->cpp[0]);

	if (src_buf->format->num_planes > 1)
		val |= SCALER_SRC_SPAN_SET_C_SPAN(src_buf->buf.pitch[1]);

	scaler_write(val, SCALER_SRC_SPAN);
}

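/*
 * Source crop offsets are programmed as fixed-point values with two
 * fractional bits (hence the << 2). The chroma offsets are the luma
 * offsets scaled by chroma_tile_w/h out of 16 (see struct scaler_format),
 * which accounts for the chroma subsampling of the selected format.
 */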
static inline void scaler_set_src_luma_chroma_pos(struct scaler_context *scaler,
			struct drm_exynos_ipp_task_rect *src_pos,
			const struct scaler_format *fmt)
{
	u32 val;

	val = SCALER_SRC_Y_POS_SET_YH_POS(src_pos->x << 2);
	val |= SCALER_SRC_Y_POS_SET_YV_POS(src_pos->y << 2);
	scaler_write(val, SCALER_SRC_Y_POS);
	val = SCALER_SRC_C_POS_SET_CH_POS(
		(src_pos->x * fmt->chroma_tile_w / 16) << 2);
	val |= SCALER_SRC_C_POS_SET_CV_POS(
		(src_pos->y * fmt->chroma_tile_h / 16) << 2);
	scaler_write(val, SCALER_SRC_C_POS);
}

static inline void scaler_set_src_wh(struct scaler_context *scaler,
	struct drm_exynos_ipp_task_rect *src_pos)
{
	u32 val;

	val = SCALER_SRC_WH_SET_WIDTH(src_pos->w);
	val |= SCALER_SRC_WH_SET_HEIGHT(src_pos->h);
	scaler_write(val, SCALER_SRC_WH);
}

static inline void scaler_set_dst_fmt(struct scaler_context *scaler,
	u32 dst_fmt)
{
	u32 val;

	val = SCALER_DST_CFG_SET_COLOR_FORMAT(dst_fmt);
	scaler_write(val, SCALER_DST_CFG);
}

static inline void scaler_set_dst_base(struct scaler_context *scaler,
	struct exynos_drm_ipp_buffer *dst_buf)
{
	static unsigned int bases[] = {
		SCALER_DST_Y_BASE,
		SCALER_DST_CB_BASE,
		SCALER_DST_CR_BASE,
	};
	int i;

	for (i = 0; i < dst_buf->format->num_planes; ++i)
		scaler_write(dst_buf->dma_addr[i], bases[i]);
}

static inline void scaler_set_dst_span(struct scaler_context *scaler,
	struct exynos_drm_ipp_buffer *dst_buf)
{
	u32 val;

	val = SCALER_DST_SPAN_SET_Y_SPAN(dst_buf->buf.pitch[0] /
		dst_buf->format->cpp[0]);

	if (dst_buf->format->num_planes > 1)
		val |= SCALER_DST_SPAN_SET_C_SPAN(dst_buf->buf.pitch[1]);

	scaler_write(val, SCALER_DST_SPAN);
}

static inline void scaler_set_dst_wh(struct scaler_context *scaler,
	struct drm_exynos_ipp_task_rect *dst_pos)
{
	u32 val;

	val = SCALER_DST_WH_SET_WIDTH(dst_pos->w);
	val |= SCALER_DST_WH_SET_HEIGHT(dst_pos->h);
	scaler_write(val, SCALER_DST_WH);
}

static inline void scaler_set_dst_pos(struct scaler_context *scaler,
	struct drm_exynos_ipp_task_rect *dst_pos)
{
	u32 val;

	val = SCALER_DST_POS_SET_H_POS(dst_pos->x);
	val |= SCALER_DST_POS_SET_V_POS(dst_pos->y);
	scaler_write(val, SCALER_DST_POS);
}

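/*
 * Scaling ratios are src/dst in 16.16 fixed point. For 90/270 degree
 * rotation the source width and height are swapped so the ratios are
 * computed against the rotated orientation.
 */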
static inline void scaler_set_hv_ratio(struct scaler_context *scaler,
	unsigned int rotation,
	struct drm_exynos_ipp_task_rect *src_pos,
	struct drm_exynos_ipp_task_rect *dst_pos)
{
	u32 val, h_ratio, v_ratio;

	if (drm_rotation_90_or_270(rotation)) {
		h_ratio = (src_pos->h << 16) / dst_pos->w;
		v_ratio = (src_pos->w << 16) / dst_pos->h;
	} else {
		h_ratio = (src_pos->w << 16) / dst_pos->w;
		v_ratio = (src_pos->h << 16) / dst_pos->h;
	}

	val = SCALER_H_RATIO_SET(h_ratio);
	scaler_write(val, SCALER_H_RATIO);

	val = SCALER_V_RATIO_SET(v_ratio);
	scaler_write(val, SCALER_V_RATIO);
}

static inline void scaler_set_rotation(struct scaler_context *scaler,
	unsigned int rotation)
{
	u32 val = 0;

	if (rotation & DRM_MODE_ROTATE_90)
		val |= SCALER_ROT_CFG_SET_ROTMODE(SCALER_ROT_MODE_90);
	else if (rotation & DRM_MODE_ROTATE_180)
		val |= SCALER_ROT_CFG_SET_ROTMODE(SCALER_ROT_MODE_180);
	else if (rotation & DRM_MODE_ROTATE_270)
		val |= SCALER_ROT_CFG_SET_ROTMODE(SCALER_ROT_MODE_270);
	if (rotation & DRM_MODE_REFLECT_X)
		val |= SCALER_ROT_CFG_FLIP_X_EN;
	if (rotation & DRM_MODE_REFLECT_Y)
		val |= SCALER_ROT_CFG_FLIP_Y_EN;
	scaler_write(val, SCALER_ROT_CFG);
}

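/*
 * Colour space conversion coefficients. The values match the BT.601
 * limited-range YCbCr<->RGB matrices in what appears to be a signed
 * fixed-point encoding with 512 representing 1.0 (e.g. 0x254 == 596 ~=
 * 1.164 * 512), negative coefficients in 12-bit two's complement. The
 * RGB->YCbCr direction is selected whenever the source format is an RGB
 * fourcc, YCbCr->RGB otherwise.
 */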
static inline void scaler_set_csc(struct scaler_context *scaler,
	const struct drm_format_info *fmt)
{
	static const u32 csc_mtx[2][3][3] = {
		{ /* YCbCr to RGB */
			{0x254, 0x000, 0x331},
			{0x254, 0xf38, 0xe60},
			{0x254, 0x409, 0x000},
		},
		{ /* RGB to YCbCr */
			{0x084, 0x102, 0x032},
			{0xfb4, 0xf6b, 0x0e1},
			{0x0e1, 0xf44, 0xfdc},
		},
	};
	int i, j, dir;

	switch (fmt->format) {
	case DRM_FORMAT_RGB565:
	case DRM_FORMAT_XRGB1555:
	case DRM_FORMAT_ARGB1555:
	case DRM_FORMAT_XRGB4444:
	case DRM_FORMAT_ARGB4444:
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ARGB8888:
	case DRM_FORMAT_RGBX8888:
	case DRM_FORMAT_RGBA8888:
		dir = 1;
		break;
	default:
		dir = 0;
	}

	for (i = 0; i < 3; i++)
		for (j = 0; j < 3; j++)
			scaler_write(csc_mtx[dir][i][j], SCALER_CSC_COEF(j, i));
}

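/*
 * Program the hardware timeout counter; if a job does not finish before
 * the counter (scaled by the divider) expires, the scaler is expected to
 * raise the TIMEOUT interrupt enabled in scaler_enable_int().
 */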
static inline void scaler_set_timer(struct scaler_context *scaler,
	unsigned int timer, unsigned int divider)
{
	u32 val;

	val = SCALER_TIMEOUT_CTRL_TIMER_ENABLE;
	val |= SCALER_TIMEOUT_CTRL_SET_TIMER_VALUE(timer);
	val |= SCALER_TIMEOUT_CTRL_SET_TIMER_DIV(divider);
	scaler_write(val, SCALER_TIMEOUT_CTRL);
}

static inline void scaler_start_hw(struct scaler_context *scaler)
{
	scaler_write(SCALER_CFG_START_CMD, SCALER_CFG);
}

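/*
 * Submit one conversion job: take a runtime PM reference, soft-reset the
 * block, program source/destination buffers, scaling ratios, rotation and
 * CSC, then enable interrupts and start the hardware. Completion (or
 * failure) is reported asynchronously from scaler_irq_handler(), which
 * also drops the runtime PM reference.
 */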
static int scaler_commit(struct exynos_drm_ipp *ipp,
			  struct exynos_drm_ipp_task *task)
{
	struct scaler_context *scaler =
			container_of(ipp, struct scaler_context, ipp);

	struct drm_exynos_ipp_task_rect *src_pos = &task->src.rect;
	struct drm_exynos_ipp_task_rect *dst_pos = &task->dst.rect;
	const struct scaler_format *src_fmt, *dst_fmt;
	int ret = 0;

	src_fmt = scaler_get_format(task->src.buf.fourcc);
	dst_fmt = scaler_get_format(task->dst.buf.fourcc);

	ret = pm_runtime_resume_and_get(scaler->dev);
	if (ret < 0)
		return ret;

	if (scaler_reset(scaler)) {
		pm_runtime_put(scaler->dev);
		return -EIO;
	}

	scaler->task = task;

	scaler_set_src_fmt(
		scaler, src_fmt->internal_fmt, task->src.buf.modifier != 0);
	scaler_set_src_base(scaler, &task->src);
	scaler_set_src_span(scaler, &task->src);
	scaler_set_src_luma_chroma_pos(scaler, src_pos, src_fmt);
	scaler_set_src_wh(scaler, src_pos);

	scaler_set_dst_fmt(scaler, dst_fmt->internal_fmt);
	scaler_set_dst_base(scaler, &task->dst);
	scaler_set_dst_span(scaler, &task->dst);
	scaler_set_dst_wh(scaler, dst_pos);
	scaler_set_dst_pos(scaler, dst_pos);

	scaler_set_hv_ratio(scaler, task->transform.rotation, src_pos, dst_pos);
	scaler_set_rotation(scaler, task->transform.rotation);

	scaler_set_csc(scaler, task->src.format);

	scaler_set_timer(scaler, 0xffff, 0xf);

	scaler_enable_int(scaler);
	scaler_start_hw(scaler);

	return 0;
}

static const struct exynos_drm_ipp_funcs ipp_funcs = {
	.commit = scaler_commit,
};

static inline void scaler_disable_int(struct scaler_context *scaler)
{
	scaler_write(0, SCALER_INT_EN);
}

static inline u32 scaler_get_int_status(struct scaler_context *scaler)
{
	u32 val = scaler_read(SCALER_INT_STATUS);

	scaler_write(val, SCALER_INT_STATUS);

	return val;
}

static inline int scaler_task_done(u32 val)
{
	return val & SCALER_INT_STATUS_FRAME_END ? 0 : -EINVAL;
}

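/*
 * Acknowledge and clear the status bits, mask further interrupts and
 * complete the pending task: success is reported when FRAME_END is set,
 * -EINVAL otherwise. The runtime PM reference taken in scaler_commit()
 * is released here.
 */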
static irqreturn_t scaler_irq_handler(int irq, void *arg)
{
	struct scaler_context *scaler = arg;

	u32 val = scaler_get_int_status(scaler);

	scaler_disable_int(scaler);

	if (scaler->task) {
		struct exynos_drm_ipp_task *task = scaler->task;

		scaler->task = NULL;
		pm_runtime_mark_last_busy(scaler->dev);
		pm_runtime_put_autosuspend(scaler->dev);
		exynos_drm_ipp_task_done(task, scaler_task_done(val));
	}

	return IRQ_HANDLED;
}

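/*
 * Component bind/unbind callbacks: bind attaches the device to the Exynos
 * DRM DMA/IOMMU mapping and registers the scaler as an IPP module capable
 * of cropping, rotation, scaling and format conversion; unbind reverses
 * both steps.
 */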
static int scaler_bind(struct device *dev, struct device *master, void *data)
{
	struct scaler_context *scaler = dev_get_drvdata(dev);
	struct drm_device *drm_dev = data;
	struct exynos_drm_ipp *ipp = &scaler->ipp;

	scaler->drm_dev = drm_dev;
	ipp->drm_dev = drm_dev;
	exynos_drm_register_dma(drm_dev, dev, &scaler->dma_priv);

	exynos_drm_ipp_register(dev, ipp, &ipp_funcs,
			DRM_EXYNOS_IPP_CAP_CROP | DRM_EXYNOS_IPP_CAP_ROTATE |
			DRM_EXYNOS_IPP_CAP_SCALE | DRM_EXYNOS_IPP_CAP_CONVERT,
			scaler->scaler_data->formats,
			scaler->scaler_data->num_formats, "scaler");

	dev_info(dev, "The exynos scaler has been probed successfully\n");

	return 0;
}

static void scaler_unbind(struct device *dev, struct device *master,
			void *data)
{
	struct scaler_context *scaler = dev_get_drvdata(dev);
	struct exynos_drm_ipp *ipp = &scaler->ipp;

	exynos_drm_ipp_unregister(dev, ipp);
	exynos_drm_unregister_dma(scaler->drm_dev, scaler->dev,
				  &scaler->dma_priv);
}

static const struct component_ops scaler_component_ops = {
	.bind	= scaler_bind,
	.unbind = scaler_unbind,
};

static int scaler_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct scaler_context *scaler;
	int irq;
	int ret, i;

	scaler = devm_kzalloc(dev, sizeof(*scaler), GFP_KERNEL);
	if (!scaler)
		return -ENOMEM;

	scaler->scaler_data =
		(struct scaler_data *)of_device_get_match_data(dev);

	scaler->dev = dev;
	scaler->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(scaler->regs))
		return PTR_ERR(scaler->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, NULL, scaler_irq_handler,
					IRQF_ONESHOT, "drm_scaler", scaler);
	if (ret < 0) {
		dev_err(dev, "failed to request irq\n");
		return ret;
	}

	for (i = 0; i < scaler->scaler_data->num_clk; ++i) {
		scaler->clock[i] = devm_clk_get(dev,
					      scaler->scaler_data->clk_name[i]);
		if (IS_ERR(scaler->clock[i])) {
			dev_err(dev, "failed to get clock\n");
			return PTR_ERR(scaler->clock[i]);
		}
	}

	pm_runtime_use_autosuspend(dev);
	pm_runtime_set_autosuspend_delay(dev, SCALER_AUTOSUSPEND_DELAY);
	pm_runtime_enable(dev);
	platform_set_drvdata(pdev, scaler);

	ret = component_add(dev, &scaler_component_ops);
	if (ret)
		goto err_ippdrv_register;

	return 0;

err_ippdrv_register:
	pm_runtime_dont_use_autosuspend(dev);
	pm_runtime_disable(dev);
	return ret;
}

static int scaler_remove(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;

	component_del(dev, &scaler_component_ops);
	pm_runtime_dont_use_autosuspend(dev);
	pm_runtime_disable(dev);

	return 0;
}

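/*
 * clk_disable_unprepare() returns void; this wrapper gives it the same
 * int-returning signature as clk_prepare_enable() so both can be called
 * through one function pointer in scaler_clk_ctrl().
 */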
static int clk_disable_unprepare_wrapper(struct clk *clk)
{
	clk_disable_unprepare(clk);

	return 0;
}

static int scaler_clk_ctrl(struct scaler_context *scaler, bool enable)
{
	int (*clk_fun)(struct clk *clk);
	int i;

	clk_fun = enable ? clk_prepare_enable : clk_disable_unprepare_wrapper;

	for (i = 0; i < scaler->scaler_data->num_clk; ++i)
		clk_fun(scaler->clock[i]);

	return 0;
}

static int scaler_runtime_suspend(struct device *dev)
{
	struct scaler_context *scaler = dev_get_drvdata(dev);

	return scaler_clk_ctrl(scaler, false);
}

static int scaler_runtime_resume(struct device *dev)
{
	struct scaler_context *scaler = dev_get_drvdata(dev);

	return scaler_clk_ctrl(scaler, true);
}

static DEFINE_RUNTIME_DEV_PM_OPS(scaler_pm_ops, scaler_runtime_suspend,
				 scaler_runtime_resume, NULL);

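/*
 * Per-format size and scaling constraints checked by the IPP core before
 * a task is committed. Sizes are in pixels; the scale limits are 16.16
 * fixed-point ratios, here allowing scaling between 1/4x and 16x, while
 * tiled buffers are restricted to 1:1 scaling.
 */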
static const struct drm_exynos_ipp_limit scaler_5420_two_pixel_hv_limits[] = {
	{ IPP_SIZE_LIMIT(BUFFER, .h = { 16, SZ_8K }, .v = { 16, SZ_8K }) },
	{ IPP_SIZE_LIMIT(AREA, .h.align = 2, .v.align = 2) },
	{ IPP_SCALE_LIMIT(.h = { 65536 * 1 / 4, 65536 * 16 },
			  .v = { 65536 * 1 / 4, 65536 * 16 }) },
};

static const struct drm_exynos_ipp_limit scaler_5420_two_pixel_h_limits[] = {
	{ IPP_SIZE_LIMIT(BUFFER, .h = { 16, SZ_8K }, .v = { 16, SZ_8K }) },
	{ IPP_SIZE_LIMIT(AREA, .h.align = 2, .v.align = 1) },
	{ IPP_SCALE_LIMIT(.h = { 65536 * 1 / 4, 65536 * 16 },
			  .v = { 65536 * 1 / 4, 65536 * 16 }) },
};

static const struct drm_exynos_ipp_limit scaler_5420_one_pixel_limits[] = {
	{ IPP_SIZE_LIMIT(BUFFER, .h = { 16, SZ_8K }, .v = { 16, SZ_8K }) },
	{ IPP_SCALE_LIMIT(.h = { 65536 * 1 / 4, 65536 * 16 },
			  .v = { 65536 * 1 / 4, 65536 * 16 }) },
};

static const struct drm_exynos_ipp_limit scaler_5420_tile_limits[] = {
	{ IPP_SIZE_LIMIT(BUFFER, .h = { 16, SZ_8K }, .v = { 16, SZ_8K }) },
	{ IPP_SIZE_LIMIT(AREA, .h.align = 16, .v.align = 16) },
	{ IPP_SCALE_LIMIT(.h = { 1, 1 }, .v = { 1, 1 }) },
	{ }
};

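/*
 * Helper for the tiled format entries below: same fourcc, but with the
 * Samsung 16x16 tile modifier, paired with the tile-specific limits.
 */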
#define IPP_SRCDST_TILE_FORMAT(f, l)	\
	IPP_SRCDST_MFORMAT(f, DRM_FORMAT_MOD_SAMSUNG_16_16_TILE, (l))

static const struct exynos_drm_ipp_formats exynos5420_formats[] = {
	/* SCALER_YUV420_2P_VU */
	{ IPP_SRCDST_FORMAT(NV21, scaler_5420_two_pixel_hv_limits) },

	/* SCALER_YUV420_2P_UV */
	{ IPP_SRCDST_FORMAT(NV12, scaler_5420_two_pixel_hv_limits) },

	/* SCALER_YUV420_3P */
	{ IPP_SRCDST_FORMAT(YUV420, scaler_5420_two_pixel_hv_limits) },

	/* SCALER_YUV422_1P_YUYV */
	{ IPP_SRCDST_FORMAT(YUYV, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV422_1P_UYVY */
	{ IPP_SRCDST_FORMAT(UYVY, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV422_1P_YVYU */
	{ IPP_SRCDST_FORMAT(YVYU, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV422_2P_VU */
	{ IPP_SRCDST_FORMAT(NV61, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV422_2P_UV */
	{ IPP_SRCDST_FORMAT(NV16, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV422_3P */
	{ IPP_SRCDST_FORMAT(YUV422, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV444_2P_VU */
	{ IPP_SRCDST_FORMAT(NV42, scaler_5420_one_pixel_limits) },

	/* SCALER_YUV444_2P_UV */
	{ IPP_SRCDST_FORMAT(NV24, scaler_5420_one_pixel_limits) },

	/* SCALER_YUV444_3P */
	{ IPP_SRCDST_FORMAT(YUV444, scaler_5420_one_pixel_limits) },

	/* SCALER_RGB_565 */
	{ IPP_SRCDST_FORMAT(RGB565, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB1555 */
	{ IPP_SRCDST_FORMAT(XRGB1555, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB1555 */
	{ IPP_SRCDST_FORMAT(ARGB1555, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB4444 */
	{ IPP_SRCDST_FORMAT(XRGB4444, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB4444 */
	{ IPP_SRCDST_FORMAT(ARGB4444, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB8888 */
	{ IPP_SRCDST_FORMAT(XRGB8888, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB8888 */
	{ IPP_SRCDST_FORMAT(ARGB8888, scaler_5420_one_pixel_limits) },

	/* SCALER_RGBA8888 */
	{ IPP_SRCDST_FORMAT(RGBX8888, scaler_5420_one_pixel_limits) },

	/* SCALER_RGBA8888 */
	{ IPP_SRCDST_FORMAT(RGBA8888, scaler_5420_one_pixel_limits) },

	/* SCALER_YUV420_2P_VU TILE */
	{ IPP_SRCDST_TILE_FORMAT(NV21, scaler_5420_tile_limits) },

	/* SCALER_YUV420_2P_UV TILE */
	{ IPP_SRCDST_TILE_FORMAT(NV12, scaler_5420_tile_limits) },

	/* SCALER_YUV420_3P TILE */
	{ IPP_SRCDST_TILE_FORMAT(YUV420, scaler_5420_tile_limits) },

	/* SCALER_YUV422_1P_YUYV TILE */
	{ IPP_SRCDST_TILE_FORMAT(YUYV, scaler_5420_tile_limits) },
};

static const struct scaler_data exynos5420_data = {
	.clk_name	= {"mscl"},
	.num_clk	= 1,
	.formats	= exynos5420_formats,
	.num_formats	= ARRAY_SIZE(exynos5420_formats),
};

static const struct scaler_data exynos5433_data = {
	.clk_name	= {"pclk", "aclk", "aclk_xiu"},
	.num_clk	= 3,
	.formats	= exynos5420_formats, /* intentionally reuses the 5420 format list */
	.num_formats	= ARRAY_SIZE(exynos5420_formats),
};

static const struct of_device_id exynos_scaler_match[] = {
	{
		.compatible = "samsung,exynos5420-scaler",
		.data = &exynos5420_data,
	}, {
		.compatible = "samsung,exynos5433-scaler",
		.data = &exynos5433_data,
	}, {
	},
};
MODULE_DEVICE_TABLE(of, exynos_scaler_match);

struct platform_driver scaler_driver = {
	.probe		= scaler_probe,
	.remove		= scaler_remove,
	.driver		= {
		.name	= "exynos-scaler",
		.owner	= THIS_MODULE,
		.pm	= pm_ptr(&scaler_pm_ops),
		.of_match_table = exynos_scaler_match,
	},
};