1 /*
2  * Copyright 2012-15 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: AMD
23  *
24  */
25 #include "dm_services.h"
26 
27 #include "resource.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
31 #include "opp.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "dpp.h"
35 #include "core_types.h"
36 #include "set_mode_types.h"
37 #include "virtual/virtual_stream_encoder.h"
38 #include "dpcd_defs.h"
39 
40 #include "dce80/dce80_resource.h"
41 #include "dce100/dce100_resource.h"
42 #include "dce110/dce110_resource.h"
43 #include "dce112/dce112_resource.h"
44 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
45 #include "dcn10/dcn10_resource.h"
46 #endif
47 #include "dce120/dce120_resource.h"
48 
49 #define DC_LOGGER_INIT(logger)
50 
51 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
52 {
53 	enum dce_version dc_version = DCE_VERSION_UNKNOWN;
54 	switch (asic_id.chip_family) {
55 
56 	case FAMILY_CI:
57 		dc_version = DCE_VERSION_8_0;
58 		break;
59 	case FAMILY_KV:
60 		if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
61 		    ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
62 		    ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
63 			dc_version = DCE_VERSION_8_3;
64 		else
65 			dc_version = DCE_VERSION_8_1;
66 		break;
67 	case FAMILY_CZ:
68 		dc_version = DCE_VERSION_11_0;
69 		break;
70 
71 	case FAMILY_VI:
72 		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
73 				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
74 			dc_version = DCE_VERSION_10_0;
75 			break;
76 		}
77 		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
78 				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
79 				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
80 			dc_version = DCE_VERSION_11_2;
81 		}
82 		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev))
83 			dc_version = DCE_VERSION_11_22;
84 		break;
85 	case FAMILY_AI:
86 		dc_version = DCE_VERSION_12_0;
87 		break;
88 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
89 	case FAMILY_RV:
90 		dc_version = DCN_VERSION_1_0;
91 		break;
92 #endif
93 	default:
94 		dc_version = DCE_VERSION_UNKNOWN;
95 		break;
96 	}
97 	return dc_version;
98 }
99 
100 struct resource_pool *dc_create_resource_pool(
101 				struct dc  *dc,
102 				int num_virtual_links,
103 				enum dce_version dc_version,
104 				struct hw_asic_id asic_id)
105 {
106 	struct resource_pool *res_pool = NULL;
107 
108 	switch (dc_version) {
109 	case DCE_VERSION_8_0:
110 		res_pool = dce80_create_resource_pool(
111 			num_virtual_links, dc);
112 		break;
113 	case DCE_VERSION_8_1:
114 		res_pool = dce81_create_resource_pool(
115 			num_virtual_links, dc);
116 		break;
117 	case DCE_VERSION_8_3:
118 		res_pool = dce83_create_resource_pool(
119 			num_virtual_links, dc);
120 		break;
121 	case DCE_VERSION_10_0:
122 		res_pool = dce100_create_resource_pool(
123 				num_virtual_links, dc);
124 		break;
125 	case DCE_VERSION_11_0:
126 		res_pool = dce110_create_resource_pool(
127 			num_virtual_links, dc, asic_id);
128 		break;
129 	case DCE_VERSION_11_2:
130 	case DCE_VERSION_11_22:
131 		res_pool = dce112_create_resource_pool(
132 			num_virtual_links, dc);
133 		break;
134 	case DCE_VERSION_12_0:
135 		res_pool = dce120_create_resource_pool(
136 			num_virtual_links, dc);
137 		break;
138 
139 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
140 	case DCN_VERSION_1_0:
141 		res_pool = dcn10_create_resource_pool(
142 				num_virtual_links, dc);
143 		break;
144 #endif
145 
146 
147 	default:
148 		break;
149 	}
150 	if (res_pool != NULL) {
151 		struct dc_firmware_info fw_info = { { 0 } };
152 
153 		if (dc->ctx->dc_bios->funcs->get_firmware_info(
154 				dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK) {
155 				res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
156 			} else
157 				ASSERT_CRITICAL(false);
158 	}
159 
160 	return res_pool;
161 }
162 
163 void dc_destroy_resource_pool(struct dc  *dc)
164 {
165 	if (dc) {
166 		if (dc->res_pool)
167 			dc->res_pool->funcs->destroy(&dc->res_pool);
168 
169 		kfree(dc->hwseq);
170 	}
171 }
172 
173 static void update_num_audio(
174 	const struct resource_straps *straps,
175 	unsigned int *num_audio,
176 	struct audio_support *aud_support)
177 {
178 	aud_support->dp_audio = true;
179 	aud_support->hdmi_audio_native = false;
180 	aud_support->hdmi_audio_on_dongle = false;
181 
182 	if (straps->hdmi_disable == 0) {
183 		if (straps->dc_pinstraps_audio & 0x2) {
184 			aud_support->hdmi_audio_on_dongle = true;
185 			aud_support->hdmi_audio_native = true;
186 		}
187 	}
188 
189 	switch (straps->audio_stream_number) {
190 	case 0: /* multi streams supported */
191 		break;
192 	case 1: /* multi streams not supported */
193 		*num_audio = 1;
194 		break;
195 	default:
196 		DC_ERR("DC: unexpected audio fuse!\n");
197 	}
198 }
199 
200 bool resource_construct(
201 	unsigned int num_virtual_links,
202 	struct dc  *dc,
203 	struct resource_pool *pool,
204 	const struct resource_create_funcs *create_funcs)
205 {
206 	struct dc_context *ctx = dc->ctx;
207 	const struct resource_caps *caps = pool->res_cap;
208 	int i;
209 	unsigned int num_audio = caps->num_audio;
210 	struct resource_straps straps = {0};
211 
212 	if (create_funcs->read_dce_straps)
213 		create_funcs->read_dce_straps(dc->ctx, &straps);
214 
215 	pool->audio_count = 0;
216 	if (create_funcs->create_audio) {
217 		/* find the total number of streams available via the
218 		 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
219 		 * registers (one for each pin) starting from pin 1
220 		 * up to the max number of audio pins.
221 		 * We stop on the first pin where
222 		 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
223 		 */
224 		update_num_audio(&straps, &num_audio, &pool->audio_support);
225 		for (i = 0; i < caps->num_audio; i++) {
226 			struct audio *aud = create_funcs->create_audio(ctx, i);
227 
228 			if (aud == NULL) {
229 				DC_ERR("DC: failed to create audio!\n");
230 				return false;
231 			}
232 			if (!aud->funcs->endpoint_valid(aud)) {
233 				aud->funcs->destroy(&aud);
234 				break;
235 			}
236 			pool->audios[i] = aud;
237 			pool->audio_count++;
238 		}
239 	}
240 
241 	pool->stream_enc_count = 0;
242 	if (create_funcs->create_stream_encoder) {
243 		for (i = 0; i < caps->num_stream_encoder; i++) {
244 			pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
245 			if (pool->stream_enc[i] == NULL)
246 				DC_ERR("DC: failed to create stream_encoder!\n");
247 			pool->stream_enc_count++;
248 		}
249 	}
250 	dc->caps.dynamic_audio = false;
251 	if (pool->audio_count < pool->stream_enc_count) {
252 		dc->caps.dynamic_audio = true;
253 	}
254 	for (i = 0; i < num_virtual_links; i++) {
255 		pool->stream_enc[pool->stream_enc_count] =
256 			virtual_stream_encoder_create(
257 					ctx, ctx->dc_bios);
258 		if (pool->stream_enc[pool->stream_enc_count] == NULL) {
259 			DC_ERR("DC: failed to create stream_encoder!\n");
260 			return false;
261 		}
262 		pool->stream_enc_count++;
263 	}
264 
265 	dc->hwseq = create_funcs->create_hwseq(ctx);
266 
267 	return true;
268 }
269 static int find_matching_clock_source(
270 		const struct resource_pool *pool,
271 		struct clock_source *clock_source)
272 {
273 
274 	int i;
275 
276 	for (i = 0; i < pool->clk_src_count; i++) {
277 		if (pool->clock_sources[i] == clock_source)
278 			return i;
279 	}
280 	return -1;
281 }
282 
283 void resource_unreference_clock_source(
284 		struct resource_context *res_ctx,
285 		const struct resource_pool *pool,
286 		struct clock_source *clock_source)
287 {
288 	int i = find_matching_clock_source(pool, clock_source);
289 
290 	if (i > -1)
291 		res_ctx->clock_source_ref_count[i]--;
292 
293 	if (pool->dp_clock_source == clock_source)
294 		res_ctx->dp_clock_source_ref_count--;
295 }
296 
297 void resource_reference_clock_source(
298 		struct resource_context *res_ctx,
299 		const struct resource_pool *pool,
300 		struct clock_source *clock_source)
301 {
302 	int i = find_matching_clock_source(pool, clock_source);
303 
304 	if (i > -1)
305 		res_ctx->clock_source_ref_count[i]++;
306 
307 	if (pool->dp_clock_source == clock_source)
308 		res_ctx->dp_clock_source_ref_count++;
309 }
310 
311 int resource_get_clock_source_reference(
312 		struct resource_context *res_ctx,
313 		const struct resource_pool *pool,
314 		struct clock_source *clock_source)
315 {
316 	int i = find_matching_clock_source(pool, clock_source);
317 
318 	if (i > -1)
319 		return res_ctx->clock_source_ref_count[i];
320 
321 	if (pool->dp_clock_source == clock_source)
322 		return res_ctx->dp_clock_source_ref_count;
323 
324 	return -1;
325 }
326 
327 bool resource_are_streams_timing_synchronizable(
328 	struct dc_stream_state *stream1,
329 	struct dc_stream_state *stream2)
330 {
331 	if (stream1->timing.h_total != stream2->timing.h_total)
332 		return false;
333 
334 	if (stream1->timing.v_total != stream2->timing.v_total)
335 		return false;
336 
337 	if (stream1->timing.h_addressable
338 				!= stream2->timing.h_addressable)
339 		return false;
340 
341 	if (stream1->timing.v_addressable
342 				!= stream2->timing.v_addressable)
343 		return false;
344 
345 	if (stream1->timing.pix_clk_khz
346 				!= stream2->timing.pix_clk_khz)
347 		return false;
348 
349 	if (stream1->clamping.c_depth != stream2->clamping.c_depth)
350 		return false;
351 
352 	if (stream1->phy_pix_clk != stream2->phy_pix_clk
353 			&& (!dc_is_dp_signal(stream1->signal)
354 			|| !dc_is_dp_signal(stream2->signal)))
355 		return false;
356 
357 	return true;
358 }
359 static bool is_dp_and_hdmi_sharable(
360 		struct dc_stream_state *stream1,
361 		struct dc_stream_state *stream2)
362 {
363 	if (stream1->ctx->dc->caps.disable_dp_clk_share)
364 		return false;
365 
366 	if (stream1->clamping.c_depth != COLOR_DEPTH_888 ||
367 	    stream2->clamping.c_depth != COLOR_DEPTH_888)
368 	return false;
369 
370 	return true;
371 
372 }
373 
374 static bool is_sharable_clk_src(
375 	const struct pipe_ctx *pipe_with_clk_src,
376 	const struct pipe_ctx *pipe)
377 {
378 	if (pipe_with_clk_src->clock_source == NULL)
379 		return false;
380 
381 	if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
382 		return false;
383 
384 	if (dc_is_dp_signal(pipe_with_clk_src->stream->signal) ||
385 		(dc_is_dp_signal(pipe->stream->signal) &&
386 		!is_dp_and_hdmi_sharable(pipe_with_clk_src->stream,
387 				     pipe->stream)))
388 		return false;
389 
390 	if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
391 			&& dc_is_dual_link_signal(pipe->stream->signal))
392 		return false;
393 
394 	if (dc_is_hdmi_signal(pipe->stream->signal)
395 			&& dc_is_dual_link_signal(pipe_with_clk_src->stream->signal))
396 		return false;
397 
398 	if (!resource_are_streams_timing_synchronizable(
399 			pipe_with_clk_src->stream, pipe->stream))
400 		return false;
401 
402 	return true;
403 }
404 
405 struct clock_source *resource_find_used_clk_src_for_sharing(
406 					struct resource_context *res_ctx,
407 					struct pipe_ctx *pipe_ctx)
408 {
409 	int i;
410 
411 	for (i = 0; i < MAX_PIPES; i++) {
412 		if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
413 			return res_ctx->pipe_ctx[i].clock_source;
414 	}
415 
416 	return NULL;
417 }
418 
419 static enum pixel_format convert_pixel_format_to_dalsurface(
420 		enum surface_pixel_format surface_pixel_format)
421 {
422 	enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
423 
424 	switch (surface_pixel_format) {
425 	case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
426 		dal_pixel_format = PIXEL_FORMAT_INDEX8;
427 		break;
428 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
429 		dal_pixel_format = PIXEL_FORMAT_RGB565;
430 		break;
431 	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
432 		dal_pixel_format = PIXEL_FORMAT_RGB565;
433 		break;
434 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
435 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
436 		break;
437 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
438 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
439 		break;
440 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
441 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
442 		break;
443 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
444 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
445 		break;
446 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
447 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
448 		break;
449 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
450 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
451 		dal_pixel_format = PIXEL_FORMAT_FP16;
452 		break;
453 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
454 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
455 		dal_pixel_format = PIXEL_FORMAT_420BPP8;
456 		break;
457 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
458 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
459 		dal_pixel_format = PIXEL_FORMAT_420BPP10;
460 		break;
461 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
462 	default:
463 		dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
464 		break;
465 	}
466 	return dal_pixel_format;
467 }
468 
469 static void rect_swap_helper(struct rect *rect)
470 {
471 	swap(rect->height, rect->width);
472 	swap(rect->x, rect->y);
473 }
474 
475 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
476 {
477 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
478 	const struct dc_stream_state *stream = pipe_ctx->stream;
479 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
480 	struct rect surf_src = plane_state->src_rect;
481 	struct rect clip = { 0 };
482 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
483 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
484 	bool pri_split = pipe_ctx->bottom_pipe &&
485 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
486 	bool sec_split = pipe_ctx->top_pipe &&
487 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
488 
489 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
490 		stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
491 		pri_split = false;
492 		sec_split = false;
493 	}
494 
495 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
496 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
497 		rect_swap_helper(&surf_src);
498 
499 	/* The actual clip is an intersection between stream
500 	 * source and surface clip
501 	 */
502 	clip.x = stream->src.x > plane_state->clip_rect.x ?
503 			stream->src.x : plane_state->clip_rect.x;
504 
505 	clip.width = stream->src.x + stream->src.width <
506 			plane_state->clip_rect.x + plane_state->clip_rect.width ?
507 			stream->src.x + stream->src.width - clip.x :
508 			plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x ;
509 
510 	clip.y = stream->src.y > plane_state->clip_rect.y ?
511 			stream->src.y : plane_state->clip_rect.y;
512 
513 	clip.height = stream->src.y + stream->src.height <
514 			plane_state->clip_rect.y + plane_state->clip_rect.height ?
515 			stream->src.y + stream->src.height - clip.y :
516 			plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y ;
517 
518 	/* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
519 	 * num_pixels = clip.num_pix * scl_ratio
520 	 */
521 	data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
522 			surf_src.width / plane_state->dst_rect.width;
523 	data->viewport.width = clip.width *
524 			surf_src.width / plane_state->dst_rect.width;
525 
526 	data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
527 			surf_src.height / plane_state->dst_rect.height;
528 	data->viewport.height = clip.height *
529 			surf_src.height / plane_state->dst_rect.height;
530 
531 	/* Round down, compensate in init */
532 	data->viewport_c.x = data->viewport.x / vpc_div;
533 	data->viewport_c.y = data->viewport.y / vpc_div;
534 	data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
535 			dc_fixpt_half : dc_fixpt_zero;
536 	data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
537 			dc_fixpt_half : dc_fixpt_zero;
538 	/* Round up, assume original video size always even dimensions */
539 	data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
540 	data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
541 
542 	/* Handle hsplit */
543 	if (sec_split) {
544 		data->viewport.x +=  data->viewport.width / 2;
545 		data->viewport_c.x +=  data->viewport_c.width / 2;
546 		/* Ceil offset pipe */
547 		data->viewport.width = (data->viewport.width + 1) / 2;
548 		data->viewport_c.width = (data->viewport_c.width + 1) / 2;
549 	} else if (pri_split) {
550 		data->viewport.width /= 2;
551 		data->viewport_c.width /= 2;
552 	}
553 
554 	if (plane_state->rotation == ROTATION_ANGLE_90 ||
555 			plane_state->rotation == ROTATION_ANGLE_270) {
556 		rect_swap_helper(&data->viewport_c);
557 		rect_swap_helper(&data->viewport);
558 	}
559 }
560 
561 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
562 {
563 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
564 	const struct dc_stream_state *stream = pipe_ctx->stream;
565 	struct rect surf_src = plane_state->src_rect;
566 	struct rect surf_clip = plane_state->clip_rect;
567 	bool pri_split = pipe_ctx->bottom_pipe &&
568 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
569 	bool sec_split = pipe_ctx->top_pipe &&
570 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
571 	bool top_bottom_split = stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM;
572 
573 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
574 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
575 		rect_swap_helper(&surf_src);
576 
577 	pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
578 	if (stream->src.x < surf_clip.x)
579 		pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
580 			- stream->src.x) * stream->dst.width
581 						/ stream->src.width;
582 
583 	pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
584 			stream->dst.width / stream->src.width;
585 	if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
586 			stream->dst.x + stream->dst.width)
587 		pipe_ctx->plane_res.scl_data.recout.width =
588 			stream->dst.x + stream->dst.width
589 						- pipe_ctx->plane_res.scl_data.recout.x;
590 
591 	pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
592 	if (stream->src.y < surf_clip.y)
593 		pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
594 			- stream->src.y) * stream->dst.height
595 						/ stream->src.height;
596 
597 	pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
598 			stream->dst.height / stream->src.height;
599 	if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
600 			stream->dst.y + stream->dst.height)
601 		pipe_ctx->plane_res.scl_data.recout.height =
602 			stream->dst.y + stream->dst.height
603 						- pipe_ctx->plane_res.scl_data.recout.y;
604 
605 	/* Handle h & vsplit */
606 	if (sec_split && top_bottom_split) {
607 		pipe_ctx->plane_res.scl_data.recout.y +=
608 				pipe_ctx->plane_res.scl_data.recout.height / 2;
609 		/* Floor primary pipe, ceil 2ndary pipe */
610 		pipe_ctx->plane_res.scl_data.recout.height =
611 				(pipe_ctx->plane_res.scl_data.recout.height + 1) / 2;
612 	} else if (pri_split && top_bottom_split)
613 		pipe_ctx->plane_res.scl_data.recout.height /= 2;
614 	else if (pri_split || sec_split) {
615 		/* HMirror XOR Secondary_pipe XOR Rotation_180 */
616 		bool right_view = (sec_split != plane_state->horizontal_mirror) !=
617 					(plane_state->rotation == ROTATION_ANGLE_180);
618 
619 		if (plane_state->rotation == ROTATION_ANGLE_90
620 				|| plane_state->rotation == ROTATION_ANGLE_270)
621 			/* Secondary_pipe XOR Rotation_270 */
622 			right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
623 
624 		if (right_view) {
625 			pipe_ctx->plane_res.scl_data.recout.x +=
626 					pipe_ctx->plane_res.scl_data.recout.width / 2;
627 			/* Ceil offset pipe */
628 			pipe_ctx->plane_res.scl_data.recout.width =
629 					(pipe_ctx->plane_res.scl_data.recout.width + 1) / 2;
630 		} else {
631 			pipe_ctx->plane_res.scl_data.recout.width /= 2;
632 		}
633 	}
634 	/* Unclipped recout offset = stream dst offset + ((surf dst offset - stream surf_src offset)
635 	 *			* 1/ stream scaling ratio) - (surf surf_src offset * 1/ full scl
636 	 *			ratio)
637 	 */
638 	recout_full->x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
639 					* stream->dst.width / stream->src.width -
640 			surf_src.x * plane_state->dst_rect.width / surf_src.width
641 					* stream->dst.width / stream->src.width;
642 	recout_full->y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
643 					* stream->dst.height / stream->src.height -
644 			surf_src.y * plane_state->dst_rect.height / surf_src.height
645 					* stream->dst.height / stream->src.height;
646 
647 	recout_full->width = plane_state->dst_rect.width
648 					* stream->dst.width / stream->src.width;
649 	recout_full->height = plane_state->dst_rect.height
650 					* stream->dst.height / stream->src.height;
651 }
652 
653 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
654 {
655 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
656 	const struct dc_stream_state *stream = pipe_ctx->stream;
657 	struct rect surf_src = plane_state->src_rect;
658 	const int in_w = stream->src.width;
659 	const int in_h = stream->src.height;
660 	const int out_w = stream->dst.width;
661 	const int out_h = stream->dst.height;
662 
663 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
664 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
665 		rect_swap_helper(&surf_src);
666 
667 	pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_from_fraction(
668 					surf_src.width,
669 					plane_state->dst_rect.width);
670 	pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_from_fraction(
671 					surf_src.height,
672 					plane_state->dst_rect.height);
673 
674 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
675 		pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
676 	else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
677 		pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
678 
679 	pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
680 		pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
681 	pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
682 		pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
683 
684 	pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
685 	pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
686 
687 	if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
688 			|| pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
689 		pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
690 		pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
691 	}
692 	pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_truncate(
693 			pipe_ctx->plane_res.scl_data.ratios.horz, 19);
694 	pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_truncate(
695 			pipe_ctx->plane_res.scl_data.ratios.vert, 19);
696 	pipe_ctx->plane_res.scl_data.ratios.horz_c = dc_fixpt_truncate(
697 			pipe_ctx->plane_res.scl_data.ratios.horz_c, 19);
698 	pipe_ctx->plane_res.scl_data.ratios.vert_c = dc_fixpt_truncate(
699 			pipe_ctx->plane_res.scl_data.ratios.vert_c, 19);
700 }
701 
702 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
703 {
704 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
705 	struct rect src = pipe_ctx->plane_state->src_rect;
706 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
707 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
708 	bool flip_vert_scan_dir = false, flip_horz_scan_dir = false;
709 
710 	/*
711 	 * Need to calculate the scan direction for viewport to make adjustments
712 	 */
713 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_180) {
714 		flip_vert_scan_dir = true;
715 		flip_horz_scan_dir = true;
716 	} else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90)
717 		flip_vert_scan_dir = true;
718 	else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
719 		flip_horz_scan_dir = true;
720 
721 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
722 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
723 		rect_swap_helper(&src);
724 		rect_swap_helper(&data->viewport_c);
725 		rect_swap_helper(&data->viewport);
726 	} else if (pipe_ctx->plane_state->horizontal_mirror)
727 			flip_horz_scan_dir = !flip_horz_scan_dir;
728 
729 	/*
730 	 * Init calculated according to formula:
731 	 * 	init = (scaling_ratio + number_of_taps + 1) / 2
732 	 * 	init_bot = init + scaling_ratio
733 	 * 	init_c = init + truncated_vp_c_offset(from calculate viewport)
734 	 */
735 	data->inits.h = dc_fixpt_truncate(dc_fixpt_div_int(
736 			dc_fixpt_add_int(data->ratios.horz, data->taps.h_taps + 1), 2), 19);
737 
738 	data->inits.h_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.h_c, dc_fixpt_div_int(
739 			dc_fixpt_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2)), 19);
740 
741 	data->inits.v = dc_fixpt_truncate(dc_fixpt_div_int(
742 			dc_fixpt_add_int(data->ratios.vert, data->taps.v_taps + 1), 2), 19);
743 
744 	data->inits.v_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.v_c, dc_fixpt_div_int(
745 			dc_fixpt_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2)), 19);
746 
747 	if (!flip_horz_scan_dir) {
748 		/* Adjust for viewport end clip-off */
749 		if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
750 			int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
751 			int int_part = dc_fixpt_floor(
752 					dc_fixpt_sub(data->inits.h, data->ratios.horz));
753 
754 			int_part = int_part > 0 ? int_part : 0;
755 			data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
756 		}
757 		if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
758 			int vp_clip = (src.x + src.width) / vpc_div -
759 					data->viewport_c.width - data->viewport_c.x;
760 			int int_part = dc_fixpt_floor(
761 					dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
762 
763 			int_part = int_part > 0 ? int_part : 0;
764 			data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
765 		}
766 
767 		/* Adjust for non-0 viewport offset */
768 		if (data->viewport.x) {
769 			int int_part;
770 
771 			data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
772 					data->ratios.horz, data->recout.x - recout_full->x));
773 			int_part = dc_fixpt_floor(data->inits.h) - data->viewport.x;
774 			if (int_part < data->taps.h_taps) {
775 				int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
776 							(data->taps.h_taps - int_part) : data->viewport.x;
777 				data->viewport.x -= int_adj;
778 				data->viewport.width += int_adj;
779 				int_part += int_adj;
780 			} else if (int_part > data->taps.h_taps) {
781 				data->viewport.x += int_part - data->taps.h_taps;
782 				data->viewport.width -= int_part - data->taps.h_taps;
783 				int_part = data->taps.h_taps;
784 			}
785 			data->inits.h.value &= 0xffffffff;
786 			data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
787 		}
788 
789 		if (data->viewport_c.x) {
790 			int int_part;
791 
792 			data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
793 					data->ratios.horz_c, data->recout.x - recout_full->x));
794 			int_part = dc_fixpt_floor(data->inits.h_c) - data->viewport_c.x;
795 			if (int_part < data->taps.h_taps_c) {
796 				int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
797 						(data->taps.h_taps_c - int_part) : data->viewport_c.x;
798 				data->viewport_c.x -= int_adj;
799 				data->viewport_c.width += int_adj;
800 				int_part += int_adj;
801 			} else if (int_part > data->taps.h_taps_c) {
802 				data->viewport_c.x += int_part - data->taps.h_taps_c;
803 				data->viewport_c.width -= int_part - data->taps.h_taps_c;
804 				int_part = data->taps.h_taps_c;
805 			}
806 			data->inits.h_c.value &= 0xffffffff;
807 			data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
808 		}
809 	} else {
810 		/* Adjust for non-0 viewport offset */
811 		if (data->viewport.x) {
812 			int int_part = dc_fixpt_floor(
813 					dc_fixpt_sub(data->inits.h, data->ratios.horz));
814 
815 			int_part = int_part > 0 ? int_part : 0;
816 			data->viewport.width += int_part < data->viewport.x ? int_part : data->viewport.x;
817 			data->viewport.x -= int_part < data->viewport.x ? int_part : data->viewport.x;
818 		}
819 		if (data->viewport_c.x) {
820 			int int_part = dc_fixpt_floor(
821 					dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
822 
823 			int_part = int_part > 0 ? int_part : 0;
824 			data->viewport_c.width += int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
825 			data->viewport_c.x -= int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
826 		}
827 
828 		/* Adjust for viewport end clip-off */
829 		if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
830 			int int_part;
831 			int end_offset = src.x + src.width
832 					- data->viewport.x - data->viewport.width;
833 
834 			/*
835 			 * this is init if vp had no offset, keep in mind this is from the
836 			 * right side of vp due to scan direction
837 			 */
838 			data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
839 					data->ratios.horz, data->recout.x - recout_full->x));
840 			/*
841 			 * this is the difference between first pixel of viewport available to read
842 			 * and init position, takning into account scan direction
843 			 */
844 			int_part = dc_fixpt_floor(data->inits.h) - end_offset;
845 			if (int_part < data->taps.h_taps) {
846 				int int_adj = end_offset >= (data->taps.h_taps - int_part) ?
847 							(data->taps.h_taps - int_part) : end_offset;
848 				data->viewport.width += int_adj;
849 				int_part += int_adj;
850 			} else if (int_part > data->taps.h_taps) {
851 				data->viewport.width += int_part - data->taps.h_taps;
852 				int_part = data->taps.h_taps;
853 			}
854 			data->inits.h.value &= 0xffffffff;
855 			data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
856 		}
857 
858 		if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
859 			int int_part;
860 			int end_offset = (src.x + src.width) / vpc_div
861 					- data->viewport_c.x - data->viewport_c.width;
862 
863 			/*
864 			 * this is init if vp had no offset, keep in mind this is from the
865 			 * right side of vp due to scan direction
866 			 */
867 			data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
868 					data->ratios.horz_c, data->recout.x - recout_full->x));
869 			/*
870 			 * this is the difference between first pixel of viewport available to read
871 			 * and init position, takning into account scan direction
872 			 */
873 			int_part = dc_fixpt_floor(data->inits.h_c) - end_offset;
874 			if (int_part < data->taps.h_taps_c) {
875 				int int_adj = end_offset >= (data->taps.h_taps_c - int_part) ?
876 							(data->taps.h_taps_c - int_part) : end_offset;
877 				data->viewport_c.width += int_adj;
878 				int_part += int_adj;
879 			} else if (int_part > data->taps.h_taps_c) {
880 				data->viewport_c.width += int_part - data->taps.h_taps_c;
881 				int_part = data->taps.h_taps_c;
882 			}
883 			data->inits.h_c.value &= 0xffffffff;
884 			data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
885 		}
886 
887 	}
888 	if (!flip_vert_scan_dir) {
889 		/* Adjust for viewport end clip-off */
890 		if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
891 			int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
892 			int int_part = dc_fixpt_floor(
893 					dc_fixpt_sub(data->inits.v, data->ratios.vert));
894 
895 			int_part = int_part > 0 ? int_part : 0;
896 			data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
897 		}
898 		if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
899 			int vp_clip = (src.y + src.height) / vpc_div -
900 					data->viewport_c.height - data->viewport_c.y;
901 			int int_part = dc_fixpt_floor(
902 					dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
903 
904 			int_part = int_part > 0 ? int_part : 0;
905 			data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
906 		}
907 
908 		/* Adjust for non-0 viewport offset */
909 		if (data->viewport.y) {
910 			int int_part;
911 
912 			data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
913 					data->ratios.vert, data->recout.y - recout_full->y));
914 			int_part = dc_fixpt_floor(data->inits.v) - data->viewport.y;
915 			if (int_part < data->taps.v_taps) {
916 				int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
917 							(data->taps.v_taps - int_part) : data->viewport.y;
918 				data->viewport.y -= int_adj;
919 				data->viewport.height += int_adj;
920 				int_part += int_adj;
921 			} else if (int_part > data->taps.v_taps) {
922 				data->viewport.y += int_part - data->taps.v_taps;
923 				data->viewport.height -= int_part - data->taps.v_taps;
924 				int_part = data->taps.v_taps;
925 			}
926 			data->inits.v.value &= 0xffffffff;
927 			data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
928 		}
929 
930 		if (data->viewport_c.y) {
931 			int int_part;
932 
933 			data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
934 					data->ratios.vert_c, data->recout.y - recout_full->y));
935 			int_part = dc_fixpt_floor(data->inits.v_c) - data->viewport_c.y;
936 			if (int_part < data->taps.v_taps_c) {
937 				int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
938 						(data->taps.v_taps_c - int_part) : data->viewport_c.y;
939 				data->viewport_c.y -= int_adj;
940 				data->viewport_c.height += int_adj;
941 				int_part += int_adj;
942 			} else if (int_part > data->taps.v_taps_c) {
943 				data->viewport_c.y += int_part - data->taps.v_taps_c;
944 				data->viewport_c.height -= int_part - data->taps.v_taps_c;
945 				int_part = data->taps.v_taps_c;
946 			}
947 			data->inits.v_c.value &= 0xffffffff;
948 			data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
949 		}
950 	} else {
951 		/* Adjust for non-0 viewport offset */
952 		if (data->viewport.y) {
953 			int int_part = dc_fixpt_floor(
954 					dc_fixpt_sub(data->inits.v, data->ratios.vert));
955 
956 			int_part = int_part > 0 ? int_part : 0;
957 			data->viewport.height += int_part < data->viewport.y ? int_part : data->viewport.y;
958 			data->viewport.y -= int_part < data->viewport.y ? int_part : data->viewport.y;
959 		}
960 		if (data->viewport_c.y) {
961 			int int_part = dc_fixpt_floor(
962 					dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
963 
964 			int_part = int_part > 0 ? int_part : 0;
965 			data->viewport_c.height += int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
966 			data->viewport_c.y -= int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
967 		}
968 
969 		/* Adjust for viewport end clip-off */
970 		if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
971 			int int_part;
972 			int end_offset = src.y + src.height
973 					- data->viewport.y - data->viewport.height;
974 
975 			/*
976 			 * this is init if vp had no offset, keep in mind this is from the
977 			 * right side of vp due to scan direction
978 			 */
979 			data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
980 					data->ratios.vert, data->recout.y - recout_full->y));
981 			/*
982 			 * this is the difference between first pixel of viewport available to read
983 			 * and init position, taking into account scan direction
984 			 */
985 			int_part = dc_fixpt_floor(data->inits.v) - end_offset;
986 			if (int_part < data->taps.v_taps) {
987 				int int_adj = end_offset >= (data->taps.v_taps - int_part) ?
988 							(data->taps.v_taps - int_part) : end_offset;
989 				data->viewport.height += int_adj;
990 				int_part += int_adj;
991 			} else if (int_part > data->taps.v_taps) {
992 				data->viewport.height += int_part - data->taps.v_taps;
993 				int_part = data->taps.v_taps;
994 			}
995 			data->inits.v.value &= 0xffffffff;
996 			data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
997 		}
998 
999 		if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
1000 			int int_part;
1001 			int end_offset = (src.y + src.height) / vpc_div
1002 					- data->viewport_c.y - data->viewport_c.height;
1003 
1004 			/*
1005 			 * this is init if vp had no offset, keep in mind this is from the
1006 			 * right side of vp due to scan direction
1007 			 */
1008 			data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
1009 					data->ratios.vert_c, data->recout.y - recout_full->y));
1010 			/*
1011 			 * this is the difference between first pixel of viewport available to read
1012 			 * and init position, taking into account scan direction
1013 			 */
1014 			int_part = dc_fixpt_floor(data->inits.v_c) - end_offset;
1015 			if (int_part < data->taps.v_taps_c) {
1016 				int int_adj = end_offset >= (data->taps.v_taps_c - int_part) ?
1017 							(data->taps.v_taps_c - int_part) : end_offset;
1018 				data->viewport_c.height += int_adj;
1019 				int_part += int_adj;
1020 			} else if (int_part > data->taps.v_taps_c) {
1021 				data->viewport_c.height += int_part - data->taps.v_taps_c;
1022 				int_part = data->taps.v_taps_c;
1023 			}
1024 			data->inits.v_c.value &= 0xffffffff;
1025 			data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
1026 		}
1027 	}
1028 
1029 	/* Interlaced inits based on final vert inits */
1030 	data->inits.v_bot = dc_fixpt_add(data->inits.v, data->ratios.vert);
1031 	data->inits.v_c_bot = dc_fixpt_add(data->inits.v_c, data->ratios.vert_c);
1032 
1033 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
1034 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
1035 		rect_swap_helper(&data->viewport_c);
1036 		rect_swap_helper(&data->viewport);
1037 	}
1038 }
1039 
1040 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
1041 {
1042 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1043 	struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
1044 	struct rect recout_full = { 0 };
1045 	bool res = false;
1046 	DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
1047 	/* Important: scaling ratio calculation requires pixel format,
1048 	 * lb depth calculation requires recout and taps require scaling ratios.
1049 	 * Inits require viewport, taps, ratios and recout of split pipe
1050 	 */
1051 	pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
1052 			pipe_ctx->plane_state->format);
1053 
1054 	if (pipe_ctx->stream->timing.flags.INTERLACE)
1055 		pipe_ctx->stream->dst.height *= 2;
1056 
1057 	calculate_scaling_ratios(pipe_ctx);
1058 
1059 	calculate_viewport(pipe_ctx);
1060 
1061 	if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
1062 		return false;
1063 
1064 	calculate_recout(pipe_ctx, &recout_full);
1065 
1066 	/**
1067 	 * Setting line buffer pixel depth to 24bpp yields banding
1068 	 * on certain displays, such as the Sharp 4k
1069 	 */
1070 	pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
1071 
1072 	pipe_ctx->plane_res.scl_data.recout.x += timing->h_border_left;
1073 	pipe_ctx->plane_res.scl_data.recout.y += timing->v_border_top;
1074 
1075 	pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable + timing->h_border_left + timing->h_border_right;
1076 	pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable + timing->v_border_top + timing->v_border_bottom;
1077 	if (pipe_ctx->stream->timing.flags.INTERLACE)
1078 		pipe_ctx->plane_res.scl_data.v_active *= 2;
1079 
1080 
1081 	/* Taps calculations */
1082 	if (pipe_ctx->plane_res.xfm != NULL)
1083 		res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1084 				pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1085 
1086 	if (pipe_ctx->plane_res.dpp != NULL)
1087 		res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1088 				pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1089 	if (!res) {
1090 		/* Try 24 bpp linebuffer */
1091 		pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
1092 
1093 		if (pipe_ctx->plane_res.xfm != NULL)
1094 			res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1095 					pipe_ctx->plane_res.xfm,
1096 					&pipe_ctx->plane_res.scl_data,
1097 					&plane_state->scaling_quality);
1098 
1099 		if (pipe_ctx->plane_res.dpp != NULL)
1100 			res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1101 					pipe_ctx->plane_res.dpp,
1102 					&pipe_ctx->plane_res.scl_data,
1103 					&plane_state->scaling_quality);
1104 	}
1105 
1106 	if (res)
1107 		/* May need to re-check lb size after this in some obscure scenario */
1108 		calculate_inits_and_adj_vp(pipe_ctx, &recout_full);
1109 
1110 	DC_LOG_SCALER(
1111 				"%s: Viewport:\nheight:%d width:%d x:%d "
1112 				"y:%d\n dst_rect:\nheight:%d width:%d x:%d "
1113 				"y:%d\n",
1114 				__func__,
1115 				pipe_ctx->plane_res.scl_data.viewport.height,
1116 				pipe_ctx->plane_res.scl_data.viewport.width,
1117 				pipe_ctx->plane_res.scl_data.viewport.x,
1118 				pipe_ctx->plane_res.scl_data.viewport.y,
1119 				plane_state->dst_rect.height,
1120 				plane_state->dst_rect.width,
1121 				plane_state->dst_rect.x,
1122 				plane_state->dst_rect.y);
1123 
1124 	if (pipe_ctx->stream->timing.flags.INTERLACE)
1125 		pipe_ctx->stream->dst.height /= 2;
1126 
1127 	return res;
1128 }
1129 
1130 
1131 enum dc_status resource_build_scaling_params_for_context(
1132 	const struct dc  *dc,
1133 	struct dc_state *context)
1134 {
1135 	int i;
1136 
1137 	for (i = 0; i < MAX_PIPES; i++) {
1138 		if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
1139 				context->res_ctx.pipe_ctx[i].stream != NULL)
1140 			if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
1141 				return DC_FAIL_SCALING;
1142 	}
1143 
1144 	return DC_OK;
1145 }
1146 
1147 struct pipe_ctx *find_idle_secondary_pipe(
1148 		struct resource_context *res_ctx,
1149 		const struct resource_pool *pool)
1150 {
1151 	int i;
1152 	struct pipe_ctx *secondary_pipe = NULL;
1153 
1154 	/*
1155 	 * search backwards for the second pipe to keep pipe
1156 	 * assignment more consistent
1157 	 */
1158 
1159 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1160 		if (res_ctx->pipe_ctx[i].stream == NULL) {
1161 			secondary_pipe = &res_ctx->pipe_ctx[i];
1162 			secondary_pipe->pipe_idx = i;
1163 			break;
1164 		}
1165 	}
1166 
1167 
1168 	return secondary_pipe;
1169 }
1170 
1171 struct pipe_ctx *resource_get_head_pipe_for_stream(
1172 		struct resource_context *res_ctx,
1173 		struct dc_stream_state *stream)
1174 {
1175 	int i;
1176 	for (i = 0; i < MAX_PIPES; i++) {
1177 		if (res_ctx->pipe_ctx[i].stream == stream &&
1178 				!res_ctx->pipe_ctx[i].top_pipe) {
1179 			return &res_ctx->pipe_ctx[i];
1180 			break;
1181 		}
1182 	}
1183 	return NULL;
1184 }
1185 
1186 static struct pipe_ctx *resource_get_tail_pipe_for_stream(
1187 		struct resource_context *res_ctx,
1188 		struct dc_stream_state *stream)
1189 {
1190 	struct pipe_ctx *head_pipe, *tail_pipe;
1191 	head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1192 
1193 	if (!head_pipe)
1194 		return NULL;
1195 
1196 	tail_pipe = head_pipe->bottom_pipe;
1197 
1198 	while (tail_pipe) {
1199 		head_pipe = tail_pipe;
1200 		tail_pipe = tail_pipe->bottom_pipe;
1201 	}
1202 
1203 	return head_pipe;
1204 }
1205 
1206 /*
1207  * A free_pipe for a stream is defined here as a pipe
1208  * that has no surface attached yet
1209  */
1210 static struct pipe_ctx *acquire_free_pipe_for_stream(
1211 		struct dc_state *context,
1212 		const struct resource_pool *pool,
1213 		struct dc_stream_state *stream)
1214 {
1215 	int i;
1216 	struct resource_context *res_ctx = &context->res_ctx;
1217 
1218 	struct pipe_ctx *head_pipe = NULL;
1219 
1220 	/* Find head pipe, which has the back end set up*/
1221 
1222 	head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1223 
1224 	if (!head_pipe) {
1225 		ASSERT(0);
1226 		return NULL;
1227 	}
1228 
1229 	if (!head_pipe->plane_state)
1230 		return head_pipe;
1231 
1232 	/* Re-use pipe already acquired for this stream if available*/
1233 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1234 		if (res_ctx->pipe_ctx[i].stream == stream &&
1235 				!res_ctx->pipe_ctx[i].plane_state) {
1236 			return &res_ctx->pipe_ctx[i];
1237 		}
1238 	}
1239 
1240 	/*
1241 	 * At this point we have no re-useable pipe for this stream and we need
1242 	 * to acquire an idle one to satisfy the request
1243 	 */
1244 
1245 	if (!pool->funcs->acquire_idle_pipe_for_layer)
1246 		return NULL;
1247 
1248 	return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
1249 
1250 }
1251 
1252 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1253 static int acquire_first_split_pipe(
1254 		struct resource_context *res_ctx,
1255 		const struct resource_pool *pool,
1256 		struct dc_stream_state *stream)
1257 {
1258 	int i;
1259 
1260 	for (i = 0; i < pool->pipe_count; i++) {
1261 		struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1262 
1263 		if (pipe_ctx->top_pipe &&
1264 				pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
1265 			pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1266 			if (pipe_ctx->bottom_pipe)
1267 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1268 
1269 			memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1270 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1271 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1272 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1273 			pipe_ctx->plane_res.dpp = pool->dpps[i];
1274 			pipe_ctx->stream_res.opp = pool->opps[i];
1275 			pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1276 			pipe_ctx->pipe_idx = i;
1277 
1278 			pipe_ctx->stream = stream;
1279 			return i;
1280 		}
1281 	}
1282 	return -1;
1283 }
1284 #endif
1285 
1286 bool dc_add_plane_to_context(
1287 		const struct dc *dc,
1288 		struct dc_stream_state *stream,
1289 		struct dc_plane_state *plane_state,
1290 		struct dc_state *context)
1291 {
1292 	int i;
1293 	struct resource_pool *pool = dc->res_pool;
1294 	struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
1295 	struct dc_stream_status *stream_status = NULL;
1296 
1297 	for (i = 0; i < context->stream_count; i++)
1298 		if (context->streams[i] == stream) {
1299 			stream_status = &context->stream_status[i];
1300 			break;
1301 		}
1302 	if (stream_status == NULL) {
1303 		dm_error("Existing stream not found; failed to attach surface!\n");
1304 		return false;
1305 	}
1306 
1307 
1308 	if (stream_status->plane_count == MAX_SURFACE_NUM) {
1309 		dm_error("Surface: can not attach plane_state %p! Maximum is: %d\n",
1310 				plane_state, MAX_SURFACE_NUM);
1311 		return false;
1312 	}
1313 
1314 	head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
1315 
1316 	if (!head_pipe) {
1317 		dm_error("Head pipe not found for stream_state %p !\n", stream);
1318 		return false;
1319 	}
1320 
1321 	free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
1322 
1323 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1324 	if (!free_pipe) {
1325 		int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1326 		if (pipe_idx >= 0)
1327 			free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1328 	}
1329 #endif
1330 	if (!free_pipe)
1331 		return false;
1332 
1333 	/* retain new surfaces */
1334 	dc_plane_state_retain(plane_state);
1335 	free_pipe->plane_state = plane_state;
1336 
1337 	if (head_pipe != free_pipe) {
1338 
1339 		tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
1340 		ASSERT(tail_pipe);
1341 
1342 		free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
1343 		free_pipe->stream_res.abm = tail_pipe->stream_res.abm;
1344 		free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
1345 		free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
1346 		free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
1347 		free_pipe->clock_source = tail_pipe->clock_source;
1348 		free_pipe->top_pipe = tail_pipe;
1349 		tail_pipe->bottom_pipe = free_pipe;
1350 	}
1351 
1352 	/* assign new surfaces*/
1353 	stream_status->plane_states[stream_status->plane_count] = plane_state;
1354 
1355 	stream_status->plane_count++;
1356 
1357 	return true;
1358 }
1359 
1360 bool dc_remove_plane_from_context(
1361 		const struct dc *dc,
1362 		struct dc_stream_state *stream,
1363 		struct dc_plane_state *plane_state,
1364 		struct dc_state *context)
1365 {
1366 	int i;
1367 	struct dc_stream_status *stream_status = NULL;
1368 	struct resource_pool *pool = dc->res_pool;
1369 
1370 	for (i = 0; i < context->stream_count; i++)
1371 		if (context->streams[i] == stream) {
1372 			stream_status = &context->stream_status[i];
1373 			break;
1374 		}
1375 
1376 	if (stream_status == NULL) {
1377 		dm_error("Existing stream not found; failed to remove plane.\n");
1378 		return false;
1379 	}
1380 
1381 	/* release pipe for plane*/
1382 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1383 		struct pipe_ctx *pipe_ctx;
1384 
1385 		if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) {
1386 			pipe_ctx = &context->res_ctx.pipe_ctx[i];
1387 
1388 			if (pipe_ctx->top_pipe)
1389 				pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1390 
1391 			/* Second condition is to avoid setting NULL to top pipe
1392 			 * of tail pipe making it look like head pipe in subsequent
1393 			 * deletes
1394 			 */
1395 			if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
1396 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1397 
1398 			/*
1399 			 * For head pipe detach surfaces from pipe for tail
1400 			 * pipe just zero it out
1401 			 */
1402 			if (!pipe_ctx->top_pipe || (!pipe_ctx->top_pipe->top_pipe &&
1403 					pipe_ctx->top_pipe->stream_res.opp != pipe_ctx->stream_res.opp)) {
1404 				pipe_ctx->top_pipe = NULL;
1405 				pipe_ctx->plane_state = NULL;
1406 				pipe_ctx->bottom_pipe = NULL;
1407 			} else {
1408 				memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1409 			}
1410 		}
1411 	}
1412 
1413 
1414 	for (i = 0; i < stream_status->plane_count; i++) {
1415 		if (stream_status->plane_states[i] == plane_state) {
1416 
1417 			dc_plane_state_release(stream_status->plane_states[i]);
1418 			break;
1419 		}
1420 	}
1421 
1422 	if (i == stream_status->plane_count) {
1423 		dm_error("Existing plane_state not found; failed to detach it!\n");
1424 		return false;
1425 	}
1426 
1427 	stream_status->plane_count--;
1428 
1429 	/* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */
1430 	for (; i < stream_status->plane_count; i++)
1431 		stream_status->plane_states[i] = stream_status->plane_states[i + 1];
1432 
1433 	stream_status->plane_states[stream_status->plane_count] = NULL;
1434 
1435 	return true;
1436 }
1437 
1438 bool dc_rem_all_planes_for_stream(
1439 		const struct dc *dc,
1440 		struct dc_stream_state *stream,
1441 		struct dc_state *context)
1442 {
1443 	int i, old_plane_count;
1444 	struct dc_stream_status *stream_status = NULL;
1445 	struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
1446 
1447 	for (i = 0; i < context->stream_count; i++)
1448 			if (context->streams[i] == stream) {
1449 				stream_status = &context->stream_status[i];
1450 				break;
1451 			}
1452 
1453 	if (stream_status == NULL) {
1454 		dm_error("Existing stream %p not found!\n", stream);
1455 		return false;
1456 	}
1457 
1458 	old_plane_count = stream_status->plane_count;
1459 
1460 	for (i = 0; i < old_plane_count; i++)
1461 		del_planes[i] = stream_status->plane_states[i];
1462 
1463 	for (i = 0; i < old_plane_count; i++)
1464 		if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
1465 			return false;
1466 
1467 	return true;
1468 }
1469 
1470 static bool add_all_planes_for_stream(
1471 		const struct dc *dc,
1472 		struct dc_stream_state *stream,
1473 		const struct dc_validation_set set[],
1474 		int set_count,
1475 		struct dc_state *context)
1476 {
1477 	int i, j;
1478 
1479 	for (i = 0; i < set_count; i++)
1480 		if (set[i].stream == stream)
1481 			break;
1482 
1483 	if (i == set_count) {
1484 		dm_error("Stream %p not found in set!\n", stream);
1485 		return false;
1486 	}
1487 
1488 	for (j = 0; j < set[i].plane_count; j++)
1489 		if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
1490 			return false;
1491 
1492 	return true;
1493 }
1494 
1495 bool dc_add_all_planes_for_stream(
1496 		const struct dc *dc,
1497 		struct dc_stream_state *stream,
1498 		struct dc_plane_state * const *plane_states,
1499 		int plane_count,
1500 		struct dc_state *context)
1501 {
1502 	struct dc_validation_set set;
1503 	int i;
1504 
1505 	set.stream = stream;
1506 	set.plane_count = plane_count;
1507 
1508 	for (i = 0; i < plane_count; i++)
1509 		set.plane_states[i] = plane_states[i];
1510 
1511 	return add_all_planes_for_stream(dc, stream, &set, 1, context);
1512 }
1513 
1514 
1515 static bool is_hdr_static_meta_changed(struct dc_stream_state *cur_stream,
1516 	struct dc_stream_state *new_stream)
1517 {
1518 	if (cur_stream == NULL)
1519 		return true;
1520 
1521 	if (memcmp(&cur_stream->hdr_static_metadata,
1522 			&new_stream->hdr_static_metadata,
1523 			sizeof(struct dc_info_packet)) != 0)
1524 		return true;
1525 
1526 	return false;
1527 }
1528 
1529 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1530 		struct dc_stream_state *new_stream)
1531 {
1532 	if (cur_stream == NULL)
1533 		return true;
1534 
1535 	/* If sink pointer changed, it means this is a hotplug, we should do
1536 	 * full hw setting.
1537 	 */
1538 	if (cur_stream->sink != new_stream->sink)
1539 		return true;
1540 
1541 	/* If output color space is changed, need to reprogram info frames */
1542 	if (cur_stream->output_color_space != new_stream->output_color_space)
1543 		return true;
1544 
1545 	return memcmp(
1546 		&cur_stream->timing,
1547 		&new_stream->timing,
1548 		sizeof(struct dc_crtc_timing)) != 0;
1549 }
1550 
1551 static bool are_stream_backends_same(
1552 	struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1553 {
1554 	if (stream_a == stream_b)
1555 		return true;
1556 
1557 	if (stream_a == NULL || stream_b == NULL)
1558 		return false;
1559 
1560 	if (is_timing_changed(stream_a, stream_b))
1561 		return false;
1562 
1563 	if (is_hdr_static_meta_changed(stream_a, stream_b))
1564 		return false;
1565 
1566 	return true;
1567 }
1568 
1569 bool dc_is_stream_unchanged(
1570 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1571 {
1572 
1573 	if (!are_stream_backends_same(old_stream, stream))
1574 		return false;
1575 
1576 	return true;
1577 }
1578 
1579 bool dc_is_stream_scaling_unchanged(
1580 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1581 {
1582 	if (old_stream == stream)
1583 		return true;
1584 
1585 	if (old_stream == NULL || stream == NULL)
1586 		return false;
1587 
1588 	if (memcmp(&old_stream->src,
1589 			&stream->src,
1590 			sizeof(struct rect)) != 0)
1591 		return false;
1592 
1593 	if (memcmp(&old_stream->dst,
1594 			&stream->dst,
1595 			sizeof(struct rect)) != 0)
1596 		return false;
1597 
1598 	return true;
1599 }
1600 
1601 static void update_stream_engine_usage(
1602 		struct resource_context *res_ctx,
1603 		const struct resource_pool *pool,
1604 		struct stream_encoder *stream_enc,
1605 		bool acquired)
1606 {
1607 	int i;
1608 
1609 	for (i = 0; i < pool->stream_enc_count; i++) {
1610 		if (pool->stream_enc[i] == stream_enc)
1611 			res_ctx->is_stream_enc_acquired[i] = acquired;
1612 	}
1613 }
1614 
1615 /* TODO: release audio object */
1616 void update_audio_usage(
1617 		struct resource_context *res_ctx,
1618 		const struct resource_pool *pool,
1619 		struct audio *audio,
1620 		bool acquired)
1621 {
1622 	int i;
1623 	for (i = 0; i < pool->audio_count; i++) {
1624 		if (pool->audios[i] == audio)
1625 			res_ctx->is_audio_acquired[i] = acquired;
1626 	}
1627 }
1628 
1629 static int acquire_first_free_pipe(
1630 		struct resource_context *res_ctx,
1631 		const struct resource_pool *pool,
1632 		struct dc_stream_state *stream)
1633 {
1634 	int i;
1635 
1636 	for (i = 0; i < pool->pipe_count; i++) {
1637 		if (!res_ctx->pipe_ctx[i].stream) {
1638 			struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1639 
1640 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1641 			pipe_ctx->plane_res.mi = pool->mis[i];
1642 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1643 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1644 			pipe_ctx->plane_res.xfm = pool->transforms[i];
1645 			pipe_ctx->plane_res.dpp = pool->dpps[i];
1646 			pipe_ctx->stream_res.opp = pool->opps[i];
1647 			if (pool->dpps[i])
1648 				pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1649 			pipe_ctx->pipe_idx = i;
1650 
1651 
1652 			pipe_ctx->stream = stream;
1653 			return i;
1654 		}
1655 	}
1656 	return -1;
1657 }
1658 
1659 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1660 		struct resource_context *res_ctx,
1661 		const struct resource_pool *pool,
1662 		struct dc_stream_state *stream)
1663 {
1664 	int i;
1665 	int j = -1;
1666 	struct dc_link *link = stream->sink->link;
1667 
1668 	for (i = 0; i < pool->stream_enc_count; i++) {
1669 		if (!res_ctx->is_stream_enc_acquired[i] &&
1670 				pool->stream_enc[i]) {
1671 			/* Store first available for MST second display
1672 			 * in daisy chain use case */
1673 			j = i;
1674 			if (pool->stream_enc[i]->id ==
1675 					link->link_enc->preferred_engine)
1676 				return pool->stream_enc[i];
1677 		}
1678 	}
1679 
	/*
	 * The below can happen when the stream encoder is already acquired:
	 * 1) for the second MST display in a chain, so the preferred engine
	 * is already acquired;
	 * 2) for another link whose preferred engine is already acquired by
	 * some MST configuration.
	 *
	 * If the signal is of DP type and the preferred engine was not found,
	 * return the last available one.
	 *
	 * TODO - This is just a patch up and a generic solution is
	 * required for non DP connectors.
	 */
1692 
1693 	if (j >= 0 && dc_is_dp_signal(stream->signal))
1694 		return pool->stream_enc[j];
1695 
1696 	return NULL;
1697 }
1698 
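/*
 * Pick an audio resource for the given stream engine. Preference order:
 * 1) the free audio instance whose index matches the engine id, provided
 *    the stream encoder slot with the same index is acquired;
 * 2) the free audio instance indexed directly by the engine id;
 * 3) the first free audio instance (first come, first served).
 */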
1699 static struct audio *find_first_free_audio(
1700 		struct resource_context *res_ctx,
1701 		const struct resource_pool *pool,
1702 		enum engine_id id)
1703 {
1704 	int i, available_audio_count;
1705 
1706 	available_audio_count = pool->audio_count;
1707 
1708 	for (i = 0; i < available_audio_count; i++) {
1709 		if ((res_ctx->is_audio_acquired[i] == false) && (res_ctx->is_stream_enc_acquired[i] == true)) {
			/* we have enough audio endpoints, find the matching instance */
1711 			if (id != i)
1712 				continue;
1713 			return pool->audios[i];
1714 		}
1715 	}
1716 
1717 	/* use engine id to find free audio */
1718 	if ((id < available_audio_count) && (res_ctx->is_audio_acquired[id] == false)) {
1719 		return pool->audios[id];
1720 	}
	/* no matching one found, fall back to first come, first served */
1722 	for (i = 0; i < available_audio_count; i++) {
1723 		if (res_ctx->is_audio_acquired[i] == false) {
1724 			return pool->audios[i];
1725 		}
1726 	}
	return NULL;
1728 }
1729 
1730 bool resource_is_stream_unchanged(
1731 	struct dc_state *old_context, struct dc_stream_state *stream)
1732 {
1733 	int i;
1734 
1735 	for (i = 0; i < old_context->stream_count; i++) {
1736 		struct dc_stream_state *old_stream = old_context->streams[i];
1737 
1738 		if (are_stream_backends_same(old_stream, stream))
1739 				return true;
1740 	}
1741 
1742 	return false;
1743 }
1744 
1745 enum dc_status dc_add_stream_to_ctx(
1746 		struct dc *dc,
1747 		struct dc_state *new_ctx,
1748 		struct dc_stream_state *stream)
1749 {
1750 	struct dc_context *dc_ctx = dc->ctx;
1751 	enum dc_status res;
1752 
1753 	if (new_ctx->stream_count >= dc->res_pool->timing_generator_count) {
1754 		DC_ERROR("Max streams reached, can't add stream %p !\n", stream);
1755 		return DC_ERROR_UNEXPECTED;
1756 	}
1757 
1758 	new_ctx->streams[new_ctx->stream_count] = stream;
1759 	dc_stream_retain(stream);
1760 	new_ctx->stream_count++;
1761 
1762 	res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
1763 	if (res != DC_OK)
1764 		DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res);
1765 
1766 	return res;
1767 }
1768 
1769 enum dc_status dc_remove_stream_from_ctx(
1770 			struct dc *dc,
1771 			struct dc_state *new_ctx,
1772 			struct dc_stream_state *stream)
1773 {
1774 	int i;
1775 	struct dc_context *dc_ctx = dc->ctx;
1776 	struct pipe_ctx *del_pipe = NULL;
1777 
1778 	/* Release primary pipe */
1779 	for (i = 0; i < MAX_PIPES; i++) {
1780 		if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
1781 				!new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
1782 			del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
1783 
1784 			ASSERT(del_pipe->stream_res.stream_enc);
1785 			update_stream_engine_usage(
1786 					&new_ctx->res_ctx,
					dc->res_pool,
1788 					del_pipe->stream_res.stream_enc,
1789 					false);
1790 
1791 			if (del_pipe->stream_res.audio)
1792 				update_audio_usage(
1793 					&new_ctx->res_ctx,
1794 					dc->res_pool,
1795 					del_pipe->stream_res.audio,
1796 					false);
1797 
1798 			resource_unreference_clock_source(&new_ctx->res_ctx,
1799 							  dc->res_pool,
1800 							  del_pipe->clock_source);
1801 
1802 			if (dc->res_pool->funcs->remove_stream_from_ctx)
1803 				dc->res_pool->funcs->remove_stream_from_ctx(dc, new_ctx, stream);
1804 
1805 			memset(del_pipe, 0, sizeof(*del_pipe));
1806 		}
1807 	}
1808 
1809 	if (!del_pipe) {
1810 		DC_ERROR("Pipe not found for stream %p !\n", stream);
1811 		return DC_ERROR_UNEXPECTED;
1812 	}
1813 
1814 	for (i = 0; i < new_ctx->stream_count; i++)
1815 		if (new_ctx->streams[i] == stream)
1816 			break;
1817 
1818 	if (new_ctx->streams[i] != stream) {
1819 		DC_ERROR("Context doesn't have stream %p !\n", stream);
1820 		return DC_ERROR_UNEXPECTED;
1821 	}
1822 
1823 	dc_stream_release(new_ctx->streams[i]);
1824 	new_ctx->stream_count--;
1825 
1826 	/* Trim back arrays */
1827 	for (; i < new_ctx->stream_count; i++) {
1828 		new_ctx->streams[i] = new_ctx->streams[i + 1];
1829 		new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
1830 	}
1831 
1832 	new_ctx->streams[new_ctx->stream_count] = NULL;
1833 	memset(
1834 			&new_ctx->stream_status[new_ctx->stream_count],
1835 			0,
1836 			sizeof(new_ctx->stream_status[0]));
1837 
1838 	return DC_OK;
1839 }
1840 
1841 static struct dc_stream_state *find_pll_sharable_stream(
1842 		struct dc_stream_state *stream_needs_pll,
1843 		struct dc_state *context)
1844 {
1845 	int i;
1846 
1847 	for (i = 0; i < context->stream_count; i++) {
1848 		struct dc_stream_state *stream_has_pll = context->streams[i];
1849 
		/* We are looking for a non-DP, non-virtual stream */
1851 		if (resource_are_streams_timing_synchronizable(
1852 			stream_needs_pll, stream_has_pll)
1853 			&& !dc_is_dp_signal(stream_has_pll->signal)
1854 			&& stream_has_pll->sink->link->connector_signal
1855 			!= SIGNAL_TYPE_VIRTUAL)
1856 			return stream_has_pll;
1857 
1858 	}
1859 
1860 	return NULL;
1861 }
1862 
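/*
 * Normalize the pixel clock to its 24bpp (8-bit RGB/4:4:4) equivalent:
 * YCbCr 4:2:0 is halved first, 4:2:2 is left untouched and deep color
 * timings are scaled up by bits-per-pixel / 24. For example (illustrative
 * numbers only), a 10-bit 4:4:4 timing at 148500 kHz normalizes to
 * 148500 * 30 / 24 = 185625 kHz.
 */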
1863 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1864 {
1865 	uint32_t pix_clk = timing->pix_clk_khz;
1866 	uint32_t normalized_pix_clk = pix_clk;
1867 
1868 	if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1869 		pix_clk /= 2;
1870 	if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1871 		switch (timing->display_color_depth) {
1872 		case COLOR_DEPTH_666:
1873 		case COLOR_DEPTH_888:
1874 			normalized_pix_clk = pix_clk;
1875 			break;
1876 		case COLOR_DEPTH_101010:
1877 			normalized_pix_clk = (pix_clk * 30) / 24;
1878 			break;
1879 		case COLOR_DEPTH_121212:
1880 			normalized_pix_clk = (pix_clk * 36) / 24;
			break;
		case COLOR_DEPTH_161616:
			normalized_pix_clk = (pix_clk * 48) / 24;
			break;
		default:
			ASSERT(0);
			break;
1888 		}
1889 	}
1890 	return normalized_pix_clk;
1891 }
1892 
1893 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1894 {
1895 	/* update actual pixel clock on all streams */
1896 	if (dc_is_hdmi_signal(stream->signal))
1897 		stream->phy_pix_clk = get_norm_pix_clk(
1898 			&stream->timing);
1899 	else
1900 		stream->phy_pix_clk =
1901 			stream->timing.pix_clk_khz;
1902 }
1903 
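/*
 * Acquire the HW resources a stream needs in the given context: a free
 * pipe (timing generator plus plane resources), a stream encoder matching
 * the link's preferred engine where possible, an audio instance when the
 * sink and signal support audio, and ABM for embedded (eDP) signals. On
 * success the stream status is updated with the OTG and stream encoder
 * instances.
 */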
1904 enum dc_status resource_map_pool_resources(
1905 		const struct dc  *dc,
1906 		struct dc_state *context,
1907 		struct dc_stream_state *stream)
1908 {
1909 	const struct resource_pool *pool = dc->res_pool;
1910 	int i;
1911 	struct dc_context *dc_ctx = dc->ctx;
1912 	struct pipe_ctx *pipe_ctx = NULL;
1913 	int pipe_idx = -1;
1914 
1915 	/* TODO Check if this is needed */
1916 	/*if (!resource_is_stream_unchanged(old_context, stream)) {
1917 			if (stream != NULL && old_context->streams[i] != NULL) {
1918 				stream->bit_depth_params =
1919 						old_context->streams[i]->bit_depth_params;
1920 				stream->clamping = old_context->streams[i]->clamping;
1921 				continue;
1922 			}
1923 		}
1924 	*/
1925 
1926 	calculate_phy_pix_clks(stream);
1927 
1928 	/* acquire new resources */
1929 	pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
1930 
1931 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
1932 	if (pipe_idx < 0)
1933 		pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1934 #endif
1935 
1936 	if (pipe_idx < 0 || context->res_ctx.pipe_ctx[pipe_idx].stream_res.tg == NULL)
1937 		return DC_NO_CONTROLLER_RESOURCE;
1938 
1939 	pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1940 
1941 	pipe_ctx->stream_res.stream_enc =
1942 		find_first_free_match_stream_enc_for_link(
1943 			&context->res_ctx, pool, stream);
1944 
1945 	if (!pipe_ctx->stream_res.stream_enc)
1946 		return DC_NO_STREAM_ENG_RESOURCE;
1947 
1948 	update_stream_engine_usage(
1949 		&context->res_ctx, pool,
1950 		pipe_ctx->stream_res.stream_enc,
1951 		true);
1952 
1953 	/* TODO: Add check if ASIC support and EDID audio */
1954 	if (!stream->sink->converter_disable_audio &&
1955 	    dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
1956 	    stream->audio_info.mode_count && stream->audio_info.flags.all) {
1957 		pipe_ctx->stream_res.audio = find_first_free_audio(
1958 		&context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id);
1959 
		/*
		 * Audio is assigned on a first-come, first-served basis.
		 * Some ASICs have fewer audio resources than pipes.
		 */
1965 		if (pipe_ctx->stream_res.audio)
1966 			update_audio_usage(&context->res_ctx, pool,
1967 					   pipe_ctx->stream_res.audio, true);
1968 	}
1969 
1970 	/* Add ABM to the resource if on EDP */
1971 	if (pipe_ctx->stream && dc_is_embedded_signal(pipe_ctx->stream->signal))
1972 		pipe_ctx->stream_res.abm = pool->abm;
1973 
1974 	for (i = 0; i < context->stream_count; i++)
1975 		if (context->streams[i] == stream) {
1976 			context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
1977 			context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
1978 			return DC_OK;
1979 		}
1980 
1981 	DC_ERROR("Stream %p not found in new ctx!\n", stream);
1982 	return DC_ERROR_UNEXPECTED;
1983 }
1984 
1985 void dc_resource_state_copy_construct_current(
1986 		const struct dc *dc,
1987 		struct dc_state *dst_ctx)
1988 {
1989 	dc_resource_state_copy_construct(dc->current_state, dst_ctx);
1990 }
1991 
1992 
1993 void dc_resource_state_construct(
1994 		const struct dc *dc,
1995 		struct dc_state *dst_ctx)
1996 {
1997 	dst_ctx->dis_clk = dc->res_pool->dccg;
1998 }
1999 
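/*
 * Global validation of a new state: run the per-ASIC validate_global hook,
 * move DP streams that cannot share a PLL with a non-DP stream onto the
 * dedicated DP clock source, rebuild the scaling parameters and finally
 * validate bandwidth.
 */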
2000 enum dc_status dc_validate_global_state(
2001 		struct dc *dc,
2002 		struct dc_state *new_ctx)
2003 {
2004 	enum dc_status result = DC_ERROR_UNEXPECTED;
2005 	int i, j;
2006 
2007 	if (!new_ctx)
2008 		return DC_ERROR_UNEXPECTED;
2009 
2010 	if (dc->res_pool->funcs->validate_global) {
2011 		result = dc->res_pool->funcs->validate_global(dc, new_ctx);
2012 		if (result != DC_OK)
2013 			return result;
2014 	}
2015 
2016 	for (i = 0; i < new_ctx->stream_count; i++) {
2017 		struct dc_stream_state *stream = new_ctx->streams[i];
2018 
2019 		for (j = 0; j < dc->res_pool->pipe_count; j++) {
2020 			struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
2021 
2022 			if (pipe_ctx->stream != stream)
2023 				continue;
2024 
2025 			/* Switch to dp clock source only if there is
2026 			 * no non dp stream that shares the same timing
2027 			 * with the dp stream.
2028 			 */
2029 			if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
2030 				!find_pll_sharable_stream(stream, new_ctx)) {
2031 
2032 				resource_unreference_clock_source(
2033 						&new_ctx->res_ctx,
2034 						dc->res_pool,
2035 						pipe_ctx->clock_source);
2036 
2037 				pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
2038 				resource_reference_clock_source(
2039 						&new_ctx->res_ctx,
2040 						dc->res_pool,
						pipe_ctx->clock_source);
2042 			}
2043 		}
2044 	}
2045 
2046 	result = resource_build_scaling_params_for_context(dc, new_ctx);
2047 
2048 	if (result == DC_OK)
2049 		if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
2050 			result = DC_FAIL_BANDWIDTH_VALIDATE;
2051 
2052 	return result;
2053 }
2054 
2055 static void patch_gamut_packet_checksum(
2056 		struct dc_info_packet *gamut_packet)
2057 {
2058 	/* For gamut we recalc checksum */
2059 	if (gamut_packet->valid) {
2060 		uint8_t chk_sum = 0;
2061 		uint8_t *ptr;
2062 		uint8_t i;
2063 
2064 		/*start of the Gamut data. */
2065 		ptr = &gamut_packet->sb[3];
2066 
2067 		for (i = 0; i <= gamut_packet->sb[1]; i++)
2068 			chk_sum += ptr[i];
2069 
2070 		gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
2071 	}
2072 }
2073 
2074 static void set_avi_info_frame(
2075 		struct dc_info_packet *info_packet,
2076 		struct pipe_ctx *pipe_ctx)
2077 {
2078 	struct dc_stream_state *stream = pipe_ctx->stream;
2079 	enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
2080 	uint32_t pixel_encoding = 0;
2081 	enum scanning_type scan_type = SCANNING_TYPE_NODATA;
2082 	enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
2083 	bool itc = false;
2084 	uint8_t itc_value = 0;
2085 	uint8_t cn0_cn1 = 0;
2086 	unsigned int cn0_cn1_value = 0;
2087 	uint8_t *check_sum = NULL;
2088 	uint8_t byte_index = 0;
2089 	union hdmi_info_packet hdmi_info;
2090 	union display_content_support support = {0};
2091 	unsigned int vic = pipe_ctx->stream->timing.vic;
2092 	enum dc_timing_3d_format format;
2093 
2094 	memset(&hdmi_info, 0, sizeof(union hdmi_info_packet));
2095 
2096 	color_space = pipe_ctx->stream->output_color_space;
2097 	if (color_space == COLOR_SPACE_UNKNOWN)
2098 		color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
2099 			COLOR_SPACE_SRGB:COLOR_SPACE_YCBCR709;
2100 
2101 	/* Initialize header */
2102 	hdmi_info.bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
2103 	/* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
	 * not be used in HDMI 2.0 (Section 10.1) */
2105 	hdmi_info.bits.header.version = 2;
2106 	hdmi_info.bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
2107 
2108 	/*
2109 	 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
2110 	 * according to HDMI 2.0 spec (Section 10.1)
2111 	 */
2112 
2113 	switch (stream->timing.pixel_encoding) {
2114 	case PIXEL_ENCODING_YCBCR422:
2115 		pixel_encoding = 1;
2116 		break;
2117 
2118 	case PIXEL_ENCODING_YCBCR444:
2119 		pixel_encoding = 2;
2120 		break;
2121 	case PIXEL_ENCODING_YCBCR420:
2122 		pixel_encoding = 3;
2123 		break;
2124 
2125 	case PIXEL_ENCODING_RGB:
2126 	default:
2127 		pixel_encoding = 0;
2128 	}
2129 
2130 	/* Y0_Y1_Y2 : The pixel encoding */
2131 	/* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
2132 	hdmi_info.bits.Y0_Y1_Y2 = pixel_encoding;
2133 
2134 	/* A0 = 1 Active Format Information valid */
2135 	hdmi_info.bits.A0 = ACTIVE_FORMAT_VALID;
2136 
2137 	/* B0, B1 = 3; Bar info data is valid */
2138 	hdmi_info.bits.B0_B1 = BAR_INFO_BOTH_VALID;
2139 
2140 	hdmi_info.bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
2141 
2142 	/* S0, S1 : Underscan / Overscan */
2143 	/* TODO: un-hardcode scan type */
2144 	scan_type = SCANNING_TYPE_UNDERSCAN;
2145 	hdmi_info.bits.S0_S1 = scan_type;
2146 
2147 	/* C0, C1 : Colorimetry */
2148 	if (color_space == COLOR_SPACE_YCBCR709 ||
2149 			color_space == COLOR_SPACE_YCBCR709_LIMITED)
2150 		hdmi_info.bits.C0_C1 = COLORIMETRY_ITU709;
2151 	else if (color_space == COLOR_SPACE_YCBCR601 ||
2152 			color_space == COLOR_SPACE_YCBCR601_LIMITED)
2153 		hdmi_info.bits.C0_C1 = COLORIMETRY_ITU601;
2154 	else {
2155 		hdmi_info.bits.C0_C1 = COLORIMETRY_NO_DATA;
2156 	}
2157 	if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
2158 			color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
2159 			color_space == COLOR_SPACE_2020_YCBCR) {
2160 		hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
2161 		hdmi_info.bits.C0_C1   = COLORIMETRY_EXTENDED;
2162 	} else if (color_space == COLOR_SPACE_ADOBERGB) {
2163 		hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
2164 		hdmi_info.bits.C0_C1   = COLORIMETRY_EXTENDED;
2165 	}
2166 
2167 	/* TODO: un-hardcode aspect ratio */
2168 	aspect = stream->timing.aspect_ratio;
2169 
2170 	switch (aspect) {
2171 	case ASPECT_RATIO_4_3:
2172 	case ASPECT_RATIO_16_9:
2173 		hdmi_info.bits.M0_M1 = aspect;
2174 		break;
2175 
2176 	case ASPECT_RATIO_NO_DATA:
2177 	case ASPECT_RATIO_64_27:
2178 	case ASPECT_RATIO_256_135:
2179 	default:
2180 		hdmi_info.bits.M0_M1 = 0;
2181 	}
2182 
2183 	/* Active Format Aspect ratio - same as Picture Aspect Ratio. */
2184 	hdmi_info.bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
2185 
2186 	/* TODO: un-hardcode cn0_cn1 and itc */
2187 
2188 	cn0_cn1 = 0;
2189 	cn0_cn1_value = 0;
2190 
2191 	itc = true;
2192 	itc_value = 1;
2193 
2194 	support = stream->sink->edid_caps.content_support;
2195 
2196 	if (itc) {
2197 		if (!support.bits.valid_content_type) {
2198 			cn0_cn1_value = 0;
2199 		} else {
2200 			if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
2201 				if (support.bits.graphics_content == 1) {
2202 					cn0_cn1_value = 0;
2203 				}
2204 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
2205 				if (support.bits.photo_content == 1) {
2206 					cn0_cn1_value = 1;
2207 				} else {
2208 					cn0_cn1_value = 0;
2209 					itc_value = 0;
2210 				}
2211 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
2212 				if (support.bits.cinema_content == 1) {
2213 					cn0_cn1_value = 2;
2214 				} else {
2215 					cn0_cn1_value = 0;
2216 					itc_value = 0;
2217 				}
2218 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
2219 				if (support.bits.game_content == 1) {
2220 					cn0_cn1_value = 3;
2221 				} else {
2222 					cn0_cn1_value = 0;
2223 					itc_value = 0;
2224 				}
2225 			}
2226 		}
2227 		hdmi_info.bits.CN0_CN1 = cn0_cn1_value;
2228 		hdmi_info.bits.ITC = itc_value;
2229 	}
2230 
2231 	/* TODO : We should handle YCC quantization */
2232 	/* but we do not have matrix calculation */
2233 	if (stream->sink->edid_caps.qs_bit == 1 &&
2234 			stream->sink->edid_caps.qy_bit == 1) {
2235 		if (color_space == COLOR_SPACE_SRGB ||
2236 			color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
2237 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_FULL_RANGE;
2238 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
2239 		} else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
2240 					color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
2241 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_LIMITED_RANGE;
2242 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2243 		} else {
2244 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
2245 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2246 		}
2247 	} else {
2248 		hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
2249 		hdmi_info.bits.YQ0_YQ1   = YYC_QUANTIZATION_LIMITED_RANGE;
2250 	}
2251 
2252 	///VIC
2253 	format = stream->timing.timing_3d_format;
2254 	/*todo, add 3DStereo support*/
2255 	if (format != TIMING_3D_FORMAT_NONE) {
		/* Based on the HDMI spec, the HDMI VIC needs to be converted
		 * to a CEA VIC when 3D is enabled.
		 */
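		/* HDMI_VIC 1..4 correspond to 3840x2160p30/25/24 and
		 * 4096x2160p24, i.e. CTA-861 VICs 95, 94, 93 and 98.
		 */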
2257 		switch (pipe_ctx->stream->timing.hdmi_vic) {
2258 		case 1:
2259 			vic = 95;
2260 			break;
2261 		case 2:
2262 			vic = 94;
2263 			break;
2264 		case 3:
2265 			vic = 93;
2266 			break;
2267 		case 4:
2268 			vic = 98;
2269 			break;
2270 		default:
2271 			break;
2272 		}
2273 	}
2274 	hdmi_info.bits.VIC0_VIC7 = vic;
2275 
	/* pixel repetition
	 * PR0 - PR3 start from 0, whereas pHwPathMode->mode.timing.flags.pixel
	 * repetition starts from 1
	 */
2279 	hdmi_info.bits.PR0_PR3 = 0;
2280 
2281 	/* Bar Info
2282 	 * barTop:    Line Number of End of Top Bar.
2283 	 * barBottom: Line Number of Start of Bottom Bar.
2284 	 * barLeft:   Pixel Number of End of Left Bar.
2285 	 * barRight:  Pixel Number of Start of Right Bar. */
2286 	hdmi_info.bits.bar_top = stream->timing.v_border_top;
2287 	hdmi_info.bits.bar_bottom = (stream->timing.v_total
2288 			- stream->timing.v_border_bottom + 1);
2289 	hdmi_info.bits.bar_left  = stream->timing.h_border_left;
2290 	hdmi_info.bits.bar_right = (stream->timing.h_total
2291 			- stream->timing.h_border_right + 1);
2292 
2293 	/* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
2294 	check_sum = &hdmi_info.packet_raw_data.sb[0];
2295 
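	/* Seed the checksum with the three header bytes: packet type (0x82),
	 * version (2) and length (13).
	 */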
2296 	*check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
2297 
2298 	for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
2299 		*check_sum += hdmi_info.packet_raw_data.sb[byte_index];
2300 
2301 	/* one byte complement */
2302 	*check_sum = (uint8_t) (0x100 - *check_sum);
2303 
2304 	/* Store in hw_path_mode */
2305 	info_packet->hb0 = hdmi_info.packet_raw_data.hb0;
2306 	info_packet->hb1 = hdmi_info.packet_raw_data.hb1;
2307 	info_packet->hb2 = hdmi_info.packet_raw_data.hb2;
2308 
2309 	for (byte_index = 0; byte_index < sizeof(hdmi_info.packet_raw_data.sb); byte_index++)
2310 		info_packet->sb[byte_index] = hdmi_info.packet_raw_data.sb[byte_index];
2311 
2312 	info_packet->valid = true;
2313 }
2314 
2315 static void set_vendor_info_packet(
2316 		struct dc_info_packet *info_packet,
2317 		struct dc_stream_state *stream)
2318 {
2319 	uint32_t length = 0;
2320 	bool hdmi_vic_mode = false;
2321 	uint8_t checksum = 0;
2322 	uint32_t i = 0;
2323 	enum dc_timing_3d_format format;
	/* TODO: length can be different depending on packet content */
	/* unsigned int length = pPathMode->dolbyVision ? 24 : 5; */
2326 
2327 	info_packet->valid = false;
2328 
2329 	format = stream->timing.timing_3d_format;
2330 	if (stream->view_format == VIEW_3D_FORMAT_NONE)
2331 		format = TIMING_3D_FORMAT_NONE;
2332 
2333 	/* Can be different depending on packet content */
2334 	length = 5;
2335 
2336 	if (stream->timing.hdmi_vic != 0
2337 			&& stream->timing.h_total >= 3840
2338 			&& stream->timing.v_total >= 2160)
2339 		hdmi_vic_mode = true;
2340 
2341 	/* According to HDMI 1.4a CTS, VSIF should be sent
2342 	 * for both 3D stereo and HDMI VIC modes.
2343 	 * For all other modes, there is no VSIF sent.  */
2344 
2345 	if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
2346 		return;
2347 
2348 	/* 24bit IEEE Registration identifier (0x000c03). LSB first. */
2349 	info_packet->sb[1] = 0x03;
2350 	info_packet->sb[2] = 0x0C;
2351 	info_packet->sb[3] = 0x00;
2352 
	/* PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
2354 	 * The value for HDMI_Video_Format are:
2355 	 * 0x0 (0b000) - No additional HDMI video format is presented in this
2356 	 * packet
2357 	 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
2358 	 * parameter follows
2359 	 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
2360 	 * potentially 3D_Ext_Data follows
2361 	 * 0x3..0x7 (0b011..0b111) - reserved for future use */
2362 	if (format != TIMING_3D_FORMAT_NONE)
2363 		info_packet->sb[4] = (2 << 5);
2364 	else if (hdmi_vic_mode)
2365 		info_packet->sb[4] = (1 << 5);
2366 
2367 	/* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
	 * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
2369 	 * The value for 3D_Structure are:
2370 	 * 0x0 - Frame Packing
2371 	 * 0x1 - Field Alternative
2372 	 * 0x2 - Line Alternative
2373 	 * 0x3 - Side-by-Side (full)
2374 	 * 0x4 - L + depth
2375 	 * 0x5 - L + depth + graphics + graphics-depth
2376 	 * 0x6 - Top-and-Bottom
2377 	 * 0x7 - Reserved for future use
2378 	 * 0x8 - Side-by-Side (Half)
2379 	 * 0x9..0xE - Reserved for future use
2380 	 * 0xF - Not used */
2381 	switch (format) {
2382 	case TIMING_3D_FORMAT_HW_FRAME_PACKING:
2383 	case TIMING_3D_FORMAT_SW_FRAME_PACKING:
2384 		info_packet->sb[5] = (0x0 << 4);
2385 		break;
2386 
2387 	case TIMING_3D_FORMAT_SIDE_BY_SIDE:
2388 	case TIMING_3D_FORMAT_SBS_SW_PACKED:
2389 		info_packet->sb[5] = (0x8 << 4);
2390 		length = 6;
2391 		break;
2392 
2393 	case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
2394 	case TIMING_3D_FORMAT_TB_SW_PACKED:
2395 		info_packet->sb[5] = (0x6 << 4);
2396 		break;
2397 
2398 	default:
2399 		break;
2400 	}
2401 
2402 	/*PB5: If PB4 is set to 0x1 (extended resolution format)
2403 	 * fill PB5 with the correct HDMI VIC code */
2404 	if (hdmi_vic_mode)
2405 		info_packet->sb[5] = stream->timing.hdmi_vic;
2406 
2407 	/* Header */
2408 	info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
2409 	info_packet->hb1 = 0x01; /* Version */
2410 
2411 	/* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
2412 	info_packet->hb2 = (uint8_t) (length);
2413 
2414 	/* Calculate checksum */
2415 	checksum = 0;
2416 	checksum += info_packet->hb0;
2417 	checksum += info_packet->hb1;
2418 	checksum += info_packet->hb2;
2419 
2420 	for (i = 1; i <= length; i++)
2421 		checksum += info_packet->sb[i];
2422 
2423 	info_packet->sb[0] = (uint8_t) (0x100 - checksum);
2424 
2425 	info_packet->valid = true;
2426 }
2427 
2428 static void set_spd_info_packet(
2429 		struct dc_info_packet *info_packet,
2430 		struct dc_stream_state *stream)
2431 {
2432 	/* SPD info packet for FreeSync */
2433 
2434 	unsigned char checksum = 0;
2435 	unsigned int idx, payload_size = 0;
2436 
2437 	/* Check if Freesync is supported. Return if false. If true,
2438 	 * set the corresponding bit in the info packet
2439 	 */
2440 	if (stream->freesync_ctx.supported == false)
2441 		return;
2442 
2443 	if (dc_is_hdmi_signal(stream->signal)) {
2444 
2445 		/* HEADER */
2446 
2447 		/* HB0  = Packet Type = 0x83 (Source Product
2448 		 *	  Descriptor InfoFrame)
2449 		 */
2450 		info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
2451 
2452 		/* HB1  = Version = 0x01 */
2453 		info_packet->hb1 = 0x01;
2454 
2455 		/* HB2  = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
2456 		info_packet->hb2 = 0x08;
2457 
2458 		payload_size = 0x08;
2459 
2460 	} else if (dc_is_dp_signal(stream->signal)) {
2461 
2462 		/* HEADER */
2463 
2464 		/* HB0  = Secondary-data Packet ID = 0 - Only non-zero
2465 		 *	  when used to associate audio related info packets
2466 		 */
2467 		info_packet->hb0 = 0x00;
2468 
2469 		/* HB1  = Packet Type = 0x83 (Source Product
2470 		 *	  Descriptor InfoFrame)
2471 		 */
2472 		info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
2473 
2474 		/* HB2  = [Bits 7:0 = Least significant eight bits -
2475 		 *	  For INFOFRAME, the value must be 1Bh]
2476 		 */
2477 		info_packet->hb2 = 0x1B;
2478 
2479 		/* HB3  = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
2480 		 *	  [Bits 1:0 = Most significant two bits = 0x00]
2481 		 */
2482 		info_packet->hb3 = 0x04;
2483 
2484 		payload_size = 0x1B;
2485 	}
2486 
2487 	/* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
2488 	info_packet->sb[1] = 0x1A;
2489 
2490 	/* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
2491 	info_packet->sb[2] = 0x00;
2492 
2493 	/* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
2494 	info_packet->sb[3] = 0x00;
2495 
2496 	/* PB4 = Reserved */
2497 	info_packet->sb[4] = 0x00;
2498 
2499 	/* PB5 = Reserved */
2500 	info_packet->sb[5] = 0x00;
2501 
2502 	/* PB6 = [Bits 7:3 = Reserved] */
2503 	info_packet->sb[6] = 0x00;
2504 
2505 	if (stream->freesync_ctx.supported == true)
2506 		/* PB6 = [Bit 0 = FreeSync Supported] */
2507 		info_packet->sb[6] |= 0x01;
2508 
2509 	if (stream->freesync_ctx.enabled == true)
2510 		/* PB6 = [Bit 1 = FreeSync Enabled] */
2511 		info_packet->sb[6] |= 0x02;
2512 
2513 	if (stream->freesync_ctx.active == true)
2514 		/* PB6 = [Bit 2 = FreeSync Active] */
2515 		info_packet->sb[6] |= 0x04;
2516 
2517 	/* PB7 = FreeSync Minimum refresh rate (Hz) */
2518 	info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
2519 			min_refresh_in_micro_hz / 1000000);
2520 
2521 	/* PB8 = FreeSync Maximum refresh rate (Hz)
2522 	 *
2523 	 * Note: We do not use the maximum capable refresh rate
2524 	 * of the panel, because we should never go above the field
2525 	 * rate of the mode timing set.
2526 	 */
2527 	info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
2528 			nominal_refresh_in_micro_hz / 1000000);
2529 
2530 	/* PB9 - PB27  = Reserved */
2531 	for (idx = 9; idx <= 27; idx++)
2532 		info_packet->sb[idx] = 0x00;
2533 
2534 	/* Calculate checksum */
2535 	checksum += info_packet->hb0;
2536 	checksum += info_packet->hb1;
2537 	checksum += info_packet->hb2;
2538 	checksum += info_packet->hb3;
2539 
2540 	for (idx = 1; idx <= payload_size; idx++)
2541 		checksum += info_packet->sb[idx];
2542 
2543 	/* PB0 = Checksum (one byte complement) */
2544 	info_packet->sb[0] = (unsigned char) (0x100 - checksum);
2545 
2546 	info_packet->valid = true;
2547 }
2548 
2549 static void set_hdr_static_info_packet(
2550 		struct dc_info_packet *info_packet,
2551 		struct dc_stream_state *stream)
2552 {
2553 	/* HDR Static Metadata info packet for HDR10 */
2554 
2555 	if (!stream->hdr_static_metadata.valid ||
2556 			stream->use_dynamic_meta)
2557 		return;
2558 
2559 	*info_packet = stream->hdr_static_metadata;
2560 }
2561 
2562 static void set_vsc_info_packet(
2563 		struct dc_info_packet *info_packet,
2564 		struct dc_stream_state *stream)
2565 {
2566 	unsigned int vscPacketRevision = 0;
2567 	unsigned int i;
2568 
	/* VSC packet revision 2 is used when PSR is supported (DP revision >= 1.2) */
2570 	if (stream->psr_version != 0) {
2571 		vscPacketRevision = 2;
2572 	}
2573 
2574 	/* VSC packet not needed based on the features
2575 	 * supported by this DP display
2576 	 */
2577 	if (vscPacketRevision == 0)
2578 		return;
2579 
2580 	if (vscPacketRevision == 0x2) {
2581 		/* Secondary-data Packet ID = 0*/
2582 		info_packet->hb0 = 0x00;
2583 		/* 07h - Packet Type Value indicating Video
2584 		 * Stream Configuration packet
2585 		 */
2586 		info_packet->hb1 = 0x07;
2587 		/* 02h = VSC SDP supporting 3D stereo and PSR
2588 		 * (applies to eDP v1.3 or higher).
2589 		 */
2590 		info_packet->hb2 = 0x02;
2591 		/* 08h = VSC packet supporting 3D stereo + PSR
2592 		 * (HB2 = 02h).
2593 		 */
2594 		info_packet->hb3 = 0x08;
2595 
2596 		for (i = 0; i < 28; i++)
2597 			info_packet->sb[i] = 0;
2598 
2599 		info_packet->valid = true;
2600 	}
2601 
2602 	/*TODO: stereo 3D support and extend pixel encoding colorimetry*/
2603 }
2604 
2605 void dc_resource_state_destruct(struct dc_state *context)
2606 {
2607 	int i, j;
2608 
2609 	for (i = 0; i < context->stream_count; i++) {
2610 		for (j = 0; j < context->stream_status[i].plane_count; j++)
2611 			dc_plane_state_release(
2612 				context->stream_status[i].plane_states[j]);
2613 
2614 		context->stream_status[i].plane_count = 0;
2615 		dc_stream_release(context->streams[i]);
2616 		context->streams[i] = NULL;
2617 	}
2618 }
2619 
2620 /*
2621  * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
2622  * by the src_ctx
2623  */
2624 void dc_resource_state_copy_construct(
2625 		const struct dc_state *src_ctx,
2626 		struct dc_state *dst_ctx)
2627 {
2628 	int i, j;
2629 	struct kref refcount = dst_ctx->refcount;
2630 
2631 	*dst_ctx = *src_ctx;
2632 
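	/* The struct copy above leaves top_pipe/bottom_pipe pointing into
	 * src_ctx; re-point them at the corresponding pipes in dst_ctx.
	 */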
2633 	for (i = 0; i < MAX_PIPES; i++) {
2634 		struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2635 
2636 		if (cur_pipe->top_pipe)
2637 			cur_pipe->top_pipe =  &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2638 
2639 		if (cur_pipe->bottom_pipe)
2640 			cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
2641 
2642 	}
2643 
2644 	for (i = 0; i < dst_ctx->stream_count; i++) {
2645 		dc_stream_retain(dst_ctx->streams[i]);
2646 		for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2647 			dc_plane_state_retain(
2648 				dst_ctx->stream_status[i].plane_states[j]);
2649 	}
2650 
2651 	/* context refcount should not be overridden */
2652 	dst_ctx->refcount = refcount;
2653 
2654 }
2655 
2656 struct clock_source *dc_resource_find_first_free_pll(
2657 		struct resource_context *res_ctx,
2658 		const struct resource_pool *pool)
2659 {
2660 	int i;
2661 
2662 	for (i = 0; i < pool->clk_src_count; ++i) {
2663 		if (res_ctx->clock_source_ref_count[i] == 0)
2664 			return pool->clock_sources[i];
2665 	}
2666 
2667 	return NULL;
2668 }
2669 
2670 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2671 {
2672 	enum signal_type signal = SIGNAL_TYPE_NONE;
2673 	struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2674 
2675 	/* default all packets to invalid */
2676 	info->avi.valid = false;
2677 	info->gamut.valid = false;
2678 	info->vendor.valid = false;
2679 	info->spd.valid = false;
2680 	info->hdrsmd.valid = false;
2681 	info->vsc.valid = false;
2682 
2683 	signal = pipe_ctx->stream->signal;
2684 
	/* HDMI and DP have different info packets */
2686 	if (dc_is_hdmi_signal(signal)) {
2687 		set_avi_info_frame(&info->avi, pipe_ctx);
2688 
2689 		set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2690 
2691 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2692 
2693 		set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2694 
2695 	} else if (dc_is_dp_signal(signal)) {
2696 		set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2697 
2698 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2699 
2700 		set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2701 	}
2702 
2703 	patch_gamut_packet_checksum(&info->gamut);
2704 }
2705 
2706 enum dc_status resource_map_clock_resources(
2707 		const struct dc  *dc,
2708 		struct dc_state *context,
2709 		struct dc_stream_state *stream)
2710 {
2711 	/* acquire new resources */
2712 	const struct resource_pool *pool = dc->res_pool;
2713 	struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2714 				&context->res_ctx, stream);
2715 
2716 	if (!pipe_ctx)
2717 		return DC_ERROR_UNEXPECTED;
2718 
2719 	if (dc_is_dp_signal(pipe_ctx->stream->signal)
2720 		|| pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2721 		pipe_ctx->clock_source = pool->dp_clock_source;
2722 	else {
2723 		pipe_ctx->clock_source = NULL;
2724 
2725 		if (!dc->config.disable_disp_pll_sharing)
2726 			pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2727 				&context->res_ctx,
2728 				pipe_ctx);
2729 
2730 		if (pipe_ctx->clock_source == NULL)
2731 			pipe_ctx->clock_source =
2732 				dc_resource_find_first_free_pll(
2733 					&context->res_ctx,
2734 					pool);
2735 	}
2736 
2737 	if (pipe_ctx->clock_source == NULL)
2738 		return DC_NO_CLOCK_SOURCE_RESOURCE;
2739 
2740 	resource_reference_clock_source(
2741 		&context->res_ctx, pool,
2742 		pipe_ctx->clock_source);
2743 
2744 	return DC_OK;
2745 }
2746 
/*
 * Note: We need to disable the output if the clock source changes, since
 * the BIOS performs an optimization and does not apply the change to the
 * PHY unless it is already disabled.
 */
2752 bool pipe_need_reprogram(
2753 		struct pipe_ctx *pipe_ctx_old,
2754 		struct pipe_ctx *pipe_ctx)
2755 {
2756 	if (!pipe_ctx_old->stream)
2757 		return false;
2758 
2759 	if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2760 		return true;
2761 
2762 	if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2763 		return true;
2764 
2765 	if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
2766 		return true;
2767 
2768 	if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2769 			&& pipe_ctx_old->stream != pipe_ctx->stream)
2770 		return true;
2771 
2772 	if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2773 		return true;
2774 
2775 	if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2776 		return true;
2777 
2778 	if (is_hdr_static_meta_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2779 		return true;
2780 
2781 	return false;
2782 }
2783 
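/*
 * Translate the stream's dither option into formatter bit depth reduction
 * settings: a truncation stage, an optional spatial dither stage and an
 * optional temporal (frame modulation) stage, each with its target depth.
 */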
2784 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2785 		struct bit_depth_reduction_params *fmt_bit_depth)
2786 {
2787 	enum dc_dither_option option = stream->dither_option;
2788 	enum dc_pixel_encoding pixel_encoding =
2789 			stream->timing.pixel_encoding;
2790 
2791 	memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
2792 
2793 	if (option == DITHER_OPTION_DEFAULT) {
2794 		switch (stream->timing.display_color_depth) {
2795 		case COLOR_DEPTH_666:
2796 			option = DITHER_OPTION_SPATIAL6;
2797 			break;
2798 		case COLOR_DEPTH_888:
2799 			option = DITHER_OPTION_SPATIAL8;
2800 			break;
2801 		case COLOR_DEPTH_101010:
2802 			option = DITHER_OPTION_SPATIAL10;
2803 			break;
2804 		default:
2805 			option = DITHER_OPTION_DISABLE;
2806 		}
2807 	}
2808 
2809 	if (option == DITHER_OPTION_DISABLE)
2810 		return;
2811 
2812 	if (option == DITHER_OPTION_TRUN6) {
2813 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2814 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2815 	} else if (option == DITHER_OPTION_TRUN8 ||
2816 			option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2817 			option == DITHER_OPTION_TRUN8_FM6) {
2818 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2819 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2820 	} else if (option == DITHER_OPTION_TRUN10        ||
2821 			option == DITHER_OPTION_TRUN10_SPATIAL6   ||
2822 			option == DITHER_OPTION_TRUN10_SPATIAL8   ||
2823 			option == DITHER_OPTION_TRUN10_FM8     ||
2824 			option == DITHER_OPTION_TRUN10_FM6     ||
2825 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2826 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2827 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2828 	}
2829 
2830 	/* special case - Formatter can only reduce by 4 bits at most.
2831 	 * When reducing from 12 to 6 bits,
2832 	 * HW recommends we use trunc with round mode
2833 	 * (if we did nothing, trunc to 10 bits would be used)
2834 	 * note that any 12->10 bit reduction is ignored prior to DCE8,
2835 	 * as the input was 10 bits.
2836 	 */
2837 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2838 			option == DITHER_OPTION_SPATIAL6 ||
2839 			option == DITHER_OPTION_FM6) {
2840 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2841 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2842 		fmt_bit_depth->flags.TRUNCATE_MODE = 1;
2843 	}
2844 
2845 	/* spatial dither
2846 	 * note that spatial modes 1-3 are never used
2847 	 */
2848 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM            ||
2849 			option == DITHER_OPTION_SPATIAL6 ||
2850 			option == DITHER_OPTION_TRUN10_SPATIAL6      ||
2851 			option == DITHER_OPTION_TRUN8_SPATIAL6) {
2852 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2853 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2854 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2855 		fmt_bit_depth->flags.RGB_RANDOM =
2856 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2857 	} else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM            ||
2858 			option == DITHER_OPTION_SPATIAL8 ||
2859 			option == DITHER_OPTION_SPATIAL8_FM6        ||
2860 			option == DITHER_OPTION_TRUN10_SPATIAL8      ||
2861 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2862 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2863 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2864 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2865 		fmt_bit_depth->flags.RGB_RANDOM =
2866 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2867 	} else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2868 			option == DITHER_OPTION_SPATIAL10 ||
2869 			option == DITHER_OPTION_SPATIAL10_FM8 ||
2870 			option == DITHER_OPTION_SPATIAL10_FM6) {
2871 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2872 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2873 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2874 		fmt_bit_depth->flags.RGB_RANDOM =
2875 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2876 	}
2877 
2878 	if (option == DITHER_OPTION_SPATIAL6 ||
2879 			option == DITHER_OPTION_SPATIAL8 ||
2880 			option == DITHER_OPTION_SPATIAL10) {
2881 		fmt_bit_depth->flags.FRAME_RANDOM = 0;
2882 	} else {
2883 		fmt_bit_depth->flags.FRAME_RANDOM = 1;
2884 	}
2885 
	/* temporal dither */
2889 	if (option == DITHER_OPTION_FM6           ||
2890 			option == DITHER_OPTION_SPATIAL8_FM6     ||
2891 			option == DITHER_OPTION_SPATIAL10_FM6     ||
2892 			option == DITHER_OPTION_TRUN10_FM6     ||
2893 			option == DITHER_OPTION_TRUN8_FM6      ||
2894 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2895 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2896 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2897 	} else if (option == DITHER_OPTION_FM8        ||
2898 			option == DITHER_OPTION_SPATIAL10_FM8  ||
2899 			option == DITHER_OPTION_TRUN10_FM8) {
2900 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2901 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2902 	} else if (option == DITHER_OPTION_FM10) {
2903 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2904 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2905 	}
2906 
2907 	fmt_bit_depth->pixel_encoding = pixel_encoding;
2908 }
2909 
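/*
 * Stream-level validation: check the timing against the first timing
 * generator, then the link encoder's output capabilities and finally the
 * link's mode timing (bandwidth) limits.
 */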
2910 enum dc_status dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2911 {
2912 	struct dc  *core_dc = dc;
2913 	struct dc_link *link = stream->sink->link;
2914 	struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2915 	enum dc_status res = DC_OK;
2916 
2917 	calculate_phy_pix_clks(stream);
2918 
2919 	if (!tg->funcs->validate_timing(tg, &stream->timing))
2920 		res = DC_FAIL_CONTROLLER_VALIDATE;
2921 
2922 	if (res == DC_OK)
2923 		if (!link->link_enc->funcs->validate_output_with_stream(
2924 						link->link_enc, stream))
2925 			res = DC_FAIL_ENC_VALIDATE;
2926 
2927 	/* TODO: validate audio ASIC caps, encoder */
2928 
2929 	if (res == DC_OK)
2930 		res = dc_link_validate_mode_timing(stream,
2931 		      link,
2932 		      &stream->timing);
2933 
2934 	return res;
2935 }
2936 
2937 enum dc_status dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2938 {
2939 	enum dc_status res = DC_OK;
2940 
2941 	/* TODO For now validates pixel format only */
2942 	if (dc->res_pool->funcs->validate_plane)
2943 		return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps);
2944 
2945 	return res;
2946 }
2947