xref: /linux/drivers/gpu/drm/i915/display/intel_vdsc.c (revision 021bc4b9)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2018 Intel Corporation
4  *
5  * Author: Gaurav K Singh <gaurav.k.singh@intel.com>
6  *         Manasi Navare <manasi.d.navare@intel.com>
7  */
8 #include <linux/limits.h>
9 
10 #include <drm/display/drm_dsc_helper.h>
11 
12 #include "i915_drv.h"
13 #include "i915_reg.h"
14 #include "intel_crtc.h"
15 #include "intel_de.h"
16 #include "intel_display_types.h"
17 #include "intel_dsi.h"
18 #include "intel_qp_tables.h"
19 #include "intel_vdsc.h"
20 #include "intel_vdsc_regs.h"
21 
22 bool intel_dsc_source_support(const struct intel_crtc_state *crtc_state)
23 {
24 	const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
25 	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
26 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
27 
28 	if (!HAS_DSC(i915))
29 		return false;
30 
31 	if (DISPLAY_VER(i915) == 11 && cpu_transcoder == TRANSCODER_A)
32 		return false;
33 
34 	return true;
35 }
36 
37 static bool is_pipe_dsc(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
38 {
39 	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
40 
41 	if (DISPLAY_VER(i915) >= 12)
42 		return true;
43 
44 	if (cpu_transcoder == TRANSCODER_EDP ||
45 	    cpu_transcoder == TRANSCODER_DSI_0 ||
46 	    cpu_transcoder == TRANSCODER_DSI_1)
47 		return false;
48 
49 	/* There's no pipe A DSC engine on ICL */
50 	drm_WARN_ON(&i915->drm, crtc->pipe == PIPE_A);
51 
52 	return true;
53 }
54 
55 static void
56 intel_vdsc_set_min_max_qp(struct drm_dsc_config *vdsc_cfg, int buf,
57 			  int bpp)
58 {
59 	int bpc = vdsc_cfg->bits_per_component;
60 
61 	/* Read range_min_qp and range_max_qp from the qp tables */
62 	vdsc_cfg->rc_range_params[buf].range_min_qp =
63 		intel_lookup_range_min_qp(bpc, buf, bpp, vdsc_cfg->native_420);
64 	vdsc_cfg->rc_range_params[buf].range_max_qp =
65 		intel_lookup_range_max_qp(bpc, buf, bpp, vdsc_cfg->native_420);
66 }
67 
68 /*
69  * We are using the method provided in the DSC 1.2a C-Model in codec_main.c.
70  * The above method uses a common formula to derive values for any combination of DSC
71  * variables. The formula approach may yield slight differences in the derived PPS
72  * parameters from the original parameter sets. These differences are not consequential
73  * to the coding performance because all parameter sets have been shown to produce
74  * visually lossless quality (it provides the same PPS values as the
75  * DSCParameterValuesVESA V1-2 spreadsheet).
76  */
77 static void
78 calculate_rc_params(struct drm_dsc_config *vdsc_cfg)
79 {
80 	int bpp = to_bpp_int(vdsc_cfg->bits_per_pixel);
81 	int bpc = vdsc_cfg->bits_per_component;
82 	int qp_bpc_modifier = (bpc - 8) * 2;
83 	int uncompressed_bpg_rate;
84 	int first_line_bpg_offset;
85 	u32 res, buf_i, bpp_i;
86 
87 	if (vdsc_cfg->slice_height >= 8)
88 		first_line_bpg_offset =
89 			12 + (9 * min(34, vdsc_cfg->slice_height - 8)) / 100;
90 	else
91 		first_line_bpg_offset = 2 * (vdsc_cfg->slice_height - 1);
92 
93 	uncompressed_bpg_rate = (3 * bpc + (vdsc_cfg->convert_rgb ? 0 : 2)) * 3;
94 	vdsc_cfg->first_line_bpg_offset = clamp(first_line_bpg_offset, 0,
95 						uncompressed_bpg_rate - 3 * bpp);
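	/*
	 * e.g. for slice_height = 108 with 8 bpc RGB at 8 bpp:
	 * first_line_bpg_offset = 12 + (9 * 34) / 100 = 15, and the clamp
	 * limit is (3 * 8) * 3 - 3 * 8 = 48, so 15 is kept.
	 */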
96 
97 	/*
98 	 * According to the DSC 1.2 spec, Section 4.1, if native_420 is set:
99 	 * -second_line_bpg_offset is 12 in general, and equal to 2*(slice_height-1) if slice
100 	 * height < 8.
101 	 * -second_line_offset_adj is 512, an empirical value shown to yield the best chroma
102 	 * preservation in the second line.
103 	 * -nsl_bpg_offset is calculated as second_line_bpg_offset/(slice_height-1), then rounded
104 	 * up to 16 fractional bits; we left shift the second line offset by 11 to preserve 11
105 	 * fractional bits.
106 	 */
107 	if (vdsc_cfg->native_420) {
108 		if (vdsc_cfg->slice_height >= 8)
109 			vdsc_cfg->second_line_bpg_offset = 12;
110 		else
111 			vdsc_cfg->second_line_bpg_offset =
112 				2 * (vdsc_cfg->slice_height - 1);
113 
114 		vdsc_cfg->second_line_offset_adj = 512;
115 		vdsc_cfg->nsl_bpg_offset = DIV_ROUND_UP(vdsc_cfg->second_line_bpg_offset << 11,
116 							vdsc_cfg->slice_height - 1);
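		/*
		 * e.g. second_line_bpg_offset = 12 and slice_height = 108 gives
		 * nsl_bpg_offset = DIV_ROUND_UP(12 << 11, 107) = 230.
		 */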
117 	}
118 
119 	/* Our hw supports only 444 modes as of today */
120 	if (bpp >= 12)
121 		vdsc_cfg->initial_offset = 2048;
122 	else if (bpp >= 10)
123 		vdsc_cfg->initial_offset = 5632 - DIV_ROUND_UP(((bpp - 10) * 3584), 2);
124 	else if (bpp >= 8)
125 		vdsc_cfg->initial_offset = 6144 - DIV_ROUND_UP(((bpp - 8) * 512), 2);
126 	else
127 		vdsc_cfg->initial_offset = 6144;
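	/* e.g. bpp = 11 falls in the 10..12 range: 5632 - DIV_ROUND_UP(1 * 3584, 2) = 3840 */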
128 
129 	/* initial_xmit_delay = rc_model_size/2/compression_bpp */
130 	vdsc_cfg->initial_xmit_delay = DIV_ROUND_UP(DSC_RC_MODEL_SIZE_CONST, 2 * bpp);
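	/* e.g. with the 8192 rc_model_size constant and 8 bpp: DIV_ROUND_UP(8192, 16) = 512 */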
131 
132 	vdsc_cfg->flatness_min_qp = 3 + qp_bpc_modifier;
133 	vdsc_cfg->flatness_max_qp = 12 + qp_bpc_modifier;
134 
135 	vdsc_cfg->rc_quant_incr_limit0 = 11 + qp_bpc_modifier;
136 	vdsc_cfg->rc_quant_incr_limit1 = 11 + qp_bpc_modifier;
137 
138 	if (vdsc_cfg->native_420) {
139 		static const s8 ofs_und4[] = {
140 			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -12, -12, -12, -12
141 		};
142 		static const s8 ofs_und5[] = {
143 			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
144 		};
145 		static const s8 ofs_und6[] = {
146 			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
147 		};
148 		static const s8 ofs_und8[] = {
149 			10, 8, 6, 4, 2, 0, -2, -4, -6, -8, -10, -10, -12, -12, -12
150 		};
151 		/*
152 		 * For the 420 format, since bits_per_pixel (bpp) is set to target bpp * 2,
153 		 * QP table values for target bpp 4.0 to 4.4375 (rounded to 4.0) are
154 		 * actually for bpp 8 to 8.875 (rounded to 4.0 * 2, i.e. 8).
155 		 * Similarly, values for target bpp 4.5 to 4.9375 (rounded to 4.5)
156 		 * are for bpp 9 to 9.875 (rounded to 4.5 * 2, i.e. 9), and so on.
157 		 */
158 		bpp_i  = bpp - 8;
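		/* e.g. a target of 4.5 bpp was doubled to 9 earlier, so bpp_i = 9 - 8 = 1 selects the second QP table column */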
159 		for (buf_i = 0; buf_i < DSC_NUM_BUF_RANGES; buf_i++) {
160 			u8 range_bpg_offset;
161 
162 			intel_vdsc_set_min_max_qp(vdsc_cfg, buf_i, bpp_i);
163 
164 			/* Calculate range_bpg_offset */
165 			if (bpp <= 8) {
166 				range_bpg_offset = ofs_und4[buf_i];
167 			} else if (bpp <= 10) {
168 				res = DIV_ROUND_UP(((bpp - 8) *
169 						    (ofs_und5[buf_i] - ofs_und4[buf_i])), 2);
170 				range_bpg_offset = ofs_und4[buf_i] + res;
171 			} else if (bpp <= 12) {
172 				res = DIV_ROUND_UP(((bpp - 10) *
173 						    (ofs_und6[buf_i] - ofs_und5[buf_i])), 2);
174 				range_bpg_offset = ofs_und5[buf_i] + res;
175 			} else if (bpp <= 16) {
176 				res = DIV_ROUND_UP(((bpp - 12) *
177 						    (ofs_und8[buf_i] - ofs_und6[buf_i])), 4);
178 				range_bpg_offset = ofs_und6[buf_i] + res;
179 			} else {
180 				range_bpg_offset = ofs_und8[buf_i];
181 			}
182 
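			/* The PPS range_bpg_offset field is 6 bits wide, so keep only the two's complement low bits */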
183 			vdsc_cfg->rc_range_params[buf_i].range_bpg_offset =
184 				range_bpg_offset & DSC_RANGE_BPG_OFFSET_MASK;
185 		}
186 	} else {
187 		/* fractional bpp part * 10000 (for precision up to 4 decimal places) */
188 		int fractional_bits = to_bpp_frac(vdsc_cfg->bits_per_pixel);
189 
190 		static const s8 ofs_und6[] = {
191 			0, -2, -2, -4, -6, -6, -8, -8, -8, -10, -10, -12, -12, -12, -12
192 		};
193 		static const s8 ofs_und8[] = {
194 			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
195 		};
196 		static const s8 ofs_und12[] = {
197 			2, 0, 0, -2, -4, -6, -8, -8, -8, -10, -10, -10, -12, -12, -12
198 		};
199 		static const s8 ofs_und15[] = {
200 			10, 8, 6, 4, 2, 0, -2, -4, -6, -8, -10, -10, -12, -12, -12
201 		};
202 
203 		/*
204 		 * QP table rows have values in increments of 0.5 bpp.
205 		 * So 6.0 bpp to 6.4375 will have index 0, 6.5 to 6.9375 will have index 1,
206 		 * and so on.
207 		 * A 0.5 fractional part with 4 decimal precision becomes 5000.
208 		 */
209 		bpp_i  = ((bpp - 6) + (fractional_bits < 5000 ? 0 : 1));
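		/* e.g. per the comment above, 6.5 bpp has a 0.5 fractional part (5000), giving bpp_i = (6 - 6) + 1 = 1 */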
210 
211 		for (buf_i = 0; buf_i < DSC_NUM_BUF_RANGES; buf_i++) {
212 			u8 range_bpg_offset;
213 
214 			intel_vdsc_set_min_max_qp(vdsc_cfg, buf_i, bpp_i);
215 
216 			/* Calculate range_bpg_offset */
217 			if (bpp <= 6) {
218 				range_bpg_offset = ofs_und6[buf_i];
219 			} else if (bpp <= 8) {
220 				res = DIV_ROUND_UP(((bpp - 6) *
221 						    (ofs_und8[buf_i] - ofs_und6[buf_i])), 2);
222 				range_bpg_offset = ofs_und6[buf_i] + res;
223 			} else if (bpp <= 12) {
224 				range_bpg_offset = ofs_und8[buf_i];
225 			} else if (bpp <= 15) {
226 				res = DIV_ROUND_UP(((bpp - 12) *
227 						    (ofs_und15[buf_i] - ofs_und12[buf_i])), 3);
228 				range_bpg_offset = ofs_und12[buf_i] + res;
229 			} else {
230 				range_bpg_offset = ofs_und15[buf_i];
231 			}
232 
233 			vdsc_cfg->rc_range_params[buf_i].range_bpg_offset =
234 				range_bpg_offset & DSC_RANGE_BPG_OFFSET_MASK;
235 		}
236 	}
237 }
238 
239 static int intel_dsc_slice_dimensions_valid(struct intel_crtc_state *pipe_config,
240 					    struct drm_dsc_config *vdsc_cfg)
241 {
242 	if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_RGB ||
243 	    pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) {
244 		if (vdsc_cfg->slice_height > 4095)
245 			return -EINVAL;
246 		if (vdsc_cfg->slice_height * vdsc_cfg->slice_width < 15000)
247 			return -EINVAL;
248 	} else if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
249 		if (vdsc_cfg->slice_width % 2)
250 			return -EINVAL;
251 		if (vdsc_cfg->slice_height % 2)
252 			return -EINVAL;
253 		if (vdsc_cfg->slice_height > 4094)
254 			return -EINVAL;
255 		if (vdsc_cfg->slice_height * vdsc_cfg->slice_width < 30000)
256 			return -EINVAL;
257 	}
258 
259 	return 0;
260 }
261 
262 int intel_dsc_compute_params(struct intel_crtc_state *pipe_config)
263 {
264 	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
265 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
266 	struct drm_dsc_config *vdsc_cfg = &pipe_config->dsc.config;
267 	u16 compressed_bpp = to_bpp_int(pipe_config->dsc.compressed_bpp_x16);
268 	int err;
269 	int ret;
270 
271 	vdsc_cfg->pic_width = pipe_config->hw.adjusted_mode.crtc_hdisplay;
272 	vdsc_cfg->slice_width = DIV_ROUND_UP(vdsc_cfg->pic_width,
273 					     pipe_config->dsc.slice_count);
274 
275 	err = intel_dsc_slice_dimensions_valid(pipe_config, vdsc_cfg);
276 
277 	if (err) {
278 		drm_dbg_kms(&dev_priv->drm, "Slice dimension requirements not met\n");
279 		return err;
280 	}
281 
282 	/*
283 	 * According to the DSC 1.2 spec, convert_rgb is 0 if the colorspace is
284 	 * YCbCr, else 1.
285 	 */
286 	vdsc_cfg->convert_rgb = pipe_config->output_format != INTEL_OUTPUT_FORMAT_YCBCR420 &&
287 				pipe_config->output_format != INTEL_OUTPUT_FORMAT_YCBCR444;
288 
289 	if (DISPLAY_VER(dev_priv) >= 14 &&
290 	    pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
291 		vdsc_cfg->native_420 = true;
292 	/* We do not support YCbCr422 as of now */
293 	vdsc_cfg->native_422 = false;
294 	vdsc_cfg->simple_422 = false;
295 	/* Gen 11 does not support VBR */
296 	vdsc_cfg->vbr_enable = false;
297 
298 	vdsc_cfg->bits_per_pixel = pipe_config->dsc.compressed_bpp_x16;
299 
300 	/*
301 	 * According to the DSC 1.2 spec, Section 4.1, if native_420 is set
302 	 * we need to double the current bpp.
303 	 */
304 	if (vdsc_cfg->native_420)
305 		vdsc_cfg->bits_per_pixel <<= 1;
306 
307 	vdsc_cfg->bits_per_component = pipe_config->pipe_bpp / 3;
308 
309 	drm_dsc_set_rc_buf_thresh(vdsc_cfg);
310 
311 	/*
312 	 * From XE_LPD onwards we support compressed bpps in steps of 1
313 	 * up to uncompressed bpp-1, hence calculate all the rc
314 	 * parameters here.
315 	 */
316 	if (DISPLAY_VER(dev_priv) >= 13) {
317 		calculate_rc_params(vdsc_cfg);
318 	} else {
319 		if ((compressed_bpp == 8 ||
320 		     compressed_bpp == 12) &&
321 		    (vdsc_cfg->bits_per_component == 8 ||
322 		     vdsc_cfg->bits_per_component == 10 ||
323 		     vdsc_cfg->bits_per_component == 12))
324 			ret = drm_dsc_setup_rc_params(vdsc_cfg, DRM_DSC_1_1_PRE_SCR);
325 		else
326 			ret = drm_dsc_setup_rc_params(vdsc_cfg, DRM_DSC_1_2_444);
327 
328 		if (ret)
329 			return ret;
330 	}
331 
332 	/*
333 	 * BitsPerComponent value determines mux_word_size:
334 	 * When BitsPerComponent is less than or equal to 10 bpc, muxWordSize will be
335 	 * 48 bits, otherwise 64 bits.
336 	 */
337 	if (vdsc_cfg->bits_per_component <= 10)
338 		vdsc_cfg->mux_word_size = DSC_MUX_WORD_SIZE_8_10_BPC;
339 	else
340 		vdsc_cfg->mux_word_size = DSC_MUX_WORD_SIZE_12_BPC;
341 
342 	/* InitialScaleValue is a 6 bit value with 3 fractional bits (U3.3) */
343 	vdsc_cfg->initial_scale_value = (vdsc_cfg->rc_model_size << 3) /
344 		(vdsc_cfg->rc_model_size - vdsc_cfg->initial_offset);
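	/* e.g. rc_model_size = 8192 and initial_offset = 6144 gives (8192 << 3) / 2048 = 32, i.e. 4.0 */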
345 
346 	return 0;
347 }
348 
349 enum intel_display_power_domain
350 intel_dsc_power_domain(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
351 {
352 	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
353 	enum pipe pipe = crtc->pipe;
354 
355 	/*
356 	 * VDSC/joining uses a separate power well, PW2, and requires
357 	 * POWER_DOMAIN_TRANSCODER_VDSC_PW2 power domain in two cases:
358 	 *
359 	 *  - ICL eDP/DSI transcoder
360 	 *  - Display version 12 (except RKL) pipe A
361 	 *
362 	 * For any other pipe, VDSC/joining uses the power well associated with
363 	 * the pipe in use. Hence another reference on the pipe power domain
364 	 * will suffice. (Except no VDSC/joining on ICL pipe A.)
365 	 */
366 	if (DISPLAY_VER(i915) == 12 && !IS_ROCKETLAKE(i915) && pipe == PIPE_A)
367 		return POWER_DOMAIN_TRANSCODER_VDSC_PW2;
368 	else if (is_pipe_dsc(crtc, cpu_transcoder))
369 		return POWER_DOMAIN_PIPE(pipe);
370 	else
371 		return POWER_DOMAIN_TRANSCODER_VDSC_PW2;
372 }
373 
374 static int intel_dsc_get_vdsc_per_pipe(const struct intel_crtc_state *crtc_state)
375 {
376 	return crtc_state->dsc.dsc_split ? 2 : 1;
377 }
378 
379 int intel_dsc_get_num_vdsc_instances(const struct intel_crtc_state *crtc_state)
380 {
381 	int num_vdsc_instances = intel_dsc_get_vdsc_per_pipe(crtc_state);
382 
383 	if (crtc_state->bigjoiner_pipes)
384 		num_vdsc_instances *= 2;
385 
386 	return num_vdsc_instances;
387 }
388 
389 static void intel_dsc_get_pps_reg(const struct intel_crtc_state *crtc_state, int pps,
390 				  i915_reg_t *dsc_reg, int dsc_reg_num)
391 {
392 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
393 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
394 	enum pipe pipe = crtc->pipe;
395 	bool pipe_dsc;
396 
397 	pipe_dsc = is_pipe_dsc(crtc, cpu_transcoder);
398 
399 	if (dsc_reg_num >= 3)
400 		MISSING_CASE(dsc_reg_num);
401 	if (dsc_reg_num >= 2)
402 		dsc_reg[1] = pipe_dsc ? ICL_DSC1_PPS(pipe, pps) : DSCC_PPS(pps);
403 	if (dsc_reg_num >= 1)
404 		dsc_reg[0] = pipe_dsc ? ICL_DSC0_PPS(pipe, pps) : DSCA_PPS(pps);
405 }
406 
407 static void intel_dsc_pps_write(const struct intel_crtc_state *crtc_state,
408 				int pps, u32 pps_val)
409 {
410 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
411 	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
412 	i915_reg_t dsc_reg[2];
413 	int i, vdsc_per_pipe, dsc_reg_num;
414 
415 	vdsc_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
416 	dsc_reg_num = min_t(int, ARRAY_SIZE(dsc_reg), vdsc_per_pipe);
417 
418 	drm_WARN_ON_ONCE(&i915->drm, dsc_reg_num < vdsc_per_pipe);
419 
420 	intel_dsc_get_pps_reg(crtc_state, pps, dsc_reg, dsc_reg_num);
421 
422 	for (i = 0; i < dsc_reg_num; i++)
423 		intel_de_write(i915, dsc_reg[i], pps_val);
424 }
425 
426 static void intel_dsc_pps_configure(const struct intel_crtc_state *crtc_state)
427 {
428 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
429 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
430 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
431 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
432 	enum pipe pipe = crtc->pipe;
433 	u32 pps_val;
434 	u32 rc_buf_thresh_dword[4];
435 	u32 rc_range_params_dword[8];
436 	int i = 0;
437 	int num_vdsc_instances = intel_dsc_get_num_vdsc_instances(crtc_state);
438 	int vdsc_instances_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
439 
440 	/* PPS 0 */
441 	pps_val = DSC_PPS0_VER_MAJOR(1) |
442 		DSC_PPS0_VER_MINOR(vdsc_cfg->dsc_version_minor) |
443 		DSC_PPS0_BPC(vdsc_cfg->bits_per_component) |
444 		DSC_PPS0_LINE_BUF_DEPTH(vdsc_cfg->line_buf_depth);
445 	if (vdsc_cfg->dsc_version_minor == 2) {
446 		pps_val |= DSC_PPS0_ALT_ICH_SEL;
447 		if (vdsc_cfg->native_420)
448 			pps_val |= DSC_PPS0_NATIVE_420_ENABLE;
449 		if (vdsc_cfg->native_422)
450 			pps_val |= DSC_PPS0_NATIVE_422_ENABLE;
451 	}
452 	if (vdsc_cfg->block_pred_enable)
453 		pps_val |= DSC_PPS0_BLOCK_PREDICTION;
454 	if (vdsc_cfg->convert_rgb)
455 		pps_val |= DSC_PPS0_COLOR_SPACE_CONVERSION;
456 	if (vdsc_cfg->simple_422)
457 		pps_val |= DSC_PPS0_422_ENABLE;
458 	if (vdsc_cfg->vbr_enable)
459 		pps_val |= DSC_PPS0_VBR_ENABLE;
460 	drm_dbg_kms(&dev_priv->drm, "PPS0 = 0x%08x\n", pps_val);
461 	intel_dsc_pps_write(crtc_state, 0, pps_val);
462 
463 	/* PPS 1 */
464 	pps_val = DSC_PPS1_BPP(vdsc_cfg->bits_per_pixel);
465 	drm_dbg_kms(&dev_priv->drm, "PPS1 = 0x%08x\n", pps_val);
466 	intel_dsc_pps_write(crtc_state, 1, pps_val);
467 
468 	/* PPS 2 */
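	/* Each VDSC engine compresses only its share of the frame width, hence pic_width is divided by the number of instances */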
469 	pps_val = DSC_PPS2_PIC_HEIGHT(vdsc_cfg->pic_height) |
470 		DSC_PPS2_PIC_WIDTH(vdsc_cfg->pic_width / num_vdsc_instances);
471 	drm_dbg_kms(&dev_priv->drm, "PPS2 = 0x%08x\n", pps_val);
472 	intel_dsc_pps_write(crtc_state, 2, pps_val);
473 
474 	/* PPS 3 */
475 	pps_val = DSC_PPS3_SLICE_HEIGHT(vdsc_cfg->slice_height) |
476 		DSC_PPS3_SLICE_WIDTH(vdsc_cfg->slice_width);
477 	drm_dbg_kms(&dev_priv->drm, "PPS3 = 0x%08x\n", pps_val);
478 	intel_dsc_pps_write(crtc_state, 3, pps_val);
479 
480 	/* PPS 4 */
481 	pps_val = DSC_PPS4_INITIAL_XMIT_DELAY(vdsc_cfg->initial_xmit_delay) |
482 		DSC_PPS4_INITIAL_DEC_DELAY(vdsc_cfg->initial_dec_delay);
483 	drm_dbg_kms(&dev_priv->drm, "PPS4 = 0x%08x\n", pps_val);
484 	intel_dsc_pps_write(crtc_state, 4, pps_val);
485 
486 	/* PPS 5 */
487 	pps_val = DSC_PPS5_SCALE_INC_INT(vdsc_cfg->scale_increment_interval) |
488 		DSC_PPS5_SCALE_DEC_INT(vdsc_cfg->scale_decrement_interval);
489 	drm_dbg_kms(&dev_priv->drm, "PPS5 = 0x%08x\n", pps_val);
490 	intel_dsc_pps_write(crtc_state, 5, pps_val);
491 
492 	/* PPS 6 */
493 	pps_val = DSC_PPS6_INITIAL_SCALE_VALUE(vdsc_cfg->initial_scale_value) |
494 		DSC_PPS6_FIRST_LINE_BPG_OFFSET(vdsc_cfg->first_line_bpg_offset) |
495 		DSC_PPS6_FLATNESS_MIN_QP(vdsc_cfg->flatness_min_qp) |
496 		DSC_PPS6_FLATNESS_MAX_QP(vdsc_cfg->flatness_max_qp);
497 	drm_dbg_kms(&dev_priv->drm, "PPS6 = 0x%08x\n", pps_val);
498 	intel_dsc_pps_write(crtc_state, 6, pps_val);
499 
500 	/* PPS 7 */
501 	pps_val = DSC_PPS7_SLICE_BPG_OFFSET(vdsc_cfg->slice_bpg_offset) |
502 		DSC_PPS7_NFL_BPG_OFFSET(vdsc_cfg->nfl_bpg_offset);
503 	drm_dbg_kms(&dev_priv->drm, "PPS7 = 0x%08x\n", pps_val);
504 	intel_dsc_pps_write(crtc_state, 7, pps_val);
505 
506 	/* PPS 8 */
507 	pps_val = DSC_PPS8_FINAL_OFFSET(vdsc_cfg->final_offset) |
508 		DSC_PPS8_INITIAL_OFFSET(vdsc_cfg->initial_offset);
509 	drm_dbg_kms(&dev_priv->drm, "PPS8 = 0x%08x\n", pps_val);
510 	intel_dsc_pps_write(crtc_state, 8, pps_val);
511 
512 	/* PPS 9 */
513 	pps_val = DSC_PPS9_RC_MODEL_SIZE(vdsc_cfg->rc_model_size) |
514 		DSC_PPS9_RC_EDGE_FACTOR(DSC_RC_EDGE_FACTOR_CONST);
515 	drm_dbg_kms(&dev_priv->drm, "PPS9 = 0x%08x\n", pps_val);
516 	intel_dsc_pps_write(crtc_state, 9, pps_val);
517 
518 	/* PPS 10 */
519 	pps_val = DSC_PPS10_RC_QUANT_INC_LIMIT0(vdsc_cfg->rc_quant_incr_limit0) |
520 		DSC_PPS10_RC_QUANT_INC_LIMIT1(vdsc_cfg->rc_quant_incr_limit1) |
521 		DSC_PPS10_RC_TARGET_OFF_HIGH(DSC_RC_TGT_OFFSET_HI_CONST) |
522 		DSC_PPS10_RC_TARGET_OFF_LOW(DSC_RC_TGT_OFFSET_LO_CONST);
523 	drm_dbg_kms(&dev_priv->drm, "PPS10 = 0x%08x\n", pps_val);
524 	intel_dsc_pps_write(crtc_state, 10, pps_val);
525 
526 	/* PPS 16 */
527 	pps_val = DSC_PPS16_SLICE_CHUNK_SIZE(vdsc_cfg->slice_chunk_size) |
528 		DSC_PPS16_SLICE_PER_LINE((vdsc_cfg->pic_width / num_vdsc_instances) /
529 					 vdsc_cfg->slice_width) |
530 		DSC_PPS16_SLICE_ROW_PER_FRAME(vdsc_cfg->pic_height /
531 					      vdsc_cfg->slice_height);
532 	drm_dbg_kms(&dev_priv->drm, "PPS16 = 0x%08x\n", pps_val);
533 	intel_dsc_pps_write(crtc_state, 16, pps_val);
534 
535 	if (DISPLAY_VER(dev_priv) >= 14) {
536 		/* PPS 17 */
537 		pps_val = DSC_PPS17_SL_BPG_OFFSET(vdsc_cfg->second_line_bpg_offset);
538 		drm_dbg_kms(&dev_priv->drm, "PPS17 = 0x%08x\n", pps_val);
539 		intel_dsc_pps_write(crtc_state, 17, pps_val);
540 
541 		/* PPS 18 */
542 		pps_val = DSC_PPS18_NSL_BPG_OFFSET(vdsc_cfg->nsl_bpg_offset) |
543 			DSC_PPS18_SL_OFFSET_ADJ(vdsc_cfg->second_line_offset_adj);
544 		drm_dbg_kms(&dev_priv->drm, "PPS18 = 0x%08x\n", pps_val);
545 		intel_dsc_pps_write(crtc_state, 18, pps_val);
546 	}
547 
548 	/* Populate the RC_BUF_THRESH registers */
549 	memset(rc_buf_thresh_dword, 0, sizeof(rc_buf_thresh_dword));
550 	for (i = 0; i < DSC_NUM_BUF_RANGES - 1; i++) {
551 		rc_buf_thresh_dword[i / 4] |=
552 			(u32)(vdsc_cfg->rc_buf_thresh[i] <<
553 			      BITS_PER_BYTE * (i % 4));
554 		drm_dbg_kms(&dev_priv->drm, "RC_BUF_THRESH_%d = 0x%08x\n", i,
555 			    rc_buf_thresh_dword[i / 4]);
556 	}
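	/*
	 * The 14 one-byte thresholds are packed four per dword, with the
	 * lowest threshold in the least significant byte.
	 */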
557 	if (!is_pipe_dsc(crtc, cpu_transcoder)) {
558 		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_0,
559 			       rc_buf_thresh_dword[0]);
560 		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_0_UDW,
561 			       rc_buf_thresh_dword[1]);
562 		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_1,
563 			       rc_buf_thresh_dword[2]);
564 		intel_de_write(dev_priv, DSCA_RC_BUF_THRESH_1_UDW,
565 			       rc_buf_thresh_dword[3]);
566 		if (vdsc_instances_per_pipe > 1) {
567 			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_0,
568 				       rc_buf_thresh_dword[0]);
569 			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_0_UDW,
570 				       rc_buf_thresh_dword[1]);
571 			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_1,
572 				       rc_buf_thresh_dword[2]);
573 			intel_de_write(dev_priv, DSCC_RC_BUF_THRESH_1_UDW,
574 				       rc_buf_thresh_dword[3]);
575 		}
576 	} else {
577 		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_0(pipe),
578 			       rc_buf_thresh_dword[0]);
579 		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_0_UDW(pipe),
580 			       rc_buf_thresh_dword[1]);
581 		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_1(pipe),
582 			       rc_buf_thresh_dword[2]);
583 		intel_de_write(dev_priv, ICL_DSC0_RC_BUF_THRESH_1_UDW(pipe),
584 			       rc_buf_thresh_dword[3]);
585 		if (vdsc_instances_per_pipe > 1) {
586 			intel_de_write(dev_priv,
587 				       ICL_DSC1_RC_BUF_THRESH_0(pipe),
588 				       rc_buf_thresh_dword[0]);
589 			intel_de_write(dev_priv,
590 				       ICL_DSC1_RC_BUF_THRESH_0_UDW(pipe),
591 				       rc_buf_thresh_dword[1]);
592 			intel_de_write(dev_priv,
593 				       ICL_DSC1_RC_BUF_THRESH_1(pipe),
594 				       rc_buf_thresh_dword[2]);
595 			intel_de_write(dev_priv,
596 				       ICL_DSC1_RC_BUF_THRESH_1_UDW(pipe),
597 				       rc_buf_thresh_dword[3]);
598 		}
599 	}
600 
601 	/* Populate the RC_RANGE_PARAMETERS registers */
602 	memset(rc_range_params_dword, 0, sizeof(rc_range_params_dword));
603 	for (i = 0; i < DSC_NUM_BUF_RANGES; i++) {
604 		rc_range_params_dword[i / 2] |=
605 			(u32)(((vdsc_cfg->rc_range_params[i].range_bpg_offset <<
606 				RC_BPG_OFFSET_SHIFT) |
607 			       (vdsc_cfg->rc_range_params[i].range_max_qp <<
608 				RC_MAX_QP_SHIFT) |
609 			       (vdsc_cfg->rc_range_params[i].range_min_qp <<
610 				RC_MIN_QP_SHIFT)) << 16 * (i % 2));
611 		drm_dbg_kms(&dev_priv->drm, "RC_RANGE_PARAM_%d = 0x%08x\n", i,
612 			    rc_range_params_dword[i / 2]);
613 	}
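	/*
	 * Each of the 15 ranges packs min_qp, max_qp and range_bpg_offset
	 * into 16 bits, two ranges per dword; the upper half of the last
	 * dword stays zero.
	 */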
614 	if (!is_pipe_dsc(crtc, cpu_transcoder)) {
615 		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_0,
616 			       rc_range_params_dword[0]);
617 		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_0_UDW,
618 			       rc_range_params_dword[1]);
619 		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_1,
620 			       rc_range_params_dword[2]);
621 		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_1_UDW,
622 			       rc_range_params_dword[3]);
623 		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_2,
624 			       rc_range_params_dword[4]);
625 		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_2_UDW,
626 			       rc_range_params_dword[5]);
627 		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_3,
628 			       rc_range_params_dword[6]);
629 		intel_de_write(dev_priv, DSCA_RC_RANGE_PARAMETERS_3_UDW,
630 			       rc_range_params_dword[7]);
631 		if (vdsc_instances_per_pipe > 1) {
632 			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_0,
633 				       rc_range_params_dword[0]);
634 			intel_de_write(dev_priv,
635 				       DSCC_RC_RANGE_PARAMETERS_0_UDW,
636 				       rc_range_params_dword[1]);
637 			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_1,
638 				       rc_range_params_dword[2]);
639 			intel_de_write(dev_priv,
640 				       DSCC_RC_RANGE_PARAMETERS_1_UDW,
641 				       rc_range_params_dword[3]);
642 			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_2,
643 				       rc_range_params_dword[4]);
644 			intel_de_write(dev_priv,
645 				       DSCC_RC_RANGE_PARAMETERS_2_UDW,
646 				       rc_range_params_dword[5]);
647 			intel_de_write(dev_priv, DSCC_RC_RANGE_PARAMETERS_3,
648 				       rc_range_params_dword[6]);
649 			intel_de_write(dev_priv,
650 				       DSCC_RC_RANGE_PARAMETERS_3_UDW,
651 				       rc_range_params_dword[7]);
652 		}
653 	} else {
654 		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_0(pipe),
655 			       rc_range_params_dword[0]);
656 		intel_de_write(dev_priv,
657 			       ICL_DSC0_RC_RANGE_PARAMETERS_0_UDW(pipe),
658 			       rc_range_params_dword[1]);
659 		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_1(pipe),
660 			       rc_range_params_dword[2]);
661 		intel_de_write(dev_priv,
662 			       ICL_DSC0_RC_RANGE_PARAMETERS_1_UDW(pipe),
663 			       rc_range_params_dword[3]);
664 		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_2(pipe),
665 			       rc_range_params_dword[4]);
666 		intel_de_write(dev_priv,
667 			       ICL_DSC0_RC_RANGE_PARAMETERS_2_UDW(pipe),
668 			       rc_range_params_dword[5]);
669 		intel_de_write(dev_priv, ICL_DSC0_RC_RANGE_PARAMETERS_3(pipe),
670 			       rc_range_params_dword[6]);
671 		intel_de_write(dev_priv,
672 			       ICL_DSC0_RC_RANGE_PARAMETERS_3_UDW(pipe),
673 			       rc_range_params_dword[7]);
674 		if (vdsc_instances_per_pipe > 1) {
675 			intel_de_write(dev_priv,
676 				       ICL_DSC1_RC_RANGE_PARAMETERS_0(pipe),
677 				       rc_range_params_dword[0]);
678 			intel_de_write(dev_priv,
679 				       ICL_DSC1_RC_RANGE_PARAMETERS_0_UDW(pipe),
680 				       rc_range_params_dword[1]);
681 			intel_de_write(dev_priv,
682 				       ICL_DSC1_RC_RANGE_PARAMETERS_1(pipe),
683 				       rc_range_params_dword[2]);
684 			intel_de_write(dev_priv,
685 				       ICL_DSC1_RC_RANGE_PARAMETERS_1_UDW(pipe),
686 				       rc_range_params_dword[3]);
687 			intel_de_write(dev_priv,
688 				       ICL_DSC1_RC_RANGE_PARAMETERS_2(pipe),
689 				       rc_range_params_dword[4]);
690 			intel_de_write(dev_priv,
691 				       ICL_DSC1_RC_RANGE_PARAMETERS_2_UDW(pipe),
692 				       rc_range_params_dword[5]);
693 			intel_de_write(dev_priv,
694 				       ICL_DSC1_RC_RANGE_PARAMETERS_3(pipe),
695 				       rc_range_params_dword[6]);
696 			intel_de_write(dev_priv,
697 				       ICL_DSC1_RC_RANGE_PARAMETERS_3_UDW(pipe),
698 				       rc_range_params_dword[7]);
699 		}
700 	}
701 }
702 
703 void intel_dsc_dsi_pps_write(struct intel_encoder *encoder,
704 			     const struct intel_crtc_state *crtc_state)
705 {
706 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
707 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
708 	struct mipi_dsi_device *dsi;
709 	struct drm_dsc_picture_parameter_set pps;
710 	enum port port;
711 
712 	if (!crtc_state->dsc.compression_enable)
713 		return;
714 
715 	drm_dsc_pps_payload_pack(&pps, vdsc_cfg);
716 
717 	for_each_dsi_port(port, intel_dsi->ports) {
718 		dsi = intel_dsi->dsi_hosts[port]->device;
719 
720 		mipi_dsi_picture_parameter_set(dsi, &pps);
721 		mipi_dsi_compression_mode(dsi, true);
722 	}
723 }
724 
725 void intel_dsc_dp_pps_write(struct intel_encoder *encoder,
726 			    const struct intel_crtc_state *crtc_state)
727 {
728 	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
729 	const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
730 	struct drm_dsc_pps_infoframe dp_dsc_pps_sdp;
731 
732 	if (!crtc_state->dsc.compression_enable)
733 		return;
734 
735 	/* Prepare DP SDP PPS header as per DP 1.4 spec, Table 2-123 */
736 	drm_dsc_dp_pps_header_init(&dp_dsc_pps_sdp.pps_header);
737 
738 	/* Fill the PPS payload bytes as per DSC spec 1.2 Table 4-1 */
739 	drm_dsc_pps_payload_pack(&dp_dsc_pps_sdp.pps_payload, vdsc_cfg);
740 
741 	dig_port->write_infoframe(encoder, crtc_state,
742 				  DP_SDP_PPS, &dp_dsc_pps_sdp,
743 				  sizeof(dp_dsc_pps_sdp));
744 }
745 
746 static i915_reg_t dss_ctl1_reg(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
747 {
748 	return is_pipe_dsc(crtc, cpu_transcoder) ?
749 		ICL_PIPE_DSS_CTL1(crtc->pipe) : DSS_CTL1;
750 }
751 
752 static i915_reg_t dss_ctl2_reg(struct intel_crtc *crtc, enum transcoder cpu_transcoder)
753 {
754 	return is_pipe_dsc(crtc, cpu_transcoder) ?
755 		ICL_PIPE_DSS_CTL2(crtc->pipe) : DSS_CTL2;
756 }
757 
758 void intel_uncompressed_joiner_enable(const struct intel_crtc_state *crtc_state)
759 {
760 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
761 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
762 	u32 dss_ctl1_val = 0;
763 
764 	if (crtc_state->bigjoiner_pipes && !crtc_state->dsc.compression_enable) {
765 		if (intel_crtc_is_bigjoiner_slave(crtc_state))
766 			dss_ctl1_val |= UNCOMPRESSED_JOINER_SLAVE;
767 		else
768 			dss_ctl1_val |= UNCOMPRESSED_JOINER_MASTER;
769 
770 		intel_de_write(dev_priv, dss_ctl1_reg(crtc, crtc_state->cpu_transcoder), dss_ctl1_val);
771 	}
772 }
773 
774 void intel_dsc_enable(const struct intel_crtc_state *crtc_state)
775 {
776 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
777 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
778 	u32 dss_ctl1_val = 0;
779 	u32 dss_ctl2_val = 0;
780 	int vdsc_instances_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
781 
782 	if (!crtc_state->dsc.compression_enable)
783 		return;
784 
785 	intel_dsc_pps_configure(crtc_state);
786 
787 	dss_ctl2_val |= LEFT_BRANCH_VDSC_ENABLE;
788 	if (vdsc_instances_per_pipe > 1) {
789 		dss_ctl2_val |= RIGHT_BRANCH_VDSC_ENABLE;
790 		dss_ctl1_val |= JOINER_ENABLE;
791 	}
792 	if (crtc_state->bigjoiner_pipes) {
793 		dss_ctl1_val |= BIG_JOINER_ENABLE;
794 		if (!intel_crtc_is_bigjoiner_slave(crtc_state))
795 			dss_ctl1_val |= MASTER_BIG_JOINER_ENABLE;
796 	}
797 	intel_de_write(dev_priv, dss_ctl1_reg(crtc, crtc_state->cpu_transcoder), dss_ctl1_val);
798 	intel_de_write(dev_priv, dss_ctl2_reg(crtc, crtc_state->cpu_transcoder), dss_ctl2_val);
799 }
800 
801 void intel_dsc_disable(const struct intel_crtc_state *old_crtc_state)
802 {
803 	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
804 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
805 
806 	/* Disable only if either of them is enabled */
807 	if (old_crtc_state->dsc.compression_enable ||
808 	    old_crtc_state->bigjoiner_pipes) {
809 		intel_de_write(dev_priv, dss_ctl1_reg(crtc, old_crtc_state->cpu_transcoder), 0);
810 		intel_de_write(dev_priv, dss_ctl2_reg(crtc, old_crtc_state->cpu_transcoder), 0);
811 	}
812 }
813 
814 static u32 intel_dsc_pps_read(struct intel_crtc_state *crtc_state, int pps,
815 			      bool *all_equal)
816 {
817 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
818 	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
819 	i915_reg_t dsc_reg[2];
820 	int i, vdsc_per_pipe, dsc_reg_num;
821 	u32 val;
822 
823 	vdsc_per_pipe = intel_dsc_get_vdsc_per_pipe(crtc_state);
824 	dsc_reg_num = min_t(int, ARRAY_SIZE(dsc_reg), vdsc_per_pipe);
825 
826 	drm_WARN_ON_ONCE(&i915->drm, dsc_reg_num < vdsc_per_pipe);
827 
828 	intel_dsc_get_pps_reg(crtc_state, pps, dsc_reg, dsc_reg_num);
829 
830 	*all_equal = true;
831 
832 	val = intel_de_read(i915, dsc_reg[0]);
833 
834 	for (i = 1; i < dsc_reg_num; i++) {
835 		if (intel_de_read(i915, dsc_reg[i]) != val) {
836 			*all_equal = false;
837 			break;
838 		}
839 	}
840 
841 	return val;
842 }
843 
844 static u32 intel_dsc_pps_read_and_verify(struct intel_crtc_state *crtc_state, int pps)
845 {
846 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
847 	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
848 	u32 val;
849 	bool all_equal;
850 
851 	val = intel_dsc_pps_read(crtc_state, pps, &all_equal);
852 	drm_WARN_ON(&i915->drm, !all_equal);
853 
854 	return val;
855 }
856 
857 static void intel_dsc_get_pps_config(struct intel_crtc_state *crtc_state)
858 {
859 	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
860 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
861 	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
862 	int num_vdsc_instances = intel_dsc_get_num_vdsc_instances(crtc_state);
863 	u32 pps_temp;
864 
865 	/* PPS 0 */
866 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 0);
867 
868 	vdsc_cfg->bits_per_component = REG_FIELD_GET(DSC_PPS0_BPC_MASK, pps_temp);
869 	vdsc_cfg->line_buf_depth = REG_FIELD_GET(DSC_PPS0_LINE_BUF_DEPTH_MASK, pps_temp);
870 	vdsc_cfg->block_pred_enable = pps_temp & DSC_PPS0_BLOCK_PREDICTION;
871 	vdsc_cfg->convert_rgb = pps_temp & DSC_PPS0_COLOR_SPACE_CONVERSION;
872 	vdsc_cfg->simple_422 = pps_temp & DSC_PPS0_422_ENABLE;
873 	vdsc_cfg->native_422 = pps_temp & DSC_PPS0_NATIVE_422_ENABLE;
874 	vdsc_cfg->native_420 = pps_temp & DSC_PPS0_NATIVE_420_ENABLE;
875 	vdsc_cfg->vbr_enable = pps_temp & DSC_PPS0_VBR_ENABLE;
876 
877 	/* PPS 1 */
878 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 1);
879 
880 	vdsc_cfg->bits_per_pixel = REG_FIELD_GET(DSC_PPS1_BPP_MASK, pps_temp);
881 
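	/* bits_per_pixel was doubled for native 4:2:0 in intel_dsc_compute_params(), so halve the value read back from PPS1 */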
882 	if (vdsc_cfg->native_420)
883 		vdsc_cfg->bits_per_pixel >>= 1;
884 
885 	crtc_state->dsc.compressed_bpp_x16 = vdsc_cfg->bits_per_pixel;
886 
887 	/* PPS 2 */
888 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 2);
889 
890 	vdsc_cfg->pic_width = REG_FIELD_GET(DSC_PPS2_PIC_WIDTH_MASK, pps_temp) * num_vdsc_instances;
891 	vdsc_cfg->pic_height = REG_FIELD_GET(DSC_PPS2_PIC_HEIGHT_MASK, pps_temp);
892 
893 	/* PPS 3 */
894 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 3);
895 
896 	vdsc_cfg->slice_width = REG_FIELD_GET(DSC_PPS3_SLICE_WIDTH_MASK, pps_temp);
897 	vdsc_cfg->slice_height = REG_FIELD_GET(DSC_PPS3_SLICE_HEIGHT_MASK, pps_temp);
898 
899 	/* PPS 4 */
900 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 4);
901 
902 	vdsc_cfg->initial_dec_delay = REG_FIELD_GET(DSC_PPS4_INITIAL_DEC_DELAY_MASK, pps_temp);
903 	vdsc_cfg->initial_xmit_delay = REG_FIELD_GET(DSC_PPS4_INITIAL_XMIT_DELAY_MASK, pps_temp);
904 
905 	/* PPS 5 */
906 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 5);
907 
908 	vdsc_cfg->scale_decrement_interval = REG_FIELD_GET(DSC_PPS5_SCALE_DEC_INT_MASK, pps_temp);
909 	vdsc_cfg->scale_increment_interval = REG_FIELD_GET(DSC_PPS5_SCALE_INC_INT_MASK, pps_temp);
910 
911 	/* PPS 6 */
912 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 6);
913 
914 	vdsc_cfg->initial_scale_value = REG_FIELD_GET(DSC_PPS6_INITIAL_SCALE_VALUE_MASK, pps_temp);
915 	vdsc_cfg->first_line_bpg_offset = REG_FIELD_GET(DSC_PPS6_FIRST_LINE_BPG_OFFSET_MASK, pps_temp);
916 	vdsc_cfg->flatness_min_qp = REG_FIELD_GET(DSC_PPS6_FLATNESS_MIN_QP_MASK, pps_temp);
917 	vdsc_cfg->flatness_max_qp = REG_FIELD_GET(DSC_PPS6_FLATNESS_MAX_QP_MASK, pps_temp);
918 
919 	/* PPS 7 */
920 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 7);
921 
922 	vdsc_cfg->nfl_bpg_offset = REG_FIELD_GET(DSC_PPS7_NFL_BPG_OFFSET_MASK, pps_temp);
923 	vdsc_cfg->slice_bpg_offset = REG_FIELD_GET(DSC_PPS7_SLICE_BPG_OFFSET_MASK, pps_temp);
924 
925 	/* PPS 8 */
926 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 8);
927 
928 	vdsc_cfg->initial_offset = REG_FIELD_GET(DSC_PPS8_INITIAL_OFFSET_MASK, pps_temp);
929 	vdsc_cfg->final_offset = REG_FIELD_GET(DSC_PPS8_FINAL_OFFSET_MASK, pps_temp);
930 
931 	/* PPS 9 */
932 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 9);
933 
934 	vdsc_cfg->rc_model_size = REG_FIELD_GET(DSC_PPS9_RC_MODEL_SIZE_MASK, pps_temp);
935 
936 	/* PPS 10 */
937 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 10);
938 
939 	vdsc_cfg->rc_quant_incr_limit0 = REG_FIELD_GET(DSC_PPS10_RC_QUANT_INC_LIMIT0_MASK, pps_temp);
940 	vdsc_cfg->rc_quant_incr_limit1 = REG_FIELD_GET(DSC_PPS10_RC_QUANT_INC_LIMIT1_MASK, pps_temp);
941 
942 	/* PPS 16 */
943 	pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 16);
944 
945 	vdsc_cfg->slice_chunk_size = REG_FIELD_GET(DSC_PPS16_SLICE_CHUNK_SIZE_MASK, pps_temp);
946 
947 	if (DISPLAY_VER(i915) >= 14) {
948 		/* PPS 17 */
949 		pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 17);
950 
951 		vdsc_cfg->second_line_bpg_offset = REG_FIELD_GET(DSC_PPS17_SL_BPG_OFFSET_MASK, pps_temp);
952 
953 		/* PPS 18 */
954 		pps_temp = intel_dsc_pps_read_and_verify(crtc_state, 18);
955 
956 		vdsc_cfg->nsl_bpg_offset = REG_FIELD_GET(DSC_PPS18_NSL_BPG_OFFSET_MASK, pps_temp);
957 		vdsc_cfg->second_line_offset_adj = REG_FIELD_GET(DSC_PPS18_SL_OFFSET_ADJ_MASK, pps_temp);
958 	}
959 }
960 
961 void intel_dsc_get_config(struct intel_crtc_state *crtc_state)
962 {
963 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
964 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
965 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
966 	enum intel_display_power_domain power_domain;
967 	intel_wakeref_t wakeref;
968 	u32 dss_ctl1, dss_ctl2;
969 
970 	if (!intel_dsc_source_support(crtc_state))
971 		return;
972 
973 	power_domain = intel_dsc_power_domain(crtc, cpu_transcoder);
974 
975 	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
976 	if (!wakeref)
977 		return;
978 
979 	dss_ctl1 = intel_de_read(dev_priv, dss_ctl1_reg(crtc, cpu_transcoder));
980 	dss_ctl2 = intel_de_read(dev_priv, dss_ctl2_reg(crtc, cpu_transcoder));
981 
982 	crtc_state->dsc.compression_enable = dss_ctl2 & LEFT_BRANCH_VDSC_ENABLE;
983 	if (!crtc_state->dsc.compression_enable)
984 		goto out;
985 
986 	crtc_state->dsc.dsc_split = (dss_ctl2 & RIGHT_BRANCH_VDSC_ENABLE) &&
987 		(dss_ctl1 & JOINER_ENABLE);
988 
989 	intel_dsc_get_pps_config(crtc_state);
990 out:
991 	intel_display_power_put(dev_priv, power_domain, wakeref);
992 }
993