1 // SPDX-License-Identifier: MIT
2 //
3 // Copyright 2024 Advanced Micro Devices, Inc.
4
5
6 #include "dml2_internal_types.h"
7 #include "dml_top.h"
8 #include "dml2_core_dcn4_calcs.h"
9 #include "dml2_internal_shared_types.h"
10 #include "dml21_utils.h"
11 #include "dml21_translation_helper.h"
12 #include "dml2_dc_resource_mgmt.h"
13
dml21_allocate_memory(struct dml2_context ** dml_ctx)14 static bool dml21_allocate_memory(struct dml2_context **dml_ctx)
15 {
16 *dml_ctx = (struct dml2_context *)kzalloc(sizeof(struct dml2_context), GFP_KERNEL);
17 if (!(*dml_ctx))
18 return false;
19
20 (*dml_ctx)->v21.dml_init.dml2_instance = (struct dml2_instance *)kzalloc(sizeof(struct dml2_instance), GFP_KERNEL);
21 if (!((*dml_ctx)->v21.dml_init.dml2_instance))
22 return false;
23
24 (*dml_ctx)->v21.mode_support.dml2_instance = (*dml_ctx)->v21.dml_init.dml2_instance;
25 (*dml_ctx)->v21.mode_programming.dml2_instance = (*dml_ctx)->v21.dml_init.dml2_instance;
26
27 (*dml_ctx)->v21.mode_support.display_config = &(*dml_ctx)->v21.display_config;
28 (*dml_ctx)->v21.mode_programming.display_config = (*dml_ctx)->v21.mode_support.display_config;
29
30 (*dml_ctx)->v21.mode_programming.programming = (struct dml2_display_cfg_programming *)kzalloc(sizeof(struct dml2_display_cfg_programming), GFP_KERNEL);
31 if (!((*dml_ctx)->v21.mode_programming.programming))
32 return false;
33
34 return true;
35 }
36
dml21_apply_debug_options(const struct dc * in_dc,struct dml2_context * dml_ctx,const struct dml2_configuration_options * config)37 static void dml21_apply_debug_options(const struct dc *in_dc, struct dml2_context *dml_ctx, const struct dml2_configuration_options *config)
38 {
39 bool disable_fams2;
40 struct dml2_pmo_options *pmo_options = &dml_ctx->v21.dml_init.options.pmo_options;
41
42 /* ODM options */
43 pmo_options->disable_dyn_odm = !config->minimize_dispclk_using_odm;
44 pmo_options->disable_dyn_odm_for_multi_stream = true;
45 pmo_options->disable_dyn_odm_for_stream_with_svp = true;
46
47 /* UCLK P-State options */
48 if (in_dc->debug.dml21_force_pstate_method) {
49 dml_ctx->config.pmo.force_pstate_method_enable = true;
50 for (int i = 0; i < MAX_PIPES; i++)
51 dml_ctx->config.pmo.force_pstate_method_values[i] = in_dc->debug.dml21_force_pstate_method_values[i];
52 } else {
53 dml_ctx->config.pmo.force_pstate_method_enable = false;
54 }
55
56 pmo_options->disable_vblank = ((in_dc->debug.dml21_disable_pstate_method_mask >> 1) & 1);
57
58 /* NOTE: DRR and SubVP Require FAMS2 */
59 disable_fams2 = !in_dc->debug.fams2_config.bits.enable;
60 pmo_options->disable_svp = ((in_dc->debug.dml21_disable_pstate_method_mask >> 2) & 1) ||
61 in_dc->debug.force_disable_subvp ||
62 disable_fams2;
63 pmo_options->disable_drr_clamped = ((in_dc->debug.dml21_disable_pstate_method_mask >> 3) & 1) ||
64 disable_fams2;
65 pmo_options->disable_drr_var = ((in_dc->debug.dml21_disable_pstate_method_mask >> 4) & 1) ||
66 disable_fams2;
67 pmo_options->disable_fams2 = disable_fams2;
68
69 pmo_options->disable_drr_var_when_var_active = in_dc->debug.disable_fams_gaming == INGAME_FAMS_DISABLE ||
70 in_dc->debug.disable_fams_gaming == INGAME_FAMS_MULTI_DISP_CLAMPED_ONLY;
71 pmo_options->disable_drr_clamped_when_var_active = in_dc->debug.disable_fams_gaming == INGAME_FAMS_DISABLE;
72 }
73
dml21_init(const struct dc * in_dc,struct dml2_context ** dml_ctx,const struct dml2_configuration_options * config)74 static void dml21_init(const struct dc *in_dc, struct dml2_context **dml_ctx, const struct dml2_configuration_options *config)
75 {
76 switch (in_dc->ctx->dce_version) {
77 case DCN_VERSION_4_01:
78 case DCN_VERSION_3_2: // TODO : Temporary for N-1 validation. Remove this after N-1 validation phase is complete.
79 (*dml_ctx)->v21.dml_init.options.project_id = dml2_project_dcn4x_stage2_auto_drr_svp;
80 break;
81 default:
82 (*dml_ctx)->v21.dml_init.options.project_id = dml2_project_invalid;
83 }
84
85 (*dml_ctx)->architecture = dml2_architecture_21;
86
87 /* Store configuration options */
88 (*dml_ctx)->config = *config;
89
90 /*Initialize SOCBB and DCNIP params */
91 dml21_initialize_soc_bb_params(&(*dml_ctx)->v21.dml_init, config, in_dc);
92 dml21_initialize_ip_params(&(*dml_ctx)->v21.dml_init, config, in_dc);
93 dml21_apply_soc_bb_overrides(&(*dml_ctx)->v21.dml_init, config, in_dc);
94
95 /* apply debug overrides */
96 dml21_apply_debug_options(in_dc, *dml_ctx, config);
97
98 /*Initialize DML21 instance */
99 dml2_initialize_instance(&(*dml_ctx)->v21.dml_init);
100 }
101
dml21_create(const struct dc * in_dc,struct dml2_context ** dml_ctx,const struct dml2_configuration_options * config)102 bool dml21_create(const struct dc *in_dc, struct dml2_context **dml_ctx, const struct dml2_configuration_options *config)
103 {
104 /* Allocate memory for initializing DML21 instance */
105 if (!dml21_allocate_memory(dml_ctx))
106 return false;
107
108 dml21_init(in_dc, dml_ctx, config);
109
110 return true;
111 }
112
/*
 * Release the internal allocations made by dml21_allocate_memory().
 * NOTE(review): does not free @dml2 itself — presumably the caller owns
 * and frees the wrapper context; confirm against the dml2 wrapper's
 * destroy path.
 */
void dml21_destroy(struct dml2_context *dml2)
{
	kfree(dml2->v21.dml_init.dml2_instance);
	kfree(dml2->v21.mode_programming.programming);
}
118
dml21_calculate_rq_and_dlg_params(const struct dc * dc,struct dc_state * context,struct resource_context * out_new_hw_state,struct dml2_context * in_ctx,unsigned int pipe_cnt)119 static void dml21_calculate_rq_and_dlg_params(const struct dc *dc, struct dc_state *context, struct resource_context *out_new_hw_state,
120 struct dml2_context *in_ctx, unsigned int pipe_cnt)
121 {
122 unsigned int dml_prog_idx = 0, dc_pipe_index = 0, num_dpps_required = 0;
123 struct dml2_per_plane_programming *pln_prog = NULL;
124 struct dml2_per_stream_programming *stream_prog = NULL;
125 struct pipe_ctx *dc_main_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__];
126 struct pipe_ctx *dc_phantom_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__] = {0};
127 int num_pipes;
128
129 context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;
130
131 /* copy global DCHUBBUB arbiter registers */
132 memcpy(&context->bw_ctx.bw.dcn.arb_regs, &in_ctx->v21.mode_programming.programming->global_regs.arb_regs, sizeof(struct dml2_display_arb_regs));
133
134 /* legacy only */
135 context->bw_ctx.bw.dcn.compbuf_size_kb = (int)in_ctx->v21.mode_programming.programming->global_regs.arb_regs.compbuf_size * 64;
136
137 context->bw_ctx.bw.dcn.mall_ss_size_bytes = 0;
138 context->bw_ctx.bw.dcn.mall_ss_psr_active_size_bytes = 0;
139 context->bw_ctx.bw.dcn.mall_subvp_size_bytes = 0;
140
141 for (dml_prog_idx = 0; dml_prog_idx < DML2_MAX_PLANES; dml_prog_idx++) {
142 pln_prog = &in_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];
143
144 if (!pln_prog->plane_descriptor)
145 continue;
146
147 stream_prog = &in_ctx->v21.mode_programming.programming->stream_programming[pln_prog->plane_descriptor->stream_index];
148 num_dpps_required = pln_prog->num_dpps_required;
149
150 if (num_dpps_required == 0) {
151 continue;
152 }
153 num_pipes = dml21_find_dc_pipes_for_plane(dc, context, in_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
154
155 if (num_pipes <= 0)
156 continue;
157
158 /* program each pipe */
159 for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
160 dml21_program_dc_pipe(in_ctx, context, dc_main_pipes[dc_pipe_index], pln_prog, stream_prog);
161
162 if (pln_prog->phantom_plane.valid && dc_phantom_pipes[dc_pipe_index]) {
163 dml21_program_dc_pipe(in_ctx, context, dc_phantom_pipes[dc_pipe_index], pln_prog, stream_prog);
164 }
165 }
166 }
167
168 /* assign global clocks */
169 context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;
170 context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;
171 if (in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.num_clk_values > 1) {
172 context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz =
173 in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.num_clk_values] * 1000;
174 } else {
175 context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[0] * 1000;
176 }
177
178 if (in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.num_clk_values > 1) {
179 context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz =
180 in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.num_clk_values] * 1000;
181 } else {
182 context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[0] * 1000;
183 }
184
185 /* get global mall allocation */
186 if (dc->res_pool->funcs->calculate_mall_ways_from_bytes) {
187 context->bw_ctx.bw.dcn.clk.num_ways = dc->res_pool->funcs->calculate_mall_ways_from_bytes(dc, context->bw_ctx.bw.dcn.mall_subvp_size_bytes);
188 } else {
189 context->bw_ctx.bw.dcn.clk.num_ways = 0;
190 }
191 }
192
dml21_mode_check_and_programming(const struct dc * in_dc,struct dc_state * context,struct dml2_context * dml_ctx)193 static bool dml21_mode_check_and_programming(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
194 {
195 bool result = false;
196 struct dml2_build_mode_programming_in_out *mode_programming = &dml_ctx->v21.mode_programming;
197
198 memset(&dml_ctx->v21.display_config, 0, sizeof(struct dml2_display_cfg));
199 memset(&dml_ctx->v21.dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
200 memset(&dml_ctx->v21.mode_programming.dml2_instance->scratch.build_mode_programming_locals.mode_programming_params, 0, sizeof(struct dml2_core_mode_programming_in_out));
201
202 if (!context)
203 return true;
204
205 if (context->stream_count == 0) {
206 dml21_build_fams2_programming(in_dc, context, dml_ctx);
207 return true;
208 }
209
210 /* scrub phantom's from current dc_state */
211 dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
212 dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);
213
214 /* Populate stream, plane mappings and other fields in display config. */
215 result = dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
216 if (!result)
217 return false;
218
219 result = dml2_build_mode_programming(mode_programming);
220 if (!result)
221 return false;
222
223 /* Check and map HW resources */
224 if (result && !dml_ctx->config.skip_hw_state_mapping) {
225 dml21_map_hw_resources(dml_ctx);
226 dml2_map_dc_pipes(dml_ctx, context, NULL, &dml_ctx->v21.dml_to_dc_pipe_mapping, in_dc->current_state);
227 /* if subvp phantoms are present, expand them into dc context */
228 dml21_handle_phantom_streams_planes(in_dc, context, dml_ctx);
229 }
230
231 /* Copy DML CLK, WM and REG outputs to bandwidth context */
232 if (result && !dml_ctx->config.skip_hw_state_mapping) {
233 dml21_calculate_rq_and_dlg_params(in_dc, context, &context->res_ctx, dml_ctx, in_dc->res_pool->pipe_count);
234 dml21_copy_clocks_to_dc_state(dml_ctx, context);
235 dml21_extract_watermark_sets(in_dc, &context->bw_ctx.bw.dcn.watermarks, dml_ctx);
236 if (in_dc->ctx->dce_version == DCN_VERSION_3_2) {
237 dml21_extract_legacy_watermark_set(in_dc, &context->bw_ctx.bw.dcn.watermarks.a, DML2_DCHUB_WATERMARK_SET_A, dml_ctx);
238 dml21_extract_legacy_watermark_set(in_dc, &context->bw_ctx.bw.dcn.watermarks.b, DML2_DCHUB_WATERMARK_SET_A, dml_ctx);
239 dml21_extract_legacy_watermark_set(in_dc, &context->bw_ctx.bw.dcn.watermarks.c, DML2_DCHUB_WATERMARK_SET_A, dml_ctx);
240 dml21_extract_legacy_watermark_set(in_dc, &context->bw_ctx.bw.dcn.watermarks.d, DML2_DCHUB_WATERMARK_SET_A, dml_ctx);
241 }
242
243 dml21_build_fams2_programming(in_dc, context, dml_ctx);
244 }
245
246 return true;
247 }
248
dml21_check_mode_support(const struct dc * in_dc,struct dc_state * context,struct dml2_context * dml_ctx)249 static bool dml21_check_mode_support(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
250 {
251 bool is_supported = false;
252 struct dml2_initialize_instance_in_out *dml_init = &dml_ctx->v21.dml_init;
253 struct dml2_check_mode_supported_in_out *mode_support = &dml_ctx->v21.mode_support;
254
255 memset(&dml_ctx->v21.display_config, 0, sizeof(struct dml2_display_cfg));
256 memset(&dml_ctx->v21.dml_to_dc_pipe_mapping, 0, sizeof(struct dml2_dml_to_dc_pipe_mapping));
257 memset(&dml_ctx->v21.mode_programming.dml2_instance->scratch.check_mode_supported_locals.mode_support_params, 0, sizeof(struct dml2_core_mode_support_in_out));
258
259 if (!context || context->stream_count == 0)
260 return true;
261
262 /* Scrub phantom's from current dc_state */
263 dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
264 dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);
265
266 mode_support->dml2_instance = dml_init->dml2_instance;
267 dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
268 dml_ctx->v21.mode_programming.dml2_instance->scratch.build_mode_programming_locals.mode_programming_params.programming = dml_ctx->v21.mode_programming.programming;
269 is_supported = dml2_check_mode_supported(mode_support);
270 if (!is_supported)
271 return false;
272
273 return true;
274 }
275
/*
 * Validate a DC state with DML2.1.
 * fast_validate runs the support check only; the full path also builds
 * and applies mode programming.
 */
bool dml21_validate(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx, bool fast_validate)
{
	if (fast_validate)
		return dml21_check_mode_support(in_dc, context, dml_ctx);

	return dml21_mode_check_and_programming(in_dc, context, dml_ctx);
}
287
/*
 * Build and distribute mcache (DCN4 meta-cache) register programming for
 * every plane/pipe in the given state.
 *
 * Two passes over the DML plane programming:
 *  1. collect a dml2_plane_mcache_configuration_descriptor per plane (and
 *     per SubVP phantom plane, appended after all main planes), then call
 *     dml2_build_mcache_programming() once;
 *  2. copy the resulting per-plane/per-pipe register sets into each
 *     pipe_ctx's mcache_regs.
 *
 * NOTE(review): both passes must walk planes in the same order so that
 * dml_phantom_prog_idx advances identically — do not reorder the loops.
 */
void dml21_prepare_mcache_programming(struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
{
	unsigned int dml_prog_idx, dml_phantom_prog_idx, dc_pipe_index;
	int num_pipes;
	struct pipe_ctx *dc_main_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__];
	/* zeroed so unfilled phantom slots read as NULL below */
	struct pipe_ctx *dc_phantom_pipes[__DML2_WRAPPER_MAX_STREAMS_PLANES__] = {0};

	struct dml2_per_plane_programming *pln_prog = NULL;
	struct dml2_plane_mcache_configuration_descriptor *mcache_config = NULL;
	struct prepare_mcache_programming_locals *l = &dml_ctx->v21.scratch.prepare_mcache_locals;

	/* nothing to program without streams */
	if (context->stream_count == 0) {
		return;
	}

	memset(&l->build_mcache_programming_params, 0, sizeof(struct dml2_build_mcache_programming_in_out));
	l->build_mcache_programming_params.dml2_instance = dml_ctx->v21.dml_init.dml2_instance;

	/* phantom's start after main planes */
	dml_phantom_prog_idx = dml_ctx->v21.mode_programming.programming->display_config.num_planes;

	/* Build mcache programming parameters per plane per pipe */
	for (dml_prog_idx = 0; dml_prog_idx < dml_ctx->v21.mode_programming.programming->display_config.num_planes; dml_prog_idx++) {
		pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];

		mcache_config = &l->build_mcache_programming_params.mcache_configurations[dml_prog_idx];
		memset(mcache_config, 0, sizeof(struct dml2_plane_mcache_configuration_descriptor));
		mcache_config->plane_descriptor = pln_prog->plane_descriptor;
		mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx];
		mcache_config->num_pipes = pln_prog->num_dpps_required;
		l->build_mcache_programming_params.num_configurations++;

		/* plane takes no DPPs: keep the (empty) configuration, skip pipes */
		if (pln_prog->num_dpps_required == 0) {
			continue;
		}

		num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
		if (num_pipes <= 0 || dc_main_pipes[0]->stream == NULL ||
				dc_main_pipes[0]->plane_state == NULL)
			continue;

		/* get config for each pipe */
		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
			ASSERT(dc_main_pipes[dc_pipe_index]);
			dml21_get_pipe_mcache_config(context, dc_main_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
		}

		/* get config for each phantom pipe */
		if (pln_prog->phantom_plane.valid &&
				dc_phantom_pipes[0] &&
				dc_main_pipes[0]->stream &&
				dc_phantom_pipes[0]->plane_state) {
			mcache_config = &l->build_mcache_programming_params.mcache_configurations[dml_phantom_prog_idx];
			memset(mcache_config, 0, sizeof(struct dml2_plane_mcache_configuration_descriptor));
			mcache_config->plane_descriptor = pln_prog->plane_descriptor;
			mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx];
			mcache_config->num_pipes = pln_prog->num_dpps_required;
			l->build_mcache_programming_params.num_configurations++;

			for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
				ASSERT(dc_phantom_pipes[dc_pipe_index]);
				dml21_get_pipe_mcache_config(context, dc_phantom_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
			}

			/* increment phantom index */
			dml_phantom_prog_idx++;
		}
	}

	/* Call to generate mcache programming per plane per pipe for the given display configuration */
	dml2_build_mcache_programming(&l->build_mcache_programming_params);

	/* get per plane per pipe mcache programming */
	for (dml_prog_idx = 0; dml_prog_idx < dml_ctx->v21.mode_programming.programming->display_config.num_planes; dml_prog_idx++) {
		pln_prog = &dml_ctx->v21.mode_programming.programming->plane_programming[dml_prog_idx];

		num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
		if (num_pipes <= 0 || dc_main_pipes[0]->stream == NULL ||
				dc_main_pipes[0]->plane_state == NULL)
			continue;

		/* get config for each pipe */
		for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
			ASSERT(dc_main_pipes[dc_pipe_index]);
			if (l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_prog_idx][dc_pipe_index]) {
				memcpy(&dc_main_pipes[dc_pipe_index]->mcache_regs,
						l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_prog_idx][dc_pipe_index],
						sizeof(struct dml2_hubp_pipe_mcache_regs));
			}
		}

		/* get config for each phantom pipe */
		if (pln_prog->phantom_plane.valid &&
				dc_phantom_pipes[0] &&
				dc_main_pipes[0]->stream &&
				dc_phantom_pipes[0]->plane_state) {
			for (dc_pipe_index = 0; dc_pipe_index < num_pipes; dc_pipe_index++) {
				ASSERT(dc_phantom_pipes[dc_pipe_index]);
				if (l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_phantom_prog_idx][dc_pipe_index]) {
					memcpy(&dc_phantom_pipes[dc_pipe_index]->mcache_regs,
							l->build_mcache_programming_params.per_plane_pipe_mcache_regs[dml_phantom_prog_idx][dc_pipe_index],
							sizeof(struct dml2_hubp_pipe_mcache_regs));
				}
			}
			/* increment phantom index */
			dml_phantom_prog_idx++;
		}
	}
}
397
dml21_copy(struct dml2_context * dst_dml_ctx,struct dml2_context * src_dml_ctx)398 void dml21_copy(struct dml2_context *dst_dml_ctx,
399 struct dml2_context *src_dml_ctx)
400 {
401 /* Preserve references to internals */
402 struct dml2_instance *dst_dml2_instance = dst_dml_ctx->v21.dml_init.dml2_instance;
403 struct dml2_display_cfg_programming *dst_dml2_programming = dst_dml_ctx->v21.mode_programming.programming;
404
405 /* Copy context */
406 memcpy(dst_dml_ctx, src_dml_ctx, sizeof(struct dml2_context));
407
408 /* Copy Internals */
409 memcpy(dst_dml2_instance, src_dml_ctx->v21.dml_init.dml2_instance, sizeof(struct dml2_instance));
410 memcpy(dst_dml2_programming, src_dml_ctx->v21.mode_programming.programming, sizeof(struct dml2_display_cfg_programming));
411
412 /* Restore references to internals */
413 dst_dml_ctx->v21.dml_init.dml2_instance = dst_dml2_instance;
414
415 dst_dml_ctx->v21.mode_support.dml2_instance = dst_dml2_instance;
416 dst_dml_ctx->v21.mode_programming.dml2_instance = dst_dml2_instance;
417
418 dst_dml_ctx->v21.mode_support.display_config = &dst_dml_ctx->v21.display_config;
419 dst_dml_ctx->v21.mode_programming.display_config = dst_dml_ctx->v21.mode_support.display_config;
420
421 dst_dml_ctx->v21.mode_programming.programming = dst_dml2_programming;
422
423 /* need to initialize copied instance for internal references to be correct */
424 dml2_initialize_instance(&dst_dml_ctx->v21.dml_init);
425 }
426
dml21_create_copy(struct dml2_context ** dst_dml_ctx,struct dml2_context * src_dml_ctx)427 bool dml21_create_copy(struct dml2_context **dst_dml_ctx,
428 struct dml2_context *src_dml_ctx)
429 {
430 /* Allocate memory for initializing DML21 instance */
431 if (!dml21_allocate_memory(dst_dml_ctx))
432 return false;
433
434 dml21_copy(*dst_dml_ctx, src_dml_ctx);
435
436 return true;
437 }
438
/*
 * Re-run full initialization on an existing DML2.1 context, e.g. after
 * configuration changes; simply delegates to dml21_init().
 */
void dml21_reinit(const struct dc *in_dc, struct dml2_context **dml_ctx, const struct dml2_configuration_options *config)
{
	dml21_init(in_dc, dml_ctx, config);
}
443
444