/*
 * Copyright (c) 2015-2022, NVIDIA CORPORATION. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "nvidia-drm-conftest.h" /* NV_DRM_ATOMIC_MODESET_AVAILABLE */

#if defined(NV_DRM_ATOMIC_MODESET_AVAILABLE)

#include "nvidia-drm-helper.h"
#include "nvidia-drm-priv.h"
#include "nvidia-drm-crtc.h"
#include "nvidia-drm-connector.h"
#include "nvidia-drm-encoder.h"
#include "nvidia-drm-utils.h"
#include "nvidia-drm-fb.h"
#include "nvidia-drm-ioctl.h"
#include "nvidia-drm-format.h"

#include "nvmisc.h"

#include <drm/drm_crtc_helper.h>
#include <drm/drm_plane_helper.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>

#if defined(NV_LINUX_NVHOST_H_PRESENT) && defined(CONFIG_TEGRA_GRHOST)
#include <linux/nvhost.h>
#endif

#if defined(NV_DRM_HAS_HDR_OUTPUT_METADATA)
static int
nv_drm_atomic_replace_property_blob_from_id(struct drm_device *dev,
                                            struct drm_property_blob **blob,
                                            uint64_t blob_id,
                                            ssize_t expected_size)
{
    struct drm_property_blob *new_blob = NULL;

    if (blob_id != 0) {
        new_blob = drm_property_lookup_blob(dev, blob_id);
        if (new_blob == NULL) {
            return -EINVAL;
        }

        if ((expected_size > 0) &&
            (new_blob->length != expected_size)) {
            drm_property_blob_put(new_blob);
            return -EINVAL;
        }
    }

    drm_property_replace_blob(blob, new_blob);
    drm_property_blob_put(new_blob);

    return 0;
}
#endif

static void nv_drm_plane_destroy(struct drm_plane *plane)
{
    struct nv_drm_plane *nv_plane = to_nv_plane(plane);

    /* plane->state gets freed here */
    drm_plane_cleanup(plane);

    nv_drm_free(nv_plane);
}

static inline void
plane_req_config_disable(struct NvKmsKapiLayerRequestedConfig *req_config)
{
    /* Clear layer config */
    memset(&req_config->config, 0, sizeof(req_config->config));

    /* Set flags to get cleared layer config applied */
    req_config->flags.surfaceChanged = NV_TRUE;
    req_config->flags.srcXYChanged = NV_TRUE;
    req_config->flags.srcWHChanged = NV_TRUE;
    req_config->flags.dstXYChanged = NV_TRUE;
    req_config->flags.dstWHChanged = NV_TRUE;
}

static inline void
cursor_req_config_disable(struct NvKmsKapiCursorRequestedConfig *req_config)
{
    req_config->surface = NULL;
    req_config->flags.surfaceChanged = NV_TRUE;
}

static void
cursor_plane_req_config_update(struct drm_plane *plane,
                               struct drm_plane_state *plane_state,
                               struct NvKmsKapiCursorRequestedConfig *req_config)
{
    struct nv_drm_plane *nv_plane = to_nv_plane(plane);
    struct NvKmsKapiCursorRequestedConfig old_config = *req_config;

    if (plane_state->fb == NULL) {
        cursor_req_config_disable(req_config);
        return;
    }

    *req_config = (struct NvKmsKapiCursorRequestedConfig) {
        .surface = to_nv_framebuffer(plane_state->fb)->pSurface,

        .dstX = plane_state->crtc_x,
        .dstY = plane_state->crtc_y,
    };

#if defined(NV_DRM_ALPHA_BLENDING_AVAILABLE)
    if (plane->blend_mode_property != NULL && plane->alpha_property != NULL) {

        switch (plane_state->pixel_blend_mode) {
            case DRM_MODE_BLEND_PREMULTI:
                req_config->compParams.compMode =
                    NVKMS_COMPOSITION_BLENDING_MODE_PREMULT_SURFACE_ALPHA;
                break;
            case DRM_MODE_BLEND_COVERAGE:
                req_config->compParams.compMode =
                    NVKMS_COMPOSITION_BLENDING_MODE_NON_PREMULT_SURFACE_ALPHA;
                break;
            default:
                /*
                 * We should not hit this, because
                 * plane_state->pixel_blend_mode should only have values
                 * registered in
                 * __nv_drm_plane_create_alpha_blending_properties().
                 */
                WARN_ON("Unsupported blending mode");
                break;

        }

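        /*
         * plane_state->alpha is a 16-bit DRM value (0xffff is fully opaque);
         * the >> 8 narrows it to the 8-bit surface alpha range used by the
         * NVKMS composition parameters.
         */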
        req_config->compParams.surfaceAlpha =
            plane_state->alpha >> 8;

    } else if (plane->blend_mode_property != NULL) {

        switch (plane_state->pixel_blend_mode) {
            case DRM_MODE_BLEND_PREMULTI:
                req_config->compParams.compMode =
                    NVKMS_COMPOSITION_BLENDING_MODE_PREMULT_ALPHA;
                break;
            case DRM_MODE_BLEND_COVERAGE:
                req_config->compParams.compMode =
                    NVKMS_COMPOSITION_BLENDING_MODE_NON_PREMULT_ALPHA;
                break;
            default:
                /*
                 * We should not hit this, because
                 * plane_state->pixel_blend_mode should only have values
                 * registered in
                 * __nv_drm_plane_create_alpha_blending_properties().
                 */
                WARN_ON("Unsupported blending mode");
                break;

        }

    } else {
        req_config->compParams.compMode =
            nv_plane->defaultCompositionMode;
    }
#else
    req_config->compParams.compMode = nv_plane->defaultCompositionMode;
#endif

    /*
     * Unconditionally mark the surface as changed, even if nothing changed,
     * so that we always get a flip event: a DRM client may flip with
     * the same surface and wait for a flip event.
     */
    req_config->flags.surfaceChanged = NV_TRUE;

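    /*
     * If the cursor was previously disabled (no surface) and is now being
     * enabled, force the position to be programmed along with the new
     * surface.
     */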
    if (old_config.surface == NULL &&
        old_config.surface != req_config->surface) {
        req_config->flags.dstXYChanged = NV_TRUE;
        return;
    }

    req_config->flags.dstXYChanged =
        old_config.dstX != req_config->dstX ||
        old_config.dstY != req_config->dstY;
}

static int
plane_req_config_update(struct drm_plane *plane,
                        struct drm_plane_state *plane_state,
                        struct NvKmsKapiLayerRequestedConfig *req_config)
{
    struct nv_drm_plane *nv_plane = to_nv_plane(plane);
    struct NvKmsKapiLayerConfig old_config = req_config->config;
    struct nv_drm_device *nv_dev = to_nv_device(plane->dev);
    struct nv_drm_plane_state *nv_drm_plane_state =
        to_nv_drm_plane_state(plane_state);

    if (plane_state->fb == NULL) {
        plane_req_config_disable(req_config);
        return 0;
    }

    *req_config = (struct NvKmsKapiLayerRequestedConfig) {
        .config = {
            .surface = to_nv_framebuffer(plane_state->fb)->pSurface,

            /* Source values are 16.16 fixed point */
            .srcX = plane_state->src_x >> 16,
            .srcY = plane_state->src_y >> 16,
            .srcWidth  = plane_state->src_w >> 16,
            .srcHeight = plane_state->src_h >> 16,

            .dstX = plane_state->crtc_x,
            .dstY = plane_state->crtc_y,
            .dstWidth  = plane_state->crtc_w,
            .dstHeight = plane_state->crtc_h,
        },
    };

#if defined(NV_DRM_ROTATION_AVAILABLE)
    /*
     * plane_state->rotation is only valid when plane->rotation_property
     * is non-NULL.
     */
    if (plane->rotation_property != NULL) {
        if (plane_state->rotation & DRM_MODE_REFLECT_X) {
            req_config->config.rrParams.reflectionX = true;
        }

        if (plane_state->rotation & DRM_MODE_REFLECT_Y) {
            req_config->config.rrParams.reflectionY = true;
        }

        switch (plane_state->rotation & DRM_MODE_ROTATE_MASK) {
            case DRM_MODE_ROTATE_0:
                req_config->config.rrParams.rotation = NVKMS_ROTATION_0;
                break;
            case DRM_MODE_ROTATE_90:
                req_config->config.rrParams.rotation = NVKMS_ROTATION_90;
                break;
            case DRM_MODE_ROTATE_180:
                req_config->config.rrParams.rotation = NVKMS_ROTATION_180;
                break;
            case DRM_MODE_ROTATE_270:
                req_config->config.rrParams.rotation = NVKMS_ROTATION_270;
                break;
            default:
                /*
                 * We should not hit this, because
                 * plane_state->rotation should only have values
                 * registered in
                 * __nv_drm_plane_create_rotation_property().
                 */
                WARN_ON("Unsupported rotation");
                break;
        }
    }
#endif

#if defined(NV_DRM_ALPHA_BLENDING_AVAILABLE)
    if (plane->blend_mode_property != NULL && plane->alpha_property != NULL) {

        switch (plane_state->pixel_blend_mode) {
            case DRM_MODE_BLEND_PREMULTI:
                req_config->config.compParams.compMode =
                    NVKMS_COMPOSITION_BLENDING_MODE_PREMULT_SURFACE_ALPHA;
                break;
            case DRM_MODE_BLEND_COVERAGE:
                req_config->config.compParams.compMode =
                    NVKMS_COMPOSITION_BLENDING_MODE_NON_PREMULT_SURFACE_ALPHA;
                break;
            default:
                /*
                 * We should not hit this, because
                 * plane_state->pixel_blend_mode should only have values
                 * registered in
                 * __nv_drm_plane_create_alpha_blending_properties().
                 */
                WARN_ON("Unsupported blending mode");
                break;

        }

        req_config->config.compParams.surfaceAlpha =
            plane_state->alpha >> 8;

    } else if (plane->blend_mode_property != NULL) {

        switch (plane_state->pixel_blend_mode) {
            case DRM_MODE_BLEND_PREMULTI:
                req_config->config.compParams.compMode =
                    NVKMS_COMPOSITION_BLENDING_MODE_PREMULT_ALPHA;
                break;
            case DRM_MODE_BLEND_COVERAGE:
                req_config->config.compParams.compMode =
                    NVKMS_COMPOSITION_BLENDING_MODE_NON_PREMULT_ALPHA;
                break;
            default:
                /*
                 * We should not hit this, because
                 * plane_state->pixel_blend_mode should only have values
                 * registered in
                 * __nv_drm_plane_create_alpha_blending_properties().
                 */
                WARN_ON("Unsupported blending mode");
                break;

        }

    } else {
        req_config->config.compParams.compMode =
            nv_plane->defaultCompositionMode;
    }
#else
    req_config->config.compParams.compMode =
        nv_plane->defaultCompositionMode;
#endif

    req_config->config.inputColorSpace =
        nv_drm_plane_state->input_colorspace;

    req_config->config.syncptParams.preSyncptSpecified = false;
    req_config->config.syncptParams.postSyncptRequested = false;

    if (plane_state->fence != NULL || nv_drm_plane_state->fd_user_ptr) {
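        /*
         * A pre-syncpt fence or an out-fence (post-syncpt) was requested,
         * but the device does not support syncpts; reject the update.
         */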
        if (!nv_dev->supportsSyncpts) {
            return -1;
        }

#if defined(NV_LINUX_NVHOST_H_PRESENT) && defined(CONFIG_TEGRA_GRHOST)
#if defined(NV_NVHOST_DMA_FENCE_UNPACK_PRESENT)
        if (plane_state->fence != NULL) {
            int ret = nvhost_dma_fence_unpack(
                          plane_state->fence,
                          &req_config->config.syncptParams.preSyncptId,
                          &req_config->config.syncptParams.preSyncptValue);
            if (ret != 0) {
                return ret;
            }
            req_config->config.syncptParams.preSyncptSpecified = true;
        }
#endif

        if (nv_drm_plane_state->fd_user_ptr) {
            req_config->config.syncptParams.postSyncptRequested = true;
        }
#else
        return -1;
#endif
    }

#if defined(NV_DRM_HAS_HDR_OUTPUT_METADATA)
    if (nv_drm_plane_state->hdr_output_metadata != NULL) {
        struct hdr_output_metadata *hdr_metadata =
            nv_drm_plane_state->hdr_output_metadata->data;
        struct hdr_metadata_infoframe *info_frame =
            &hdr_metadata->hdmi_metadata_type1;
        struct nv_drm_device *nv_dev = to_nv_device(plane->dev);
        uint32_t i;

        if (hdr_metadata->metadata_type != HDMI_STATIC_METADATA_TYPE1) {
            NV_DRM_DEV_LOG_ERR(nv_dev, "Unsupported Metadata Type");
            return -1;
        }

        for (i = 0; i < ARRAY_SIZE(info_frame->display_primaries); i ++) {
            req_config->config.hdrMetadata.displayPrimaries[i].x =
                info_frame->display_primaries[i].x;
            req_config->config.hdrMetadata.displayPrimaries[i].y =
                info_frame->display_primaries[i].y;
        }

        req_config->config.hdrMetadata.whitePoint.x =
            info_frame->white_point.x;
        req_config->config.hdrMetadata.whitePoint.y =
            info_frame->white_point.y;
        req_config->config.hdrMetadata.maxDisplayMasteringLuminance =
            info_frame->max_display_mastering_luminance;
        req_config->config.hdrMetadata.minDisplayMasteringLuminance =
            info_frame->min_display_mastering_luminance;
        req_config->config.hdrMetadata.maxCLL =
            info_frame->max_cll;
        req_config->config.hdrMetadata.maxFALL =
            info_frame->max_fall;

        req_config->config.hdrMetadataSpecified = true;

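        /*
         * Map the infoframe EOTF to the corresponding NVKMS output transfer
         * function.
         */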
        switch (info_frame->eotf) {
            case HDMI_EOTF_SMPTE_ST2084:
                req_config->config.tf = NVKMS_OUTPUT_TF_PQ;
                break;
            case HDMI_EOTF_TRADITIONAL_GAMMA_SDR:
                req_config->config.tf =
                    NVKMS_OUTPUT_TF_TRADITIONAL_GAMMA_SDR;
                break;
            default:
                NV_DRM_DEV_LOG_ERR(nv_dev, "Unsupported EOTF");
                return -1;
        }
    } else {
        req_config->config.hdrMetadataSpecified = false;
        req_config->config.tf = NVKMS_OUTPUT_TF_NONE;
    }
#endif

    /*
     * Unconditionally mark the surface as changed, even if nothing changed,
     * so that we always get a flip event: a DRM client may flip with
     * the same surface and wait for a flip event.
     */
    req_config->flags.surfaceChanged = NV_TRUE;

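    /*
     * If the layer was previously disabled (no surface) and is now being
     * enabled, force all source/destination flags so that the full
     * configuration is programmed.
     */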
    if (old_config.surface == NULL &&
        old_config.surface != req_config->config.surface) {
        req_config->flags.srcXYChanged = NV_TRUE;
        req_config->flags.srcWHChanged = NV_TRUE;
        req_config->flags.dstXYChanged = NV_TRUE;
        req_config->flags.dstWHChanged = NV_TRUE;
        return 0;
    }

    req_config->flags.srcXYChanged =
        old_config.srcX != req_config->config.srcX ||
        old_config.srcY != req_config->config.srcY;

    req_config->flags.srcWHChanged =
        old_config.srcWidth != req_config->config.srcWidth ||
        old_config.srcHeight != req_config->config.srcHeight;

    req_config->flags.dstXYChanged =
        old_config.dstX != req_config->config.dstX ||
        old_config.dstY != req_config->config.dstY;

    req_config->flags.dstWHChanged =
        old_config.dstWidth != req_config->config.dstWidth ||
        old_config.dstHeight != req_config->config.dstHeight;

    return 0;
}

static bool __is_async_flip_requested(const struct drm_plane *plane,
                                      const struct drm_crtc_state *crtc_state)
{
    if (plane->type == DRM_PLANE_TYPE_PRIMARY) {
#if defined(NV_DRM_CRTC_STATE_HAS_ASYNC_FLIP)
        return crtc_state->async_flip;
#elif defined(NV_DRM_CRTC_STATE_HAS_PAGEFLIP_FLAGS)
        return !!(crtc_state->pageflip_flags & DRM_MODE_PAGE_FLIP_ASYNC);
#endif
    }

    return false;
}

static int __nv_drm_cursor_atomic_check(struct drm_plane *plane,
                                        struct drm_plane_state *plane_state)
{
    struct nv_drm_plane *nv_plane = to_nv_plane(plane);
    int i;
    struct drm_crtc *crtc;
    struct drm_crtc_state *crtc_state;

    WARN_ON(nv_plane->layer_idx != NVKMS_KAPI_LAYER_INVALID_IDX);

    nv_drm_for_each_crtc_in_state(plane_state->state, crtc, crtc_state, i) {
        struct nv_drm_crtc_state *nv_crtc_state = to_nv_crtc_state(crtc_state);
        struct NvKmsKapiHeadRequestedConfig *head_req_config =
            &nv_crtc_state->req_config;
        struct NvKmsKapiCursorRequestedConfig *cursor_req_config =
            &head_req_config->cursorRequestedConfig;

        if (plane->state->crtc == crtc &&
            plane->state->crtc != plane_state->crtc) {
            cursor_req_config_disable(cursor_req_config);
            continue;
        }

        if (plane_state->crtc == crtc) {
            cursor_plane_req_config_update(plane, plane_state,
                                           cursor_req_config);
        }
    }

    return 0;
}

#if defined(NV_DRM_PLANE_ATOMIC_CHECK_HAS_ATOMIC_STATE_ARG)
static int nv_drm_plane_atomic_check(struct drm_plane *plane,
                                     struct drm_atomic_state *state)
#else
static int nv_drm_plane_atomic_check(struct drm_plane *plane,
                                     struct drm_plane_state *plane_state)
#endif
{
    struct nv_drm_plane *nv_plane = to_nv_plane(plane);
#if defined(NV_DRM_PLANE_ATOMIC_CHECK_HAS_ATOMIC_STATE_ARG)
    struct drm_plane_state *plane_state =
        drm_atomic_get_new_plane_state(state, plane);
#endif
    int i;
    struct drm_crtc *crtc;
    struct drm_crtc_state *crtc_state;
    int ret;

    if (plane->type == DRM_PLANE_TYPE_CURSOR) {
        return __nv_drm_cursor_atomic_check(plane, plane_state);
    }

    WARN_ON(nv_plane->layer_idx == NVKMS_KAPI_LAYER_INVALID_IDX);

    nv_drm_for_each_crtc_in_state(plane_state->state, crtc, crtc_state, i) {
        struct nv_drm_crtc_state *nv_crtc_state = to_nv_crtc_state(crtc_state);
        struct NvKmsKapiHeadRequestedConfig *head_req_config =
            &nv_crtc_state->req_config;
        struct NvKmsKapiLayerRequestedConfig *plane_requested_config =
            &head_req_config->layerRequestedConfig[nv_plane->layer_idx];

        if (plane->state->crtc == crtc &&
            plane->state->crtc != plane_state->crtc) {
            plane_req_config_disable(plane_requested_config);
            continue;
        }

        if (plane_state->crtc == crtc) {
            ret = plane_req_config_update(plane,
                                          plane_state,
                                          plane_requested_config);
            if (ret != 0) {
                return ret;
            }

            if (__is_async_flip_requested(plane, crtc_state)) {
                /*
                 * An async flip requests that the flip happen 'as soon as
                 * possible', meaning that it should not be delayed waiting
                 * for vblank. This may cause tearing on the screen.
                 */
                plane_requested_config->config.minPresentInterval = 0;
                plane_requested_config->config.tearing = NV_TRUE;
            } else {
                plane_requested_config->config.minPresentInterval = 1;
                plane_requested_config->config.tearing = NV_FALSE;
            }
        }
    }

    return 0;
}

#if defined(NV_DRM_UNIVERSAL_PLANE_INIT_HAS_FORMAT_MODIFIERS_ARG)
static bool nv_drm_plane_format_mod_supported(struct drm_plane *plane,
                                              uint32_t format,
                                              uint64_t modifier)
{
    /* All supported modifiers are compatible with all supported formats */
    return true;
}
#endif


static int nv_drm_plane_atomic_set_property(
    struct drm_plane *plane,
    struct drm_plane_state *state,
    struct drm_property *property,
    uint64_t val)
{
    struct nv_drm_device *nv_dev = to_nv_device(plane->dev);
    struct nv_drm_plane_state *nv_drm_plane_state =
        to_nv_drm_plane_state(state);

    if (property == nv_dev->nv_out_fence_property) {
#if defined(NV_LINUX_NVHOST_H_PRESENT) && defined(CONFIG_TEGRA_GRHOST)
        nv_drm_plane_state->fd_user_ptr = u64_to_user_ptr(val);
#endif
        return 0;
    } else if (property == nv_dev->nv_input_colorspace_property) {
        nv_drm_plane_state->input_colorspace = val;
        return 0;
    }
#if defined(NV_DRM_HAS_HDR_OUTPUT_METADATA)
    else if (property == nv_dev->nv_hdr_output_metadata_property) {
        return nv_drm_atomic_replace_property_blob_from_id(
                nv_dev->dev,
                &nv_drm_plane_state->hdr_output_metadata,
                val,
                sizeof(struct hdr_output_metadata));
    }
#endif

    return -EINVAL;
}

static int nv_drm_plane_atomic_get_property(
    struct drm_plane *plane,
    const struct drm_plane_state *state,
    struct drm_property *property,
    uint64_t *val)
{
    struct nv_drm_device *nv_dev = to_nv_device(plane->dev);
    const struct nv_drm_plane_state *nv_drm_plane_state =
        to_nv_drm_plane_state_const(state);

    if (property == nv_dev->nv_out_fence_property) {
        return 0;
    } else if (property == nv_dev->nv_input_colorspace_property) {
        *val = nv_drm_plane_state->input_colorspace;
        return 0;
    }
#if defined(NV_DRM_HAS_HDR_OUTPUT_METADATA)
    else if (property == nv_dev->nv_hdr_output_metadata_property) {
        const struct nv_drm_plane_state *nv_drm_plane_state =
            to_nv_drm_plane_state_const(state);
        *val = nv_drm_plane_state->hdr_output_metadata ?
            nv_drm_plane_state->hdr_output_metadata->base.id : 0;
        return 0;
    }
#endif

    return -EINVAL;
}

static struct drm_plane_state *
nv_drm_plane_atomic_duplicate_state(struct drm_plane *plane)
{
    struct nv_drm_plane_state *nv_old_plane_state =
        to_nv_drm_plane_state(plane->state);
    struct nv_drm_plane_state *nv_plane_state =
        nv_drm_calloc(1, sizeof(*nv_plane_state));

    if (nv_plane_state == NULL) {
        return NULL;
    }

    __drm_atomic_helper_plane_duplicate_state(plane, &nv_plane_state->base);

    nv_plane_state->fd_user_ptr = nv_old_plane_state->fd_user_ptr;
    nv_plane_state->input_colorspace = nv_old_plane_state->input_colorspace;

#if defined(NV_DRM_HAS_HDR_OUTPUT_METADATA)
    nv_plane_state->hdr_output_metadata = nv_old_plane_state->hdr_output_metadata;
    if (nv_plane_state->hdr_output_metadata) {
        drm_property_blob_get(nv_plane_state->hdr_output_metadata);
    }
#endif

    return &nv_plane_state->base;
}

static inline void __nv_drm_plane_atomic_destroy_state(
    struct drm_plane *plane,
    struct drm_plane_state *state)
{
#if defined(NV_DRM_ATOMIC_HELPER_PLANE_DESTROY_STATE_HAS_PLANE_ARG)
    __drm_atomic_helper_plane_destroy_state(plane, state);
#else
    __drm_atomic_helper_plane_destroy_state(state);
#endif

#if defined(NV_DRM_HAS_HDR_OUTPUT_METADATA)
    struct nv_drm_plane_state *nv_drm_plane_state =
        to_nv_drm_plane_state(state);
    drm_property_blob_put(nv_drm_plane_state->hdr_output_metadata);
#endif
}

static void nv_drm_plane_atomic_destroy_state(
    struct drm_plane *plane,
    struct drm_plane_state *state)
{
    __nv_drm_plane_atomic_destroy_state(plane, state);

    nv_drm_free(to_nv_drm_plane_state(state));
}

static const struct drm_plane_funcs nv_plane_funcs = {
    .update_plane           = drm_atomic_helper_update_plane,
    .disable_plane          = drm_atomic_helper_disable_plane,
    .destroy                = nv_drm_plane_destroy,
    .reset                  = drm_atomic_helper_plane_reset,
    .atomic_get_property    = nv_drm_plane_atomic_get_property,
    .atomic_set_property    = nv_drm_plane_atomic_set_property,
    .atomic_duplicate_state = nv_drm_plane_atomic_duplicate_state,
    .atomic_destroy_state   = nv_drm_plane_atomic_destroy_state,
#if defined(NV_DRM_UNIVERSAL_PLANE_INIT_HAS_FORMAT_MODIFIERS_ARG)
    .format_mod_supported   = nv_drm_plane_format_mod_supported,
#endif
};

static const struct drm_plane_helper_funcs nv_plane_helper_funcs = {
    .atomic_check   = nv_drm_plane_atomic_check,
};

static void nv_drm_crtc_destroy(struct drm_crtc *crtc)
{
    struct nv_drm_crtc *nv_crtc = to_nv_crtc(crtc);

    drm_crtc_cleanup(crtc);

    nv_drm_free(nv_crtc);
}

static inline void
__nv_drm_atomic_helper_crtc_destroy_state(struct drm_crtc *crtc,
                                          struct drm_crtc_state *crtc_state)
{
#if defined(NV_DRM_ATOMIC_HELPER_CRTC_DESTROY_STATE_HAS_CRTC_ARG)
    __drm_atomic_helper_crtc_destroy_state(crtc, crtc_state);
#else
    __drm_atomic_helper_crtc_destroy_state(crtc_state);
#endif
}

static inline void nv_drm_crtc_duplicate_req_head_modeset_config(
    const struct NvKmsKapiHeadRequestedConfig *old,
    struct NvKmsKapiHeadRequestedConfig *new)
{
    uint32_t i;

    /*
     * Do not duplicate fields like the 'modeChanged' flags, which express the
     * delta between the new configuration and the previous/old one, because
     * the new configuration has no changes yet with respect to the old one.
     */
    *new = (struct NvKmsKapiHeadRequestedConfig) {
        .modeSetConfig = old->modeSetConfig,
    };

    for (i = 0; i < ARRAY_SIZE(old->layerRequestedConfig); i++) {
        new->layerRequestedConfig[i] = (struct NvKmsKapiLayerRequestedConfig) {
            .config = old->layerRequestedConfig[i].config,
        };
    }
}

/**
 * nv_drm_atomic_crtc_duplicate_state - crtc state duplicate hook
 * @crtc: DRM crtc
 *
 * Allocate flip state and associate it with the DRM crtc state; this flip
 * state will be consumed when the atomic update is committed to hardware by
 * nv_drm_atomic_helper_commit_tail().
 */
static struct drm_crtc_state*
nv_drm_atomic_crtc_duplicate_state(struct drm_crtc *crtc)
{
    struct nv_drm_crtc_state *nv_state = nv_drm_calloc(1, sizeof(*nv_state));

    if (nv_state == NULL) {
        return NULL;
    }

    if ((nv_state->nv_flip =
            nv_drm_calloc(1, sizeof(*(nv_state->nv_flip)))) == NULL) {
        nv_drm_free(nv_state);
        return NULL;
    }

    __drm_atomic_helper_crtc_duplicate_state(crtc, &nv_state->base);

    INIT_LIST_HEAD(&nv_state->nv_flip->list_entry);
    INIT_LIST_HEAD(&nv_state->nv_flip->deferred_flip_list);

    nv_drm_crtc_duplicate_req_head_modeset_config(
        &(to_nv_crtc_state(crtc->state)->req_config),
        &nv_state->req_config);

    return &nv_state->base;
}

/**
 * nv_drm_atomic_crtc_destroy_state - crtc state destroy hook
 * @crtc: DRM crtc
 * @state: DRM crtc state object to destroy
 *
 * Destroy the flip state associated with the given crtc state if it was not
 * consumed because the atomic commit failed.
 */
static void nv_drm_atomic_crtc_destroy_state(struct drm_crtc *crtc,
                                             struct drm_crtc_state *state)
{
    struct nv_drm_crtc_state *nv_state = to_nv_crtc_state(state);

    if (nv_state->nv_flip != NULL) {
        nv_drm_free(nv_state->nv_flip);
        nv_state->nv_flip = NULL;
    }

    __nv_drm_atomic_helper_crtc_destroy_state(crtc, &nv_state->base);

    nv_drm_free(nv_state);
}

static struct drm_crtc_funcs nv_crtc_funcs = {
    .set_config             = drm_atomic_helper_set_config,
    .page_flip              = drm_atomic_helper_page_flip,
    .reset                  = drm_atomic_helper_crtc_reset,
    .destroy                = nv_drm_crtc_destroy,
    .atomic_duplicate_state = nv_drm_atomic_crtc_duplicate_state,
    .atomic_destroy_state   = nv_drm_atomic_crtc_destroy_state,
};

/*
 * In kernel versions before the addition of
 * drm_crtc_state::connectors_changed, connector changes were
 * reflected in drm_crtc_state::mode_changed.
 */
static inline bool
nv_drm_crtc_state_connectors_changed(struct drm_crtc_state *crtc_state)
{
#if defined(NV_DRM_CRTC_STATE_HAS_CONNECTORS_CHANGED)
    return crtc_state->connectors_changed;
#else
    return crtc_state->mode_changed;
#endif
}

static int head_modeset_config_attach_connector(
    struct nv_drm_connector *nv_connector,
    struct NvKmsKapiHeadModeSetConfig *head_modeset_config)
{
    struct nv_drm_encoder *nv_encoder = nv_connector->nv_detected_encoder;

    if (NV_DRM_WARN(nv_encoder == NULL ||
                    head_modeset_config->numDisplays >=
                        ARRAY_SIZE(head_modeset_config->displays))) {
        return -EINVAL;
    }
    head_modeset_config->displays[head_modeset_config->numDisplays++] =
        nv_encoder->hDisplay;
    return 0;
}

/**
 * nv_drm_crtc_atomic_check() can fail after it has modified
 * 'nv_drm_crtc_state::req_config'; that is fine because 'nv_drm_crtc_state'
 * will be discarded if ->atomic_check() fails.
 */
#if defined(NV_DRM_CRTC_ATOMIC_CHECK_HAS_ATOMIC_STATE_ARG)
static int nv_drm_crtc_atomic_check(struct drm_crtc *crtc,
                                    struct drm_atomic_state *state)
#else
static int nv_drm_crtc_atomic_check(struct drm_crtc *crtc,
                                    struct drm_crtc_state *crtc_state)
#endif
{
#if defined(NV_DRM_CRTC_ATOMIC_CHECK_HAS_ATOMIC_STATE_ARG)
    struct drm_crtc_state *crtc_state =
        drm_atomic_get_new_crtc_state(state, crtc);
#endif
    struct nv_drm_crtc_state *nv_crtc_state = to_nv_crtc_state(crtc_state);
    struct NvKmsKapiHeadRequestedConfig *req_config =
        &nv_crtc_state->req_config;
    int ret = 0;

    if (crtc_state->mode_changed) {
        drm_mode_to_nvkms_display_mode(&crtc_state->mode,
                                       &req_config->modeSetConfig.mode);
        req_config->flags.modeChanged = NV_TRUE;
    }

    if (nv_drm_crtc_state_connectors_changed(crtc_state)) {
        struct NvKmsKapiHeadModeSetConfig *config = &req_config->modeSetConfig;
        struct drm_connector *connector;
        struct drm_connector_state *connector_state;
        int j;

        config->numDisplays = 0;

        memset(config->displays, 0, sizeof(config->displays));

        req_config->flags.displaysChanged = NV_TRUE;

        nv_drm_for_each_connector_in_state(crtc_state->state,
                                           connector, connector_state, j) {
            if (connector_state->crtc != crtc) {
                continue;
            }

            if ((ret = head_modeset_config_attach_connector(
                            to_nv_connector(connector),
                            config)) != 0) {
                return ret;
            }
        }
    }

    if (crtc_state->active_changed) {
        req_config->modeSetConfig.bActive = crtc_state->active;
        req_config->flags.activeChanged = NV_TRUE;
    }

    return ret;
}

static bool
nv_drm_crtc_mode_fixup(struct drm_crtc *crtc,
                       const struct drm_display_mode *mode,
                       struct drm_display_mode *adjusted_mode)
{
    return true;
}

static const struct drm_crtc_helper_funcs nv_crtc_helper_funcs = {
    .atomic_check = nv_drm_crtc_atomic_check,
    .mode_fixup = nv_drm_crtc_mode_fixup,
};

static void nv_drm_plane_install_properties(
    struct drm_plane *plane,
    NvBool supportsHDR)
{
    struct nv_drm_device *nv_dev = to_nv_device(plane->dev);

    if (nv_dev->nv_out_fence_property) {
        drm_object_attach_property(
            &plane->base, nv_dev->nv_out_fence_property, 0);
    }

    if (nv_dev->nv_input_colorspace_property) {
        drm_object_attach_property(
            &plane->base, nv_dev->nv_input_colorspace_property,
            NVKMS_INPUT_COLORSPACE_NONE);
    }

#if defined(NV_DRM_HAS_HDR_OUTPUT_METADATA)
    if (supportsHDR && nv_dev->nv_hdr_output_metadata_property) {
        drm_object_attach_property(
            &plane->base, nv_dev->nv_hdr_output_metadata_property, 0);
    }
#endif
}

static void
__nv_drm_plane_create_alpha_blending_properties(struct drm_plane *plane,
                                                NvU32 validCompModes)
{
#if defined(NV_DRM_ALPHA_BLENDING_AVAILABLE)
    if ((validCompModes &
         NVBIT(NVKMS_COMPOSITION_BLENDING_MODE_PREMULT_SURFACE_ALPHA)) != 0x0 &&
        (validCompModes &
         NVBIT(NVKMS_COMPOSITION_BLENDING_MODE_NON_PREMULT_SURFACE_ALPHA)) != 0x0) {

        drm_plane_create_alpha_property(plane);
        drm_plane_create_blend_mode_property(plane,
                                             NVBIT(DRM_MODE_BLEND_PREMULTI) |
                                             NVBIT(DRM_MODE_BLEND_COVERAGE));
    } else if ((validCompModes &
                NVBIT(NVKMS_COMPOSITION_BLENDING_MODE_PREMULT_ALPHA)) != 0x0 &&
               (validCompModes &
                NVBIT(NVKMS_COMPOSITION_BLENDING_MODE_NON_PREMULT_ALPHA)) != 0x0) {

        drm_plane_create_blend_mode_property(plane,
                                             NVBIT(DRM_MODE_BLEND_PREMULTI) |
                                             NVBIT(DRM_MODE_BLEND_COVERAGE));
    }
#endif
}

static void
__nv_drm_plane_create_rotation_property(struct drm_plane *plane,
                                        NvU16 validLayerRRTransforms)
{
#if defined(NV_DRM_ROTATION_AVAILABLE)
    enum NvKmsRotation curRotation;
    NvU32 supported_rotations = 0;
    struct NvKmsRRParams rrParams = {
        .rotation = NVKMS_ROTATION_0,
        .reflectionX = true,
        .reflectionY = true,
    };

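    /*
     * First check whether X/Y reflection (probed together with ROTATION_0)
     * is supported at all, then probe each rotation without reflection
     * below.
     */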
    if ((NVBIT(NvKmsRRParamsToCapBit(&rrParams)) &
        validLayerRRTransforms) != 0) {
        supported_rotations |= DRM_MODE_REFLECT_X;
        supported_rotations |= DRM_MODE_REFLECT_Y;
    }

    rrParams.reflectionX = false;
    rrParams.reflectionY = false;

    for (curRotation = NVKMS_ROTATION_MIN;
         curRotation <= NVKMS_ROTATION_MAX; curRotation++) {
        rrParams.rotation = curRotation;
        if ((NVBIT(NvKmsRRParamsToCapBit(&rrParams)) &
                    validLayerRRTransforms) == 0) {
            continue;
        }

        switch (curRotation) {
            case NVKMS_ROTATION_0:
                supported_rotations |= DRM_MODE_ROTATE_0;
                break;
            case NVKMS_ROTATION_90:
                supported_rotations |= DRM_MODE_ROTATE_90;
                break;
            case NVKMS_ROTATION_180:
                supported_rotations |= DRM_MODE_ROTATE_180;
                break;
            case NVKMS_ROTATION_270:
                supported_rotations |= DRM_MODE_ROTATE_270;
                break;
            default:
                break;
        }

    }

    if (supported_rotations != 0) {
        drm_plane_create_rotation_property(plane, DRM_MODE_ROTATE_0,
                                           supported_rotations);
    }
#endif
}

static struct drm_plane*
nv_drm_plane_create(struct drm_device *dev,
                    enum drm_plane_type plane_type,
                    uint32_t layer_idx,
                    NvU32 head,
                    const struct NvKmsKapiDeviceResourcesInfo *pResInfo)
{
#if defined(NV_DRM_UNIVERSAL_PLANE_INIT_HAS_FORMAT_MODIFIERS_ARG)
    struct nv_drm_device *nv_dev = to_nv_device(dev);
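    /*
     * The format modifier list passed to drm_universal_plane_init() must be
     * terminated by DRM_FORMAT_MOD_INVALID.
     */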
    const NvU64 linear_modifiers[] = {
        DRM_FORMAT_MOD_LINEAR,
        DRM_FORMAT_MOD_INVALID,
    };
#endif
    enum NvKmsCompositionBlendingMode defaultCompositionMode;
    struct nv_drm_plane *nv_plane = NULL;
    struct nv_drm_plane_state *nv_plane_state = NULL;
    struct drm_plane *plane = NULL;
    int ret = -ENOMEM;
    uint32_t *formats = NULL;
    unsigned int formats_count = 0;
    const NvU32 validCompositionModes =
        (plane_type == DRM_PLANE_TYPE_CURSOR) ?
            pResInfo->caps.validCursorCompositionModes :
            pResInfo->caps.layer[layer_idx].validCompositionModes;
    const long unsigned int nvkms_formats_mask =
        (plane_type == DRM_PLANE_TYPE_CURSOR) ?
            pResInfo->caps.supportedCursorSurfaceMemoryFormats :
            pResInfo->supportedSurfaceMemoryFormats[layer_idx];
    const NvU16 validLayerRRTransforms =
        (plane_type == DRM_PLANE_TYPE_CURSOR) ?
            0x0 : pResInfo->caps.layer[layer_idx].validRRTransforms;

    if ((validCompositionModes &
         NVBIT(NVKMS_COMPOSITION_BLENDING_MODE_OPAQUE)) != 0x0) {
        defaultCompositionMode = NVKMS_COMPOSITION_BLENDING_MODE_OPAQUE;
    } else if ((validCompositionModes &
                NVBIT(NVKMS_COMPOSITION_BLENDING_MODE_PREMULT_ALPHA)) != 0x0) {
        defaultCompositionMode = NVKMS_COMPOSITION_BLENDING_MODE_PREMULT_ALPHA;
    } else {
        goto failed;
    }

    formats =
        nv_drm_format_array_alloc(&formats_count,
                                  nvkms_formats_mask);
    if (formats == NULL) {
        goto failed;
    }

    if ((nv_plane = nv_drm_calloc(1, sizeof(*nv_plane))) == NULL) {
        goto failed_plane_alloc;
    }
    plane = &nv_plane->base;

    nv_plane->defaultCompositionMode = defaultCompositionMode;
    nv_plane->layer_idx = layer_idx;

    if ((nv_plane_state =
            nv_drm_calloc(1, sizeof(*nv_plane_state))) == NULL) {
        goto failed_state_alloc;
    }

    plane->state = &nv_plane_state->base;
    plane->state->plane = plane;

    /*
     * possible_crtcs for the primary and cursor planes is zero because
     * drm_crtc_init_with_planes() will assign the plane's possible_crtcs
     * after the crtc is successfully initialized.
     */
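    /*
     * Overlay planes, by contrast, are bound to this head's CRTC up front
     * via the (1 << head) possible_crtcs mask below.
     */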
    ret = drm_universal_plane_init(
        dev,
        plane,
        (plane_type == DRM_PLANE_TYPE_OVERLAY) ?
        (1 << head) : 0,
        &nv_plane_funcs,
        formats, formats_count,
#if defined(NV_DRM_UNIVERSAL_PLANE_INIT_HAS_FORMAT_MODIFIERS_ARG)
        (plane_type == DRM_PLANE_TYPE_CURSOR) ?
        linear_modifiers : nv_dev->modifiers,
#endif
        plane_type
#if defined(NV_DRM_UNIVERSAL_PLANE_INIT_HAS_NAME_ARG)
        , NULL
#endif
        );

    if (ret != 0) {
        goto failed_plane_init;
    }

    drm_plane_helper_add(plane, &nv_plane_helper_funcs);

    if (plane_type != DRM_PLANE_TYPE_CURSOR) {
        nv_drm_plane_install_properties(
                plane,
                pResInfo->supportsHDR[layer_idx]);
    }

    __nv_drm_plane_create_alpha_blending_properties(
            plane,
            validCompositionModes);

    __nv_drm_plane_create_rotation_property(
            plane,
            validLayerRRTransforms);

    return plane;

failed_plane_init:
    nv_drm_free(nv_plane_state);

failed_state_alloc:
    nv_drm_free(nv_plane);

failed_plane_alloc:
    nv_drm_free(formats);

failed:
    return ERR_PTR(ret);
}

/*
 * Add a DRM CRTC for the given head and its supported enum NvKmsSurfaceMemoryFormats.
 */
static struct drm_crtc *__nv_drm_crtc_create(struct nv_drm_device *nv_dev,
                                             struct drm_plane *primary_plane,
                                             struct drm_plane *cursor_plane,
                                             unsigned int head)
{
    struct nv_drm_crtc *nv_crtc = NULL;
    struct nv_drm_crtc_state *nv_state = NULL;
    int ret = -ENOMEM;

    if ((nv_crtc = nv_drm_calloc(1, sizeof(*nv_crtc))) == NULL) {
        goto failed;
    }

    nv_state = nv_drm_calloc(1, sizeof(*nv_state));
    if (nv_state == NULL) {
        goto failed_state_alloc;
    }

    nv_crtc->base.state = &nv_state->base;
    nv_crtc->base.state->crtc = &nv_crtc->base;

    nv_crtc->head = head;
    INIT_LIST_HEAD(&nv_crtc->flip_list);
    spin_lock_init(&nv_crtc->flip_list_lock);

    ret = drm_crtc_init_with_planes(nv_dev->dev,
                                    &nv_crtc->base,
                                    primary_plane, cursor_plane,
                                    &nv_crtc_funcs
#if defined(NV_DRM_CRTC_INIT_WITH_PLANES_HAS_NAME_ARG)
                                    , NULL
#endif
                                    );

    if (ret != 0) {
        NV_DRM_DEV_LOG_ERR(
            nv_dev,
            "Failed to init crtc for head %u with planes", head);
        goto failed_init_crtc;
    }

    /* Add crtc to drm sub-system */

    drm_crtc_helper_add(&nv_crtc->base, &nv_crtc_helper_funcs);

    return &nv_crtc->base;

failed_init_crtc:
    nv_drm_free(nv_state);

failed_state_alloc:
    nv_drm_free(nv_crtc);

failed:
    return ERR_PTR(ret);
}

void nv_drm_enumerate_crtcs_and_planes(
    struct nv_drm_device *nv_dev,
    const struct NvKmsKapiDeviceResourcesInfo *pResInfo)
{
    unsigned int i;

    for (i = 0; i < pResInfo->numHeads; i++) {
        struct drm_plane *primary_plane = NULL, *cursor_plane = NULL;
        NvU32 layer;

        if (pResInfo->numLayers[i] <= NVKMS_KAPI_LAYER_PRIMARY_IDX) {
            continue;
        }

        primary_plane =
            nv_drm_plane_create(nv_dev->dev,
                                DRM_PLANE_TYPE_PRIMARY,
                                NVKMS_KAPI_LAYER_PRIMARY_IDX,
                                i,
                                pResInfo);

        if (IS_ERR(primary_plane)) {
            NV_DRM_DEV_LOG_ERR(
                nv_dev,
                "Failed to create primary plane for head %u, error = %ld",
                i, PTR_ERR(primary_plane));
            continue;
        }

        cursor_plane =
            nv_drm_plane_create(nv_dev->dev,
                                DRM_PLANE_TYPE_CURSOR,
                                NVKMS_KAPI_LAYER_INVALID_IDX,
                                i,
                                pResInfo);
        if (IS_ERR(cursor_plane)) {
            NV_DRM_DEV_LOG_ERR(
                nv_dev,
                "Failed to create cursor plane for head %u, error = %ld",
                i, PTR_ERR(cursor_plane));
            cursor_plane = NULL;
        }

        /* Create crtc with the primary and cursor planes */
        {
            struct drm_crtc *crtc =
                __nv_drm_crtc_create(nv_dev,
                                     primary_plane, cursor_plane,
                                     i);
            if (IS_ERR(crtc)) {
                nv_drm_plane_destroy(primary_plane);

                if (cursor_plane != NULL) {
                    nv_drm_plane_destroy(cursor_plane);
                }

                NV_DRM_DEV_LOG_ERR(
                    nv_dev,
                    "Failed to add DRM CRTC for head %u, error = %ld",
                    i, PTR_ERR(crtc));
                continue;
            }
        }

        for (layer = 0; layer < pResInfo->numLayers[i]; layer++) {
            struct drm_plane *overlay_plane = NULL;

            if (layer == NVKMS_KAPI_LAYER_PRIMARY_IDX) {
                continue;
            }

            overlay_plane =
                nv_drm_plane_create(nv_dev->dev,
                                    DRM_PLANE_TYPE_OVERLAY,
                                    layer,
                                    i,
                                    pResInfo);

            if (IS_ERR(overlay_plane)) {
                NV_DRM_DEV_LOG_ERR(
                    nv_dev,
                    "Failed to create plane for layer-%u of head %u, error = %ld",
                    layer, i, PTR_ERR(overlay_plane));
            }
        }

    }
}
/*
 * Helper function to convert NvKmsKapiCrcs to drm_nvidia_crtc_crc32_v2_out.
 */
static void NvKmsKapiCrcsToDrm(const struct NvKmsKapiCrcs *crcs,
                               struct drm_nvidia_crtc_crc32_v2_out *drmCrcs)
{
    drmCrcs->outputCrc32.value = crcs->outputCrc32.value;
    drmCrcs->outputCrc32.supported = crcs->outputCrc32.supported;
    drmCrcs->rasterGeneratorCrc32.value = crcs->rasterGeneratorCrc32.value;
    drmCrcs->rasterGeneratorCrc32.supported = crcs->rasterGeneratorCrc32.supported;
    drmCrcs->compositorCrc32.value = crcs->compositorCrc32.value;
    drmCrcs->compositorCrc32.supported = crcs->compositorCrc32.supported;
}

int nv_drm_get_crtc_crc32_v2_ioctl(struct drm_device *dev,
                                   void *data, struct drm_file *filep)
{
    struct drm_nvidia_get_crtc_crc32_v2_params *params = data;
    struct nv_drm_device *nv_dev = to_nv_device(dev);
    struct drm_crtc *crtc = NULL;
    struct nv_drm_crtc *nv_crtc = NULL;
    struct NvKmsKapiCrcs crc32;

    if (!drm_core_check_feature(dev, DRIVER_MODESET)) {
        return -ENOENT;
    }

    crtc = nv_drm_crtc_find(dev, params->crtc_id);
    if (!crtc) {
        return -ENOENT;
    }

    nv_crtc = to_nv_crtc(crtc);

    if (!nvKms->getCRC32(nv_dev->pDevice, nv_crtc->head, &crc32)) {
        return -ENODEV;
    }
    NvKmsKapiCrcsToDrm(&crc32, &params->crc32);

    return 0;
}

int nv_drm_get_crtc_crc32_ioctl(struct drm_device *dev,
                                void *data, struct drm_file *filep)
{
    struct drm_nvidia_get_crtc_crc32_params *params = data;
    struct nv_drm_device *nv_dev = to_nv_device(dev);
    struct drm_crtc *crtc = NULL;
    struct nv_drm_crtc *nv_crtc = NULL;
    struct NvKmsKapiCrcs crc32;

    if (!drm_core_check_feature(dev, DRIVER_MODESET)) {
        return -ENOENT;
    }

    crtc = nv_drm_crtc_find(dev, params->crtc_id);
    if (!crtc) {
        return -ENOENT;
    }

    nv_crtc = to_nv_crtc(crtc);

    if (!nvKms->getCRC32(nv_dev->pDevice, nv_crtc->head, &crc32)) {
        return -ENODEV;
    }
    params->crc32 = crc32.outputCrc32.value;

    return 0;
}

#endif