/*
 * Copyright (c) 2016-2022, NVIDIA CORPORATION. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#ifndef __NVIDIA_DRM_CRTC_H__
#define __NVIDIA_DRM_CRTC_H__

#include "nvidia-drm-conftest.h"

#if defined(NV_DRM_ATOMIC_MODESET_AVAILABLE)

#include "nvidia-drm-helper.h"

#if defined(NV_DRM_DRMP_H_PRESENT)
#include <drm/drmP.h>
#endif

#include <drm/drm_crtc.h>

#include "nvtypes.h"
#include "nvkms-kapi.h"

struct nv_drm_crtc {
    NvU32 head;

    /**
     * @flip_list:
     *
     * List of flips waiting to be processed by __nv_drm_handle_flip_event().
     * Protected by @flip_list_lock.
     */
    struct list_head flip_list;

    /**
     * @flip_list_lock:
     *
     * Spinlock to protect @flip_list.
     */
    spinlock_t flip_list_lock;

    /**
     * @modeset_permission_filep:
     *
     * The filep that has been granted use of this crtc via
     * DRM_IOCTL_NVIDIA_GRANT_PERMISSIONS.
     */
    struct drm_file *modeset_permission_filep;

    struct drm_crtc base;
};

/**
 * struct nv_drm_flip - flip state
 *
 * This state is used to consume the DRM completion event associated with
 * each crtc state in an atomic commit.
 *
 * nv_drm_atomic_apply_modeset_config() consumes the DRM completion event,
 * saves it into the flip state associated with the crtc, queues the flip
 * state onto the crtc's flip list, and commits the atomic update to hardware.
 */
struct nv_drm_flip {
    /**
     * @event:
     *
     * Optional pointer to a DRM event to signal upon completion of
     * the state update.
     */
    struct drm_pending_vblank_event *event;

    /**
     * @pending_events:
     *
     * Number of HW events pending to signal completion of the state
     * update.
     */
    uint32_t pending_events;

    /**
     * @list_entry:
     *
     * Entry on the per-CRTC &nv_drm_crtc.flip_list. Protected by
     * &nv_drm_crtc.flip_list_lock.
     */
    struct list_head list_entry;

    /**
     * @deferred_flip_list:
     *
     * List of flip objects whose processing is deferred until this flip
     * object has been processed. Protected by &nv_drm_crtc.flip_list_lock.
     * nv_drm_atomic_commit() gets the last flip object from
     * &nv_drm_crtc.flip_list and adds deferred flip objects onto
     * @deferred_flip_list; __nv_drm_handle_flip_event() then processes
     * @deferred_flip_list.
     */
    struct list_head deferred_flip_list;
};
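
/*
 * Minimal sketch of how the fields above cooperate, assuming hypothetical
 * variable names (new_flip, last, deferred, tmp); this is illustrative only,
 * not the in-tree implementation. The commit path defers a new flip behind
 * the most recently queued one, and the flip-event handler drains that
 * deferred list once the earlier flip has completed:
 *
 *   // commit path, under flip_list_lock: flips are added at the head of
 *   // flip_list, so the most recently queued flip is the first entry
 *   last = list_first_entry_or_null(&nv_crtc->flip_list,
 *                                   struct nv_drm_flip, list_entry);
 *   if (last != NULL) {
 *       list_add(&new_flip->list_entry, &last->deferred_flip_list);
 *   }
 *
 *   // __nv_drm_handle_flip_event(), once the earlier flip completes
 *   list_for_each_entry_safe(deferred, tmp,
 *                            &nv_flip->deferred_flip_list, list_entry) {
 *       // signal deferred->event, free the deferred flip object, ...
 *   }
 */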

struct nv_drm_crtc_state {
    /**
     * @base:
     *
     * Base DRM crtc state object for this crtc state.
     */
    struct drm_crtc_state base;

    /**
     * @req_config:
     *
     * The requested modeset configuration for the head corresponding to this
     * crtc state.
     */
    struct NvKmsKapiHeadRequestedConfig req_config;

    /** @ilut_ramps: Input LUT ramps for this crtc state, if any. */
    struct NvKmsLutRamps *ilut_ramps;

    /** @olut_ramps: Output LUT ramps for this crtc state, if any. */
    struct NvKmsLutRamps *olut_ramps;

    /**
     * @nv_flip:
     *
     * Flip state associated with this crtc state. It is allocated by
     * nv_drm_atomic_crtc_duplicate_state(); on a successful commit it is
     * consumed and queued onto the flip list by
     * nv_drm_atomic_apply_modeset_config(), and it is finally destroyed by
     * __nv_drm_handle_flip_event() after it has been processed.
     *
     * If the atomic commit fails, this flip state is destroyed by
     * nv_drm_atomic_crtc_destroy_state().
     */
    struct nv_drm_flip *nv_flip;
};
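
/*
 * Minimal sketch of the @nv_flip ownership hand-off described above,
 * assuming hypothetical names (nv_state, num_hw_events); illustrative only,
 * not the in-tree implementation:
 *
 *   // nv_drm_atomic_crtc_duplicate_state(): allocate a fresh flip object
 *   nv_state->nv_flip = kzalloc(sizeof(*nv_state->nv_flip), GFP_KERNEL);
 *
 *   // nv_drm_atomic_apply_modeset_config(), on a successful commit:
 *   nv_state->nv_flip->event = crtc_state->event;
 *   nv_state->nv_flip->pending_events = num_hw_events;
 *   nv_drm_crtc_enqueue_flip(nv_crtc, nv_state->nv_flip);
 *   nv_state->nv_flip = NULL;   // ownership moves to the flip list
 *
 *   // nv_drm_atomic_crtc_destroy_state(), on a failed commit:
 *   kfree(nv_state->nv_flip);
 *
 *   // __nv_drm_handle_flip_event(): dequeue, signal ->event, free the flip
 */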

static inline struct nv_drm_crtc_state *to_nv_crtc_state(struct drm_crtc_state *state)
{
    return container_of(state, struct nv_drm_crtc_state, base);
}

struct nv_drm_plane {
    /**
     * @base:
     *
     * Base DRM plane object for this plane.
     */
    struct drm_plane base;

    /**
     * @defaultCompositionMode:
     *
     * Default composition blending mode of this plane.
     */
    enum NvKmsCompositionBlendingMode defaultCompositionMode;

    /**
     * @layer_idx:
     *
     * Index of this plane in the per-head array of layers.
     */
    uint32_t layer_idx;
};

static inline struct nv_drm_plane *to_nv_plane(struct drm_plane *plane)
{
    if (plane == NULL) {
        return NULL;
    }
    return container_of(plane, struct nv_drm_plane, base);
}

struct nv_drm_plane_state {
    struct drm_plane_state base;
    s32 __user *fd_user_ptr;
    enum NvKmsInputColorSpace input_colorspace;
#if defined(NV_DRM_HAS_HDR_OUTPUT_METADATA)
    struct drm_property_blob *hdr_output_metadata;
#endif
};

static inline struct nv_drm_plane_state *to_nv_drm_plane_state(struct drm_plane_state *state)
{
    return container_of(state, struct nv_drm_plane_state, base);
}

static inline const struct nv_drm_plane_state *to_nv_drm_plane_state_const(const struct drm_plane_state *state)
{
    return container_of(state, const struct nv_drm_plane_state, base);
}

static inline struct nv_drm_crtc *to_nv_crtc(struct drm_crtc *crtc)
{
    if (crtc == NULL) {
        return NULL;
    }
    return container_of(crtc, struct nv_drm_crtc, base);
}

/*
 * CRTCs are static objects: the list does not change after device
 * initialization and until device teardown. The initialization and teardown
 * paths are single threaded, so no locking is required.
 */
static inline
struct nv_drm_crtc *nv_drm_crtc_lookup(struct nv_drm_device *nv_dev, NvU32 head)
{
    struct drm_crtc *crtc;
    nv_drm_for_each_crtc(crtc, nv_dev->dev) {
        struct nv_drm_crtc *nv_crtc = to_nv_crtc(crtc);

        if (nv_crtc->head == head)  {
            return nv_crtc;
        }
    }
    return NULL;
}
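
/*
 * Minimal usage sketch, assuming a hypothetical event-handler name: because
 * the CRTC list is static between init and teardown, the lookup can be done
 * from event paths without additional locking, e.g. when an NVKMS
 * notification arrives carrying only a head index:
 *
 *   static void nv_drm_handle_head_event(struct nv_drm_device *nv_dev,
 *                                        NvU32 head)
 *   {
 *       struct nv_drm_crtc *nv_crtc = nv_drm_crtc_lookup(nv_dev, head);
 *
 *       if (WARN_ON(nv_crtc == NULL)) {
 *           return;
 *       }
 *       // ... handle the event for nv_crtc ...
 *   }
 */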

/**
 * nv_drm_crtc_enqueue_flip - Enqueue a flip object onto the crtc's flip_list.
 */
static inline void nv_drm_crtc_enqueue_flip(struct nv_drm_crtc *nv_crtc,
                                            struct nv_drm_flip *nv_flip)
{
    spin_lock(&nv_crtc->flip_list_lock);
    list_add(&nv_flip->list_entry, &nv_crtc->flip_list);
    spin_unlock(&nv_crtc->flip_list_lock);
}

/**
 * nv_drm_crtc_dequeue_flip - Dequeue a flip object from the crtc's flip_list.
 */
static inline
struct nv_drm_flip *nv_drm_crtc_dequeue_flip(struct nv_drm_crtc *nv_crtc)
{
    struct nv_drm_flip *nv_flip = NULL;
    uint32_t pending_events = 0;

    spin_lock(&nv_crtc->flip_list_lock);
    nv_flip = list_first_entry_or_null(&nv_crtc->flip_list,
                                       struct nv_drm_flip, list_entry);
    if (likely(nv_flip != NULL)) {
        /*
         * Decrement the pending_events count and dequeue the flip object
         * if the count becomes 0.
         */
        pending_events = --nv_flip->pending_events;
        if (!pending_events) {
            list_del(&nv_flip->list_entry);
        }
    }
    spin_unlock(&nv_crtc->flip_list_lock);

    if (WARN_ON(nv_flip == NULL) || pending_events) {
        return NULL;
    }

    return nv_flip;
}
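
/*
 * Minimal usage sketch for the dequeue helper above (hypothetical caller
 * code): the flip-event handler is expected to call it once per hardware
 * notification; only the notification that drops @pending_events to zero
 * returns the flip object, at which point the DRM event can be signalled:
 *
 *   struct nv_drm_flip *nv_flip = nv_drm_crtc_dequeue_flip(nv_crtc);
 *
 *   if (nv_flip == NULL) {
 *       return;   // an earlier notification for the same flip
 *   }
 *   // all pending events have arrived: send nv_flip->event, process
 *   // nv_flip->deferred_flip_list, and free nv_flip
 */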

void nv_drm_enumerate_crtcs_and_planes(
    struct nv_drm_device *nv_dev,
    const struct NvKmsKapiDeviceResourcesInfo *pResInfo);

int nv_drm_get_crtc_crc32_ioctl(struct drm_device *dev,
                                void *data, struct drm_file *filep);

int nv_drm_get_crtc_crc32_v2_ioctl(struct drm_device *dev,
                                   void *data, struct drm_file *filep);

#endif /* NV_DRM_ATOMIC_MODESET_AVAILABLE */

#endif /* __NVIDIA_DRM_CRTC_H__ */