#ifndef _G_KERN_DISP_NVOC_H_
#define _G_KERN_DISP_NVOC_H_
#include "nvoc/runtime.h"

#ifdef __cplusplus
extern "C" {
#endif

/*
 * SPDX-FileCopyrightText: Copyright (c) 2020-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "g_kern_disp_nvoc.h"

#ifndef KERN_DISP_H
#define KERN_DISP_H

/******************************************************************************
*
*       Kernel Display module header
*       This file contains functions managing display on CPU RM
*
******************************************************************************/

#include "gpu/eng_state.h"
#include "gpu/gpu_halspec.h"
#include "gpu/disp/kern_disp_type.h"
#include "gpu/disp/kern_disp_max.h"
#include "gpu/mem_mgr/context_dma.h"
#include "gpu/disp/vblank_callback/vblank.h"

#include "kernel/gpu/intr/intr_service.h"

#include "ctrl/ctrl2080/ctrl2080internal.h"

typedef NV2080_CTRL_INTERNAL_DISPLAY_GET_STATIC_INFO_PARAMS KernelDisplayStaticInfo;

typedef struct
{
    NvU32 kHeadVblankCount[OBJ_MAX_HEADS];
} KernelDisplaySharedMem;

struct DispChannel;

#ifndef __NVOC_CLASS_DispChannel_TYPEDEF__
#define __NVOC_CLASS_DispChannel_TYPEDEF__
typedef struct DispChannel DispChannel;
#endif /* __NVOC_CLASS_DispChannel_TYPEDEF__ */

#ifndef __nvoc_class_id_DispChannel
#define __nvoc_class_id_DispChannel 0xbd2ff3
#endif /* __nvoc_class_id_DispChannel */


struct RgLineCallback;

#ifndef __NVOC_CLASS_RgLineCallback_TYPEDEF__
#define __NVOC_CLASS_RgLineCallback_TYPEDEF__
typedef struct RgLineCallback RgLineCallback;
#endif /* __NVOC_CLASS_RgLineCallback_TYPEDEF__ */

#ifndef __nvoc_class_id_RgLineCallback
#define __nvoc_class_id_RgLineCallback 0xa3ff1c
#endif /* __nvoc_class_id_RgLineCallback */



#define KDISP_GET_HEAD(pKernelDisplay, headID)    (RMCFG_MODULE_KERNEL_HEAD ? kdispGetHead(pKernelDisplay, headID) : NULL)

/*!
 * KernelDisp is a logical abstraction of the GPU Display Engine. The
 * Public API of the Display Engine is exposed through this object, and any
 * interfaces which do not manage the underlying Display hardware can be
 * managed by this object.
 */
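
//
// Illustrative sketch only (not part of the generated NVOC interface): a
// caller that already holds a valid (pGpu, pKernelDisplay) pair would
// normally go through the dispatch/HAL macros declared later in this header,
// for example:
//
//     NvU32 lineCount = 0, frameCount = 0;
//     NV_STATUS status = kdispReadRgLineCountAndFrameCount_HAL(pGpu, pKernelDisplay,
//                                                              0 /* head index */,
//                                                              &lineCount, &frameCount);
//     if (status == NV_OK)
//     {
//         // lineCount/frameCount now hold the RG scanline and frame counters
//         // reported by whichever HAL implementation is bound to this GPU.
//     }
//
// Locking and IRQL requirements are not documented here and are assumed to be
// satisfied by the caller.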

// Private field names are wrapped in PRIVATE_FIELD, which does nothing for
// the matching C source file, but causes diagnostics to be issued if another
// source file references the field.
#ifdef NVOC_KERN_DISP_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
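
//
// Illustrative sketch only: with the definitions above, a member written as
//
//     NvU32 PRIVATE_FIELD(someCounter);
//
// expands to a plain `NvU32 someCounter;` in the one translation unit that
// defines NVOC_KERN_DISP_H_PRIVATE_ACCESS_ALLOWED before including this
// header, and to `NvU32 NVOC_PRIVATE_FIELD(someCounter);` everywhere else,
// which is what triggers the diagnostics mentioned above. `someCounter` is a
// hypothetical field name used only for this example.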

struct KernelDisplay {
    const struct NVOC_RTTI *__nvoc_rtti;
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct IntrService __nvoc_base_IntrService;
    struct Object *__nvoc_pbase_Object;
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
    struct IntrService *__nvoc_pbase_IntrService;
    struct KernelDisplay *__nvoc_pbase_KernelDisplay;
    NV_STATUS (*__kdispConstructEngine__)(OBJGPU *, struct KernelDisplay *, ENGDESCRIPTOR);
    NV_STATUS (*__kdispStatePreInitLocked__)(OBJGPU *, struct KernelDisplay *);
    NV_STATUS (*__kdispStateInitLocked__)(OBJGPU *, struct KernelDisplay *);
    void (*__kdispStateDestroy__)(OBJGPU *, struct KernelDisplay *);
    NV_STATUS (*__kdispStateLoad__)(OBJGPU *, struct KernelDisplay *, NvU32);
    NV_STATUS (*__kdispStateUnload__)(OBJGPU *, struct KernelDisplay *, NvU32);
    void (*__kdispRegisterIntrService__)(OBJGPU *, struct KernelDisplay *, IntrServiceRecord *);
    NvU32 (*__kdispServiceInterrupt__)(OBJGPU *, struct KernelDisplay *, IntrServiceServiceInterruptArguments *);
    NV_STATUS (*__kdispSelectClass__)(OBJGPU *, struct KernelDisplay *, NvU32);
    NV_STATUS (*__kdispGetChannelNum__)(struct KernelDisplay *, DISPCHNCLASS, NvU32, NvU32 *);
    void (*__kdispGetDisplayCapsBaseAndSize__)(OBJGPU *, struct KernelDisplay *, NvU32 *, NvU32 *);
    void (*__kdispGetDisplaySfUserBaseAndSize__)(OBJGPU *, struct KernelDisplay *, NvU32 *, NvU32 *);
    NV_STATUS (*__kdispGetDisplayChannelUserBaseAndSize__)(OBJGPU *, struct KernelDisplay *, DISPCHNCLASS, NvU32, NvU32 *, NvU32 *);
    NvBool (*__kdispGetVgaWorkspaceBase__)(OBJGPU *, struct KernelDisplay *, NvU64 *);
    NV_STATUS (*__kdispReadRgLineCountAndFrameCount__)(OBJGPU *, struct KernelDisplay *, NvU32, NvU32 *, NvU32 *);
    void (*__kdispRestoreOriginalLsrMinTime__)(OBJGPU *, struct KernelDisplay *, NvU32, NvU32);
    NV_STATUS (*__kdispComputeLsrMinTimeValue__)(OBJGPU *, struct KernelDisplay *, NvU32, NvU32, NvU32 *);
    void (*__kdispSetSwapBarrierLsrMinTime__)(OBJGPU *, struct KernelDisplay *, NvU32, NvU32 *, NvU32);
    NV_STATUS (*__kdispGetRgScanLock__)(OBJGPU *, struct KernelDisplay *, NvU32, OBJGPU *, NvU32, NvBool *, NvU32 *, NvBool *, NvU32 *);
    NV_STATUS (*__kdispDetectSliLink__)(struct KernelDisplay *, OBJGPU *, OBJGPU *, NvU32, NvU32);
    void (*__kdispInitRegistryOverrides__)(OBJGPU *, struct KernelDisplay *);
    NvU32 (*__kdispGetPBTargetAperture__)(OBJGPU *, struct KernelDisplay *, NvU32, NvU32);
    NV_STATUS (*__kdispServiceNotificationInterrupt__)(OBJGPU *, struct KernelDisplay *, IntrServiceServiceNotificationInterruptArguments *);
    NV_STATUS (*__kdispStatePreLoad__)(POBJGPU, struct KernelDisplay *, NvU32);
    NV_STATUS (*__kdispStatePostUnload__)(POBJGPU, struct KernelDisplay *, NvU32);
    NV_STATUS (*__kdispStatePreUnload__)(POBJGPU, struct KernelDisplay *, NvU32);
    NV_STATUS (*__kdispStateInitUnlocked__)(POBJGPU, struct KernelDisplay *);
    void (*__kdispInitMissing__)(POBJGPU, struct KernelDisplay *);
    NV_STATUS (*__kdispStatePreInitUnlocked__)(POBJGPU, struct KernelDisplay *);
    NvBool (*__kdispClearInterrupt__)(OBJGPU *, struct KernelDisplay *, IntrServiceClearInterruptArguments *);
    NV_STATUS (*__kdispStatePostLoad__)(POBJGPU, struct KernelDisplay *, NvU32);
    NvBool (*__kdispIsPresent__)(POBJGPU, struct KernelDisplay *);
    NvBool PDB_PROP_KDISP_IMP_ENABLE;
    NvBool PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANK_ALWAYS;
    NvBool PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANKS_ONLY_ON_HMD_ACTIVE;
    NvBool PDB_PROP_KDISP_IN_AWAKEN_INTR;
    struct DisplayInstanceMemory *pInst;
    struct KernelHead *pKernelHead[8];
    const KernelDisplayStaticInfo *pStaticInfo;
    NvBool bWarPurgeSatellitesOnCoreFree;
    struct RgLineCallback *rgLineCallbackPerHead[8][2];
    NvU32 isrVblankHeads;
    NvBool bExtdevIntrSupported;
    NvU32 numHeads;
    NvU32 deferredVblankHeadMask;
    NvHandle hInternalClient;
    NvHandle hInternalDevice;
    NvHandle hInternalSubdevice;
    NvHandle hDispCommonHandle;
    MEMORY_DESCRIPTOR *pSharedMemDesc;
    KernelDisplaySharedMem *pSharedData;
};

#ifndef __NVOC_CLASS_KernelDisplay_TYPEDEF__
#define __NVOC_CLASS_KernelDisplay_TYPEDEF__
typedef struct KernelDisplay KernelDisplay;
#endif /* __NVOC_CLASS_KernelDisplay_TYPEDEF__ */

#ifndef __nvoc_class_id_KernelDisplay
#define __nvoc_class_id_KernelDisplay 0x55952e
#endif /* __nvoc_class_id_KernelDisplay */

extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelDisplay;

#define __staticCast_KernelDisplay(pThis) \
    ((pThis)->__nvoc_pbase_KernelDisplay)

#ifdef __nvoc_kern_disp_h_disabled
#define __dynamicCast_KernelDisplay(pThis) ((KernelDisplay*)NULL)
#else //__nvoc_kern_disp_h_disabled
#define __dynamicCast_KernelDisplay(pThis) \
    ((KernelDisplay*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelDisplay)))
#endif //__nvoc_kern_disp_h_disabled

#define PDB_PROP_KDISP_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KDISP_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
#define PDB_PROP_KDISP_IN_AWAKEN_INTR_BASE_CAST
#define PDB_PROP_KDISP_IN_AWAKEN_INTR_BASE_NAME PDB_PROP_KDISP_IN_AWAKEN_INTR
#define PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANK_ALWAYS_BASE_CAST
#define PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANK_ALWAYS_BASE_NAME PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANK_ALWAYS
#define PDB_PROP_KDISP_IMP_ENABLE_BASE_CAST
#define PDB_PROP_KDISP_IMP_ENABLE_BASE_NAME PDB_PROP_KDISP_IMP_ENABLE
#define PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANKS_ONLY_ON_HMD_ACTIVE_BASE_CAST
#define PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANKS_ONLY_ON_HMD_ACTIVE_BASE_NAME PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANKS_ONLY_ON_HMD_ACTIVE

NV_STATUS __nvoc_objCreateDynamic_KernelDisplay(KernelDisplay**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelDisplay(KernelDisplay**, Dynamic*, NvU32);
#define __objCreate_KernelDisplay(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelDisplay((ppNewObj), staticCast((pParent), Dynamic), (createFlags))

#define kdispConstructEngine(pGpu, pKernelDisplay, engDesc) kdispConstructEngine_DISPATCH(pGpu, pKernelDisplay, engDesc)
#define kdispStatePreInitLocked(pGpu, pKernelDisplay) kdispStatePreInitLocked_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateInitLocked(pGpu, pKernelDisplay) kdispStateInitLocked_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateDestroy(pGpu, pKernelDisplay) kdispStateDestroy_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateLoad(pGpu, pKernelDisplay, flags) kdispStateLoad_DISPATCH(pGpu, pKernelDisplay, flags)
#define kdispStateUnload(pGpu, pKernelDisplay, flags) kdispStateUnload_DISPATCH(pGpu, pKernelDisplay, flags)
#define kdispRegisterIntrService(pGpu, pKernelDisplay, pRecords) kdispRegisterIntrService_DISPATCH(pGpu, pKernelDisplay, pRecords)
#define kdispServiceInterrupt(pGpu, pKernelDisplay, pParams) kdispServiceInterrupt_DISPATCH(pGpu, pKernelDisplay, pParams)
#define kdispServiceInterrupt_HAL(pGpu, pKernelDisplay, pParams) kdispServiceInterrupt_DISPATCH(pGpu, pKernelDisplay, pParams)
#define kdispSelectClass(pGpu, pKernelDisplay, swClass) kdispSelectClass_DISPATCH(pGpu, pKernelDisplay, swClass)
#define kdispSelectClass_HAL(pGpu, pKernelDisplay, swClass) kdispSelectClass_DISPATCH(pGpu, pKernelDisplay, swClass)
#define kdispGetChannelNum(pKernelDisplay, channelClass, channelInstance, pChannelNum) kdispGetChannelNum_DISPATCH(pKernelDisplay, channelClass, channelInstance, pChannelNum)
#define kdispGetChannelNum_HAL(pKernelDisplay, channelClass, channelInstance, pChannelNum) kdispGetChannelNum_DISPATCH(pKernelDisplay, channelClass, channelInstance, pChannelNum)
#define kdispGetDisplayCapsBaseAndSize(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplayCapsBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplayCapsBaseAndSize_HAL(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplayCapsBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplaySfUserBaseAndSize(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplaySfUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplaySfUserBaseAndSize_HAL(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplaySfUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplayChannelUserBaseAndSize(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize) kdispGetDisplayChannelUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize)
#define kdispGetDisplayChannelUserBaseAndSize_HAL(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize) kdispGetDisplayChannelUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize)
#define kdispGetVgaWorkspaceBase(pGpu, pKernelDisplay, pOffset) kdispGetVgaWorkspaceBase_DISPATCH(pGpu, pKernelDisplay, pOffset)
#define kdispGetVgaWorkspaceBase_HAL(pGpu, pKernelDisplay, pOffset) kdispGetVgaWorkspaceBase_DISPATCH(pGpu, pKernelDisplay, pOffset)
#define kdispReadRgLineCountAndFrameCount(pGpu, pKernelDisplay, head, pLineCount, pFrameCount) kdispReadRgLineCountAndFrameCount_DISPATCH(pGpu, pKernelDisplay, head, pLineCount, pFrameCount)
#define kdispReadRgLineCountAndFrameCount_HAL(pGpu, pKernelDisplay, head, pLineCount, pFrameCount) kdispReadRgLineCountAndFrameCount_DISPATCH(pGpu, pKernelDisplay, head, pLineCount, pFrameCount)
#define kdispRestoreOriginalLsrMinTime(pGpu, pKernelDisplay, head, origLsrMinTime) kdispRestoreOriginalLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, origLsrMinTime)
#define kdispRestoreOriginalLsrMinTime_HAL(pGpu, pKernelDisplay, head, origLsrMinTime) kdispRestoreOriginalLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, origLsrMinTime)
#define kdispComputeLsrMinTimeValue(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime) kdispComputeLsrMinTimeValue_DISPATCH(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime)
#define kdispComputeLsrMinTimeValue_HAL(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime) kdispComputeLsrMinTimeValue_DISPATCH(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime)
#define kdispSetSwapBarrierLsrMinTime(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime) kdispSetSwapBarrierLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime)
#define kdispSetSwapBarrierLsrMinTime_HAL(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime) kdispSetSwapBarrierLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime)
#define kdispGetRgScanLock(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin) kdispGetRgScanLock_DISPATCH(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin)
#define kdispGetRgScanLock_HAL(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin) kdispGetRgScanLock_DISPATCH(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin)
#define kdispDetectSliLink(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort) kdispDetectSliLink_DISPATCH(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort)
#define kdispDetectSliLink_HAL(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort) kdispDetectSliLink_DISPATCH(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort)
#define kdispInitRegistryOverrides(pGpu, pKernelDisplay) kdispInitRegistryOverrides_DISPATCH(pGpu, pKernelDisplay)
#define kdispInitRegistryOverrides_HAL(pGpu, pKernelDisplay) kdispInitRegistryOverrides_DISPATCH(pGpu, pKernelDisplay)
#define kdispGetPBTargetAperture(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop) kdispGetPBTargetAperture_DISPATCH(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop)
#define kdispGetPBTargetAperture_HAL(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop) kdispGetPBTargetAperture_DISPATCH(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop)
#define kdispServiceNotificationInterrupt(pGpu, pIntrService, pParams) kdispServiceNotificationInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define kdispStatePreLoad(pGpu, pEngstate, arg0) kdispStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define kdispStatePostUnload(pGpu, pEngstate, arg0) kdispStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define kdispStatePreUnload(pGpu, pEngstate, arg0) kdispStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define kdispStateInitUnlocked(pGpu, pEngstate) kdispStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kdispInitMissing(pGpu, pEngstate) kdispInitMissing_DISPATCH(pGpu, pEngstate)
#define kdispStatePreInitUnlocked(pGpu, pEngstate) kdispStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kdispClearInterrupt(pGpu, pIntrService, pParams) kdispClearInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define kdispStatePostLoad(pGpu, pEngstate, arg0) kdispStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
#define kdispIsPresent(pGpu, pEngstate) kdispIsPresent_DISPATCH(pGpu, pEngstate)
void kdispServiceVblank_KERNEL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispServiceVblank(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispServiceVblank(pGpu, pKernelDisplay, arg0, arg1, arg2) kdispServiceVblank_KERNEL(pGpu, pKernelDisplay, arg0, arg1, arg2)
#endif //__nvoc_kern_disp_h_disabled

#define kdispServiceVblank_HAL(pGpu, pKernelDisplay, arg0, arg1, arg2) kdispServiceVblank(pGpu, pKernelDisplay, arg0, arg1, arg2)
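
//
// Illustrative sketch only: the pattern above (repeated for the entries that
// follow) means a call such as
//
//     kdispServiceVblank_HAL(pGpu, pKernelDisplay, 0, 0, NULL);
//
// resolves to kdispServiceVblank_KERNEL() when the KernelDisplay module is
// enabled, and to a stub that asserts "KernelDisplay was disabled!" when
// __nvoc_kern_disp_h_disabled is defined. The numeric arguments shown are
// placeholders, not a documented calling convention.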

NV_STATUS kdispConstructInstMem_IMPL(struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispConstructInstMem(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispConstructInstMem(pKernelDisplay) kdispConstructInstMem_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispConstructInstMem_HAL(pKernelDisplay) kdispConstructInstMem(pKernelDisplay)

void kdispDestructInstMem_IMPL(struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispDestructInstMem(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDestructInstMem(pKernelDisplay) kdispDestructInstMem_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispDestructInstMem_HAL(pKernelDisplay) kdispDestructInstMem(pKernelDisplay)

static inline NvS32 kdispGetBaseOffset_4a4dee(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    return 0;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NvS32 kdispGetBaseOffset(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return 0;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetBaseOffset(pGpu, pKernelDisplay) kdispGetBaseOffset_4a4dee(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispGetBaseOffset_HAL(pGpu, pKernelDisplay) kdispGetBaseOffset(pGpu, pKernelDisplay)

static inline NV_STATUS kdispImportImpData_56cd7a(struct KernelDisplay *pKernelDisplay) {
    return NV_OK;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispImportImpData(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispImportImpData(pKernelDisplay) kdispImportImpData_56cd7a(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispImportImpData_HAL(pKernelDisplay) kdispImportImpData(pKernelDisplay)

static inline NV_STATUS kdispArbAndAllocDisplayBandwidth_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, enum DISPLAY_ICC_BW_CLIENT iccBwClient, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispArbAndAllocDisplayBandwidth(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, enum DISPLAY_ICC_BW_CLIENT iccBwClient, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispArbAndAllocDisplayBandwidth(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispArbAndAllocDisplayBandwidth_46f6a7(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
#endif //__nvoc_kern_disp_h_disabled

#define kdispArbAndAllocDisplayBandwidth_HAL(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispArbAndAllocDisplayBandwidth(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)

NV_STATUS kdispSetPushBufferParamsToPhysical_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvHandle hObjectBuffer, struct ContextDma *pBufferContextDma, NvU32 hClass, NvU32 channelInstance, DISPCHNCLASS internalDispChnClass);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispSetPushBufferParamsToPhysical(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvHandle hObjectBuffer, struct ContextDma *pBufferContextDma, NvU32 hClass, NvU32 channelInstance, DISPCHNCLASS internalDispChnClass) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispSetPushBufferParamsToPhysical(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass) kdispSetPushBufferParamsToPhysical_IMPL(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass)
#endif //__nvoc_kern_disp_h_disabled

#define kdispSetPushBufferParamsToPhysical_HAL(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass) kdispSetPushBufferParamsToPhysical(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass)

static inline NV_STATUS kdispAcquireDispChannelHw_56cd7a(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvU32 channelInstance, NvHandle hObjectBuffer, NvU32 initialGetPutOffset, NvBool allowGrabWithinSameClient, NvBool connectPbAtGrab) {
    return NV_OK;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispAcquireDispChannelHw(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvU32 channelInstance, NvHandle hObjectBuffer, NvU32 initialGetPutOffset, NvBool allowGrabWithinSameClient, NvBool connectPbAtGrab) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispAcquireDispChannelHw(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab) kdispAcquireDispChannelHw_56cd7a(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab)
#endif //__nvoc_kern_disp_h_disabled

#define kdispAcquireDispChannelHw_HAL(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab) kdispAcquireDispChannelHw(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab)

static inline NV_STATUS kdispReleaseDispChannelHw_56cd7a(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
    return NV_OK;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispReleaseDispChannelHw(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispReleaseDispChannelHw(pKernelDisplay, pDispChannel) kdispReleaseDispChannelHw_56cd7a(pKernelDisplay, pDispChannel)
#endif //__nvoc_kern_disp_h_disabled

#define kdispReleaseDispChannelHw_HAL(pKernelDisplay, pDispChannel) kdispReleaseDispChannelHw(pKernelDisplay, pDispChannel)

NV_STATUS kdispMapDispChannel_IMPL(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispMapDispChannel(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispMapDispChannel(pKernelDisplay, pDispChannel) kdispMapDispChannel_IMPL(pKernelDisplay, pDispChannel)
#endif //__nvoc_kern_disp_h_disabled

#define kdispMapDispChannel_HAL(pKernelDisplay, pDispChannel) kdispMapDispChannel(pKernelDisplay, pDispChannel)

void kdispUnbindUnmapDispChannel_IMPL(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispUnbindUnmapDispChannel(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispUnbindUnmapDispChannel(pKernelDisplay, pDispChannel) kdispUnbindUnmapDispChannel_IMPL(pKernelDisplay, pDispChannel)
#endif //__nvoc_kern_disp_h_disabled

#define kdispUnbindUnmapDispChannel_HAL(pKernelDisplay, pDispChannel) kdispUnbindUnmapDispChannel(pKernelDisplay, pDispChannel)

NV_STATUS kdispRegisterRgLineCallback_IMPL(struct KernelDisplay *pKernelDisplay, struct RgLineCallback *pRgLineCallback, NvU32 head, NvU32 rgIntrLine, NvBool bEnable);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispRegisterRgLineCallback(struct KernelDisplay *pKernelDisplay, struct RgLineCallback *pRgLineCallback, NvU32 head, NvU32 rgIntrLine, NvBool bEnable) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispRegisterRgLineCallback(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable) kdispRegisterRgLineCallback_IMPL(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable)
#endif //__nvoc_kern_disp_h_disabled

#define kdispRegisterRgLineCallback_HAL(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable) kdispRegisterRgLineCallback(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable)

void kdispInvokeRgLineCallback_KERNEL(struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 rgIntrLine, NvBool bIsIrqlIsr);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispInvokeRgLineCallback(struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 rgIntrLine, NvBool bIsIrqlIsr) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispInvokeRgLineCallback(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr) kdispInvokeRgLineCallback_KERNEL(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr)
#endif //__nvoc_kern_disp_h_disabled

#define kdispInvokeRgLineCallback_HAL(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr) kdispInvokeRgLineCallback(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr)

NvU32 kdispReadPendingVblank_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct THREAD_STATE_NODE *arg0);


#ifdef __nvoc_kern_disp_h_disabled
static inline NvU32 kdispReadPendingVblank(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return 0;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispReadPendingVblank(pGpu, pKernelDisplay, arg0) kdispReadPendingVblank_IMPL(pGpu, pKernelDisplay, arg0)
#endif //__nvoc_kern_disp_h_disabled

#define kdispReadPendingVblank_HAL(pGpu, pKernelDisplay, arg0) kdispReadPendingVblank(pGpu, pKernelDisplay, arg0)

static inline void kdispInvokeDisplayModesetCallback_b3696a(struct KernelDisplay *pKernelDisplay, NvBool bModesetStart, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
    return;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispInvokeDisplayModesetCallback(struct KernelDisplay *pKernelDisplay, NvBool bModesetStart, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispInvokeDisplayModesetCallback(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispInvokeDisplayModesetCallback_b3696a(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
#endif //__nvoc_kern_disp_h_disabled

#define kdispInvokeDisplayModesetCallback_HAL(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispInvokeDisplayModesetCallback(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)

static inline NV_STATUS kdispDsmMxmMxcbExecuteAcpi_92bfc3(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, void *pInOutData, NvU16 *outDataSize) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispDsmMxmMxcbExecuteAcpi(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, void *pInOutData, NvU16 *outDataSize) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDsmMxmMxcbExecuteAcpi(pGpu, pKernelDisplay, pInOutData, outDataSize) kdispDsmMxmMxcbExecuteAcpi_92bfc3(pGpu, pKernelDisplay, pInOutData, outDataSize)
#endif //__nvoc_kern_disp_h_disabled

#define kdispDsmMxmMxcbExecuteAcpi_HAL(pGpu, pKernelDisplay, pInOutData, outDataSize) kdispDsmMxmMxcbExecuteAcpi(pGpu, pKernelDisplay, pInOutData, outDataSize)

NV_STATUS kdispInitBrightcStateLoad_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispInitBrightcStateLoad(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispInitBrightcStateLoad(pGpu, pKernelDisplay) kdispInitBrightcStateLoad_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispInitBrightcStateLoad_HAL(pGpu, pKernelDisplay) kdispInitBrightcStateLoad(pGpu, pKernelDisplay)

NV_STATUS kdispSetupAcpiEdid_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispSetupAcpiEdid(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispSetupAcpiEdid(pGpu, pKernelDisplay) kdispSetupAcpiEdid_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispSetupAcpiEdid_HAL(pGpu, pKernelDisplay) kdispSetupAcpiEdid(pGpu, pKernelDisplay)

static inline NvBool kdispReadPendingAwakenIntr_ceaee8(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_PRECOMP(0);
    return ((NvBool)(0 != 0));
}

NvBool kdispReadPendingAwakenIntr_v03_00_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg0);

static inline NvBool kdispReadPendingAwakenIntr_491d52(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg0) {
    return ((NvBool)(0 != 0));
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NvBool kdispReadPendingAwakenIntr(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispReadPendingAwakenIntr(pGpu, pKernelDisplay, pCachedIntr, arg0) kdispReadPendingAwakenIntr_ceaee8(pGpu, pKernelDisplay, pCachedIntr, arg0)
#endif //__nvoc_kern_disp_h_disabled

#define kdispReadPendingAwakenIntr_HAL(pGpu, pKernelDisplay, pCachedIntr, arg0) kdispReadPendingAwakenIntr(pGpu, pKernelDisplay, pCachedIntr, arg0)

static inline NV_STATUS kdispReadAwakenChannelNumMask_92bfc3(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg0, DISPCHNCLASS arg1, struct THREAD_STATE_NODE *arg2) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispReadAwakenChannelNumMask_v03_00_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg0, DISPCHNCLASS arg1, struct THREAD_STATE_NODE *arg2);

static inline NV_STATUS kdispReadAwakenChannelNumMask_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg0, DISPCHNCLASS arg1, struct THREAD_STATE_NODE *arg2) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispReadAwakenChannelNumMask(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg0, DISPCHNCLASS arg1, struct THREAD_STATE_NODE *arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispReadAwakenChannelNumMask(pGpu, pKernelDisplay, arg0, arg1, arg2) kdispReadAwakenChannelNumMask_92bfc3(pGpu, pKernelDisplay, arg0, arg1, arg2)
#endif //__nvoc_kern_disp_h_disabled

#define kdispReadAwakenChannelNumMask_HAL(pGpu, pKernelDisplay, arg0, arg1, arg2) kdispReadAwakenChannelNumMask(pGpu, pKernelDisplay, arg0, arg1, arg2)

NV_STATUS kdispAllocateCommonHandle_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispAllocateCommonHandle(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispAllocateCommonHandle(pGpu, pKernelDisplay) kdispAllocateCommonHandle_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispAllocateCommonHandle_HAL(pGpu, pKernelDisplay) kdispAllocateCommonHandle(pGpu, pKernelDisplay)

void kdispDestroyCommonHandle_IMPL(struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispDestroyCommonHandle(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDestroyCommonHandle(pKernelDisplay) kdispDestroyCommonHandle_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispDestroyCommonHandle_HAL(pKernelDisplay) kdispDestroyCommonHandle(pKernelDisplay)

NV_STATUS kdispAllocateSharedMem_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispAllocateSharedMem(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispAllocateSharedMem(pGpu, pKernelDisplay) kdispAllocateSharedMem_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispAllocateSharedMem_HAL(pGpu, pKernelDisplay) kdispAllocateSharedMem(pGpu, pKernelDisplay)

void kdispFreeSharedMem_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispFreeSharedMem(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispFreeSharedMem(pGpu, pKernelDisplay) kdispFreeSharedMem_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispFreeSharedMem_HAL(pGpu, pKernelDisplay) kdispFreeSharedMem(pGpu, pKernelDisplay)

NV_STATUS kdispConstructEngine_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, ENGDESCRIPTOR engDesc);

static inline NV_STATUS kdispConstructEngine_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, ENGDESCRIPTOR engDesc) {
    return pKernelDisplay->__kdispConstructEngine__(pGpu, pKernelDisplay, engDesc);
}

NV_STATUS kdispStatePreInitLocked_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

static inline NV_STATUS kdispStatePreInitLocked_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    return pKernelDisplay->__kdispStatePreInitLocked__(pGpu, pKernelDisplay);
}

NV_STATUS kdispStateInitLocked_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

static inline NV_STATUS kdispStateInitLocked_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    return pKernelDisplay->__kdispStateInitLocked__(pGpu, pKernelDisplay);
}

void kdispStateDestroy_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

static inline void kdispStateDestroy_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    pKernelDisplay->__kdispStateDestroy__(pGpu, pKernelDisplay);
}

NV_STATUS kdispStateLoad_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags);

static inline NV_STATUS kdispStateLoad_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags) {
    return pKernelDisplay->__kdispStateLoad__(pGpu, pKernelDisplay, flags);
}

NV_STATUS kdispStateUnload_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags);

static inline NV_STATUS kdispStateUnload_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags) {
    return pKernelDisplay->__kdispStateUnload__(pGpu, pKernelDisplay, flags);
}

void kdispRegisterIntrService_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceRecord pRecords[171]);

static inline void kdispRegisterIntrService_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceRecord pRecords[171]) {
    pKernelDisplay->__kdispRegisterIntrService__(pGpu, pKernelDisplay, pRecords);
}

static inline NvU32 kdispServiceInterrupt_acff5e(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceServiceInterruptArguments *pParams) {
    kdispServiceVblank(pGpu, pKernelDisplay, 0, (4), ((void *)0));
    return NV_OK;
}

static inline NvU32 kdispServiceInterrupt_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceServiceInterruptArguments *pParams) {
    return pKernelDisplay->__kdispServiceInterrupt__(pGpu, pKernelDisplay, pParams);
}

static inline NV_STATUS kdispSelectClass_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispSelectClass_v03_00_KERNEL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass);

static inline NV_STATUS kdispSelectClass_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass) {
    return pKernelDisplay->__kdispSelectClass__(pGpu, pKernelDisplay, swClass);
}

static inline NV_STATUS kdispGetChannelNum_46f6a7(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispGetChannelNum_v03_00(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum);

static inline NV_STATUS kdispGetChannelNum_DISPATCH(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum) {
    return pKernelDisplay->__kdispGetChannelNum__(pKernelDisplay, channelClass, channelInstance, pChannelNum);
}

static inline void kdispGetDisplayCapsBaseAndSize_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    return;
}

void kdispGetDisplayCapsBaseAndSize_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize);

static inline void kdispGetDisplayCapsBaseAndSize_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    pKernelDisplay->__kdispGetDisplayCapsBaseAndSize__(pGpu, pKernelDisplay, pOffset, pSize);
}

static inline void kdispGetDisplaySfUserBaseAndSize_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    return;
}

void kdispGetDisplaySfUserBaseAndSize_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize);

static inline void kdispGetDisplaySfUserBaseAndSize_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    pKernelDisplay->__kdispGetDisplaySfUserBaseAndSize__(pGpu, pKernelDisplay, pOffset, pSize);
}

static inline NV_STATUS kdispGetDisplayChannelUserBaseAndSize_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispGetDisplayChannelUserBaseAndSize_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize);

static inline NV_STATUS kdispGetDisplayChannelUserBaseAndSize_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize) {
    return pKernelDisplay->__kdispGetDisplayChannelUserBaseAndSize__(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize);
}

NvBool kdispGetVgaWorkspaceBase_v04_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset);

static inline NvBool kdispGetVgaWorkspaceBase_491d52(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset) {
    return ((NvBool)(0 != 0));
}

static inline NvBool kdispGetVgaWorkspaceBase_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset) {
    return pKernelDisplay->__kdispGetVgaWorkspaceBase__(pGpu, pKernelDisplay, pOffset);
}

NV_STATUS kdispReadRgLineCountAndFrameCount_v03_00_PHYSICAL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount);

static inline NV_STATUS kdispReadRgLineCountAndFrameCount_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispReadRgLineCountAndFrameCount_v03_00_KERNEL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount);

static inline NV_STATUS kdispReadRgLineCountAndFrameCount_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount) {
    return pKernelDisplay->__kdispReadRgLineCountAndFrameCount__(pGpu, pKernelDisplay, head, pLineCount, pFrameCount);
}

void kdispRestoreOriginalLsrMinTime_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 origLsrMinTime);

static inline void kdispRestoreOriginalLsrMinTime_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 origLsrMinTime) {
    return;
}

static inline void kdispRestoreOriginalLsrMinTime_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 origLsrMinTime) {
    pKernelDisplay->__kdispRestoreOriginalLsrMinTime__(pGpu, pKernelDisplay, head, origLsrMinTime);
}
745 
746 NV_STATUS kdispComputeLsrMinTimeValue_v02_07(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 swapRdyHiLsrMinTime, NvU32 *pComputedLsrMinTime);
747 
kdispComputeLsrMinTimeValue_56cd7a(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,NvU32 head,NvU32 swapRdyHiLsrMinTime,NvU32 * pComputedLsrMinTime)748 static inline NV_STATUS kdispComputeLsrMinTimeValue_56cd7a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 swapRdyHiLsrMinTime, NvU32 *pComputedLsrMinTime) {
749     return NV_OK;
750 }
751 
kdispComputeLsrMinTimeValue_DISPATCH(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,NvU32 head,NvU32 swapRdyHiLsrMinTime,NvU32 * pComputedLsrMinTime)752 static inline NV_STATUS kdispComputeLsrMinTimeValue_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 swapRdyHiLsrMinTime, NvU32 *pComputedLsrMinTime) {
753     return pKernelDisplay->__kdispComputeLsrMinTimeValue__(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime);
754 }

void kdispSetSwapBarrierLsrMinTime_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pOrigLsrMinTime, NvU32 newLsrMinTime);

static inline void kdispSetSwapBarrierLsrMinTime_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pOrigLsrMinTime, NvU32 newLsrMinTime) {
    return;
}

static inline void kdispSetSwapBarrierLsrMinTime_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pOrigLsrMinTime, NvU32 newLsrMinTime) {
    pKernelDisplay->__kdispSetSwapBarrierLsrMinTime__(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime);
}

NV_STATUS kdispGetRgScanLock_v02_01(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head0, OBJGPU *pPeerGpu, NvU32 head1, NvBool *pMasterScanLock, NvU32 *pMasterScanLockPin, NvBool *pSlaveScanLock, NvU32 *pSlaveScanLockPin);

static inline NV_STATUS kdispGetRgScanLock_92bfc3(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head0, OBJGPU *pPeerGpu, NvU32 head1, NvBool *pMasterScanLock, NvU32 *pMasterScanLockPin, NvBool *pSlaveScanLock, NvU32 *pSlaveScanLockPin) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kdispGetRgScanLock_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head0, OBJGPU *pPeerGpu, NvU32 head1, NvBool *pMasterScanLock, NvU32 *pMasterScanLockPin, NvBool *pSlaveScanLock, NvU32 *pSlaveScanLockPin) {
    return pKernelDisplay->__kdispGetRgScanLock__(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin);
}

NV_STATUS kdispDetectSliLink_v04_00(struct KernelDisplay *pKernelDisplay, OBJGPU *pParentGpu, OBJGPU *pChildGpu, NvU32 ParentDrPort, NvU32 ChildDrPort);

static inline NV_STATUS kdispDetectSliLink_92bfc3(struct KernelDisplay *pKernelDisplay, OBJGPU *pParentGpu, OBJGPU *pChildGpu, NvU32 ParentDrPort, NvU32 ChildDrPort) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kdispDetectSliLink_DISPATCH(struct KernelDisplay *pKernelDisplay, OBJGPU *pParentGpu, OBJGPU *pChildGpu, NvU32 ParentDrPort, NvU32 ChildDrPort) {
    return pKernelDisplay->__kdispDetectSliLink__(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort);
}

static inline void kdispInitRegistryOverrides_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    return;
}

void kdispInitRegistryOverrides_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

static inline void kdispInitRegistryOverrides_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    pKernelDisplay->__kdispInitRegistryOverrides__(pGpu, pKernelDisplay);
}

NvU32 kdispGetPBTargetAperture_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 memAddrSpace, NvU32 cacheSnoop);

static inline NvU32 kdispGetPBTargetAperture_15a734(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 memAddrSpace, NvU32 cacheSnoop) {
    return 0U;
}

static inline NvU32 kdispGetPBTargetAperture_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 memAddrSpace, NvU32 cacheSnoop) {
    return pKernelDisplay->__kdispGetPBTargetAperture__(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop);
}

static inline NV_STATUS kdispServiceNotificationInterrupt_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pIntrService, IntrServiceServiceNotificationInterruptArguments *pParams) {
    return pIntrService->__kdispServiceNotificationInterrupt__(pGpu, pIntrService, pParams);
}

static inline NV_STATUS kdispStatePreLoad_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
    return pEngstate->__kdispStatePreLoad__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS kdispStatePostUnload_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
    return pEngstate->__kdispStatePostUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS kdispStatePreUnload_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
    return pEngstate->__kdispStatePreUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS kdispStateInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
    return pEngstate->__kdispStateInitUnlocked__(pGpu, pEngstate);
}

static inline void kdispInitMissing_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
    pEngstate->__kdispInitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS kdispStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
    return pEngstate->__kdispStatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NvBool kdispClearInterrupt_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pIntrService, IntrServiceClearInterruptArguments *pParams) {
    return pIntrService->__kdispClearInterrupt__(pGpu, pIntrService, pParams);
}

static inline NV_STATUS kdispStatePostLoad_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate, NvU32 arg0) {
    return pEngstate->__kdispStatePostLoad__(pGpu, pEngstate, arg0);
}

static inline NvBool kdispIsPresent_DISPATCH(POBJGPU pGpu, struct KernelDisplay *pEngstate) {
    return pEngstate->__kdispIsPresent__(pGpu, pEngstate);
}
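
/*
 * The wrappers above forward the engine-state lifecycle virtuals (state
 * pre/post load and unload, unlocked init, missing-engine init, presence
 * check) and the interrupt-service virtuals (notification servicing,
 * interrupt clear) that KernelDisplay inherits from its OBJENGSTATE and
 * IntrService bases to the per-object function pointers installed by NVOC.
 */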

void kdispDestruct_IMPL(struct KernelDisplay *pKernelDisplay);

#define __nvoc_kdispDestruct(pKernelDisplay) kdispDestruct_IMPL(pKernelDisplay)
NV_STATUS kdispConstructKhead_IMPL(struct KernelDisplay *pKernelDisplay);

#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispConstructKhead(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispConstructKhead(pKernelDisplay) kdispConstructKhead_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled
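
/*
 * When the KernelDisplay module is compiled out (__nvoc_kern_disp_h_disabled),
 * the non-virtual helpers here and below resolve to asserting stubs that
 * return NV_ERR_NOT_SUPPORTED (or do nothing, for void functions); otherwise
 * the macro forwards directly to the _IMPL. A minimal caller sketch; the
 * error handling shown is illustrative only:
 *
 *   NV_STATUS status = kdispConstructKhead(pKernelDisplay);
 *   if (status != NV_OK)
 *   {
 *       NV_PRINTF(LEVEL_ERROR, "kernel head construction failed: 0x%x\n",
 *                 status);
 *   }
 */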

void kdispDestructKhead_IMPL(struct KernelDisplay *pKernelDisplay);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispDestructKhead(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDestructKhead(pKernelDisplay) kdispDestructKhead_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

NV_STATUS kdispGetIntChnClsForHwCls_IMPL(struct KernelDisplay *pKernelDisplay, NvU32 hwClass, DISPCHNCLASS *pDispChnClass);

#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispGetIntChnClsForHwCls(struct KernelDisplay *pKernelDisplay, NvU32 hwClass, DISPCHNCLASS *pDispChnClass) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetIntChnClsForHwCls(pKernelDisplay, hwClass, pDispChnClass) kdispGetIntChnClsForHwCls_IMPL(pKernelDisplay, hwClass, pDispChnClass)
#endif //__nvoc_kern_disp_h_disabled

void kdispNotifyCommonEvent_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispNotifyCommonEvent(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispNotifyCommonEvent(pGpu, pKernelDisplay, notifyIndex, pNotifyParams) kdispNotifyCommonEvent_IMPL(pGpu, pKernelDisplay, notifyIndex, pNotifyParams)
#endif //__nvoc_kern_disp_h_disabled

void kdispNotifyEvent_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams, NvU32 notifyParamsSize, NvV32 info32, NvV16 info16);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispNotifyEvent(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams, NvU32 notifyParamsSize, NvV32 info32, NvV16 info16) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispNotifyEvent(pGpu, pKernelDisplay, notifyIndex, pNotifyParams, notifyParamsSize, info32, info16) kdispNotifyEvent_IMPL(pGpu, pKernelDisplay, notifyIndex, pNotifyParams, notifyParamsSize, info32, info16)
#endif //__nvoc_kern_disp_h_disabled

void kdispSetWarPurgeSatellitesOnCoreFree_IMPL(struct KernelDisplay *pKernelDisplay, NvBool value);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispSetWarPurgeSatellitesOnCoreFree(struct KernelDisplay *pKernelDisplay, NvBool value) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispSetWarPurgeSatellitesOnCoreFree(pKernelDisplay, value) kdispSetWarPurgeSatellitesOnCoreFree_IMPL(pKernelDisplay, value)
#endif //__nvoc_kern_disp_h_disabled

#undef PRIVATE_FIELD

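/*
 * Records the saved VGA display and workspace surface state (addresses,
 * memory target types, and validity flags) for later console restore. This
 * description is inferred from the parameter names; see the implementation
 * for the authoritative behavior.
 */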
void
dispdeviceFillVgaSavedDisplayState( OBJGPU *pGpu,
    NvU64   vgaAddr,
    NvU8    vgaMemType,
    NvBool  vgaValid,
    NvU64   workspaceAddr,
    NvU8    workspaceMemType,
    NvBool  workspaceValid,
    NvBool  baseValid,
    NvBool  workspaceBaseValid
);

/*! PushBuffer Target Aperture Types */
typedef enum
{
    IOVA,
    PHYS_NVM,
    PHYS_PCI,
    PHYS_PCI_COHERENT
} PBTARGETAPERTURE;
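
/*
 * kdispGetPBTargetAperture_DISPATCH() (declared earlier in this header)
 * presumably maps a push buffer's memory address space and cache-snoop
 * setting onto one of the PBTARGETAPERTURE values above; the per-value
 * meaning (IO virtual, video memory, non-coherent vs. coherent PCI) is
 * inferred from the names. A minimal sketch, assuming memAddrSpace and
 * cacheSnoop come from the push buffer's memory descriptor:
 *
 *   NvU32 pbTargetAperture = kdispGetPBTargetAperture_DISPATCH(
 *       pGpu, pKernelDisplay, memAddrSpace, cacheSnoop);
 */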

static NV_INLINE struct KernelHead*
kdispGetHead
(
    struct KernelDisplay *pKernelDisplay,
    NvU32 head
)
{
    if (head >= OBJ_MAX_HEADS)
    {
        return NULL;
    }

    return pKernelDisplay->pKernelHead[head];
}
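
/*
 * Callers normally go through the KDISP_GET_HEAD() wrapper defined earlier in
 * this header, which also covers the case where the kernel-head module is
 * compiled out. A minimal sketch; headIndex and the error handling are
 * hypothetical:
 *
 *   struct KernelHead *pKernelHead = KDISP_GET_HEAD(pKernelDisplay, headIndex);
 *   if (pKernelHead == NULL)
 *   {
 *       return NV_ERR_INVALID_ARGUMENT; // out of range or module disabled
 *   }
 */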

static NV_INLINE NvU32
kdispGetNumHeads(struct KernelDisplay *pKernelDisplay)
{
    NV_ASSERT(pKernelDisplay != NULL);
    return pKernelDisplay->numHeads;
}

static NV_INLINE NvU32
kdispGetIsPrimaryVga(struct KernelDisplay *pKernelDisplay)
{
    NV_ASSERT(pKernelDisplay->pStaticInfo != NULL);
    return pKernelDisplay->pStaticInfo->bPrimaryVga;
}

static NV_INLINE NvU32
kdispGetDeferredVblankHeadMask(struct KernelDisplay *pKernelDisplay)
{
    return pKernelDisplay->deferredVblankHeadMask;
}

static NV_INLINE void
kdispSetDeferredVblankHeadMask(struct KernelDisplay *pKernelDisplay, NvU32 vblankHeadMask)
{
    pKernelDisplay->deferredVblankHeadMask = vblankHeadMask;
}

static NV_INLINE NvHandle
kdispGetInternalClientHandle(struct KernelDisplay *pKernelDisplay)
{
    return pKernelDisplay->hInternalClient;
}

static NV_INLINE NvHandle
kdispGetDispCommonHandle(struct KernelDisplay *pKernelDisplay)
{
    return pKernelDisplay->hDispCommonHandle;
}

#endif // KERN_DISP_H

#ifdef __cplusplus
} // extern "C"
#endif

#endif // _G_KERN_DISP_NVOC_H_