
#ifndef _G_KERN_DISP_NVOC_H_
#define _G_KERN_DISP_NVOC_H_
#include "nvoc/runtime.h"

// Version of generated metadata structures
#ifdef NVOC_METADATA_VERSION
#undef NVOC_METADATA_VERSION
#endif
#define NVOC_METADATA_VERSION 0

#ifdef __cplusplus
extern "C" {
#endif

/*
 * SPDX-FileCopyrightText: Copyright (c) 2020-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#pragma once
#include "g_kern_disp_nvoc.h"

#ifndef KERN_DISP_H
#define KERN_DISP_H

/******************************************************************************
*
*       Kernel Display module header
*       This file contains functions managing display on CPU RM
*
******************************************************************************/

#include "gpu/eng_state.h"
#include "gpu/gpu_halspec.h"
#include "gpu/disp/kern_disp_type.h"
#include "gpu/disp/kern_disp_max.h"
#include "gpu/mem_mgr/context_dma.h"
#include "gpu/disp/vblank_callback/vblank.h"

#include "kernel/gpu/intr/intr_service.h"

#include "ctrl/ctrl2080/ctrl2080internal.h"

typedef NV2080_CTRL_INTERNAL_DISPLAY_GET_STATIC_INFO_PARAMS KernelDisplayStaticInfo;

typedef struct
{
    NvU32 kHeadVblankCount[OBJ_MAX_HEADS];
} KernelDisplaySharedMem;


struct DispChannel;

#ifndef __NVOC_CLASS_DispChannel_TYPEDEF__
#define __NVOC_CLASS_DispChannel_TYPEDEF__
typedef struct DispChannel DispChannel;
#endif /* __NVOC_CLASS_DispChannel_TYPEDEF__ */

#ifndef __nvoc_class_id_DispChannel
#define __nvoc_class_id_DispChannel 0xbd2ff3
#endif /* __nvoc_class_id_DispChannel */



struct RgLineCallback;

#ifndef __NVOC_CLASS_RgLineCallback_TYPEDEF__
#define __NVOC_CLASS_RgLineCallback_TYPEDEF__
typedef struct RgLineCallback RgLineCallback;
#endif /* __NVOC_CLASS_RgLineCallback_TYPEDEF__ */

#ifndef __nvoc_class_id_RgLineCallback
#define __nvoc_class_id_RgLineCallback 0xa3ff1c
#endif /* __nvoc_class_id_RgLineCallback */



#define KDISP_GET_HEAD(pKernelDisplay, headID)    (RMCFG_MODULE_KERNEL_HEAD ? kdispGetHead(pKernelDisplay, headID) : NULL)

/*!
 * KernelDisp is a logical abstraction of the GPU Display Engine. The
 * Public API of the Display Engine is exposed through this object, and any
 * interfaces which do not manage the underlying Display hardware can be
 * managed by this object.
 */
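/*
 * Illustrative usage sketch (editorial addition, not part of the generated
 * interface): callers normally obtain the per-GPU KernelDisplay instance from
 * the OBJGPU object and invoke it through the wrapper macros declared below,
 * which dispatch through the per-object vtable. The GPU_GET_KERNEL_DISPLAY
 * accessor name is an assumption used here for illustration only.
 *
 *     struct KernelDisplay *pKernelDisplay = GPU_GET_KERNEL_DISPLAY(pGpu);
 *     NvU32 lineCount = 0, frameCount = 0;
 *
 *     if ((pKernelDisplay != NULL) && kdispIsPresent(pGpu, pKernelDisplay))
 *     {
 *         // Read the RG scanline and frame counters for head 0 through the
 *         // halified wrapper declared later in this header.
 *         NV_STATUS status = kdispReadRgLineCountAndFrameCount_HAL(
 *             pGpu, pKernelDisplay, 0, &lineCount, &frameCount);
 *     }
 */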

// Private field names are wrapped in PRIVATE_FIELD, which does nothing for
// the matching C source file, but causes diagnostics to be issued if another
// source file references the field.
#ifdef NVOC_KERN_DISP_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
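/*
 * Example (editorial addition, hypothetical field name): a declaration such as
 *
 *     NvU32 PRIVATE_FIELD(vblankRefCount);
 *
 * compiles to a plain `vblankRefCount` member inside the matching C source
 * file, which defines NVOC_KERN_DISP_H_PRIVATE_ACCESS_ALLOWED before
 * including this header; every other translation unit gets the
 * NVOC_PRIVATE_FIELD form instead, so out-of-module references to the field
 * produce diagnostics.
 */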


struct KernelDisplay {

    // Metadata
    const struct NVOC_RTTI *__nvoc_rtti;

    // Parent (i.e. superclass or base class) object pointers
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct IntrService __nvoc_base_IntrService;

    // Ancestor object pointers for `staticCast` feature
    struct Object *__nvoc_pbase_Object;    // obj super^2
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;    // engstate super
    struct IntrService *__nvoc_pbase_IntrService;    // intrserv super
    struct KernelDisplay *__nvoc_pbase_KernelDisplay;    // kdisp

    // Vtable with 32 per-object function pointers
    NV_STATUS (*__kdispConstructEngine__)(OBJGPU *, struct KernelDisplay * /*this*/, ENGDESCRIPTOR);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kdispStatePreInitLocked__)(OBJGPU *, struct KernelDisplay * /*this*/);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kdispStateInitLocked__)(OBJGPU *, struct KernelDisplay * /*this*/);  // virtual override (engstate) base (engstate)
    void (*__kdispStateDestroy__)(OBJGPU *, struct KernelDisplay * /*this*/);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kdispStateLoad__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kdispStateUnload__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32);  // virtual override (engstate) base (engstate)
    void (*__kdispRegisterIntrService__)(OBJGPU *, struct KernelDisplay * /*this*/, IntrServiceRecord *);  // virtual override (intrserv) base (intrserv)
    NvU32 (*__kdispServiceInterrupt__)(OBJGPU *, struct KernelDisplay * /*this*/, IntrServiceServiceInterruptArguments *);  // virtual halified (singleton optimized) override (intrserv) base (intrserv) body
    NV_STATUS (*__kdispSelectClass__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32);  // halified (2 hals) body
    NV_STATUS (*__kdispGetChannelNum__)(struct KernelDisplay * /*this*/, DISPCHNCLASS, NvU32, NvU32 *);  // halified (2 hals) body
    void (*__kdispGetDisplayCapsBaseAndSize__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32 *, NvU32 *);  // halified (2 hals) body
    void (*__kdispGetDisplaySfUserBaseAndSize__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32 *, NvU32 *);  // halified (2 hals) body
    NV_STATUS (*__kdispGetDisplayChannelUserBaseAndSize__)(OBJGPU *, struct KernelDisplay * /*this*/, DISPCHNCLASS, NvU32, NvU32 *, NvU32 *);  // halified (2 hals) body
    NvBool (*__kdispGetVgaWorkspaceBase__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU64 *);  // halified (2 hals) body
    NV_STATUS (*__kdispReadRgLineCountAndFrameCount__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, NvU32 *, NvU32 *);  // halified (2 hals) body
    void (*__kdispRestoreOriginalLsrMinTime__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, NvU32);  // halified (2 hals) body
    NV_STATUS (*__kdispComputeLsrMinTimeValue__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, NvU32, NvU32 *);  // halified (2 hals) body
    void (*__kdispSetSwapBarrierLsrMinTime__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, NvU32 *, NvU32);  // halified (2 hals) body
    NV_STATUS (*__kdispGetRgScanLock__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, OBJGPU *, NvU32, NvBool *, NvU32 *, NvBool *, NvU32 *);  // halified (2 hals) body
    NV_STATUS (*__kdispDetectSliLink__)(struct KernelDisplay * /*this*/, OBJGPU *, OBJGPU *, NvU32, NvU32);  // halified (2 hals) body
    void (*__kdispInitRegistryOverrides__)(OBJGPU *, struct KernelDisplay * /*this*/);  // halified (2 hals) body
    NvU32 (*__kdispGetPBTargetAperture__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, NvU32);  // halified (2 hals) body
    void (*__kdispInitMissing__)(struct OBJGPU *, struct KernelDisplay * /*this*/);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStatePreInitUnlocked__)(struct OBJGPU *, struct KernelDisplay * /*this*/);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStateInitUnlocked__)(struct OBJGPU *, struct KernelDisplay * /*this*/);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStatePreLoad__)(struct OBJGPU *, struct KernelDisplay * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStatePostLoad__)(struct OBJGPU *, struct KernelDisplay * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStatePreUnload__)(struct OBJGPU *, struct KernelDisplay * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStatePostUnload__)(struct OBJGPU *, struct KernelDisplay * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    NvBool (*__kdispIsPresent__)(struct OBJGPU *, struct KernelDisplay * /*this*/);  // virtual inherited (engstate) base (engstate)
    NvBool (*__kdispClearInterrupt__)(OBJGPU *, struct KernelDisplay * /*this*/, IntrServiceClearInterruptArguments *);  // virtual inherited (intrserv) base (intrserv)
    NV_STATUS (*__kdispServiceNotificationInterrupt__)(OBJGPU *, struct KernelDisplay * /*this*/, IntrServiceServiceNotificationInterruptArguments *);  // virtual inherited (intrserv) base (intrserv)

    // 6 PDB properties
    NvBool PDB_PROP_KDISP_IMP_ENABLE;
    NvBool PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANK_ALWAYS;
    NvBool PDB_PROP_KDISP_FEATURE_STRETCH_VBLANK_CAPABLE;
    NvBool PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANKS_ONLY_ON_HMD_ACTIVE;
    NvBool PDB_PROP_KDISP_IN_AWAKEN_INTR;

    // Data members
    struct DisplayInstanceMemory *pInst;
    struct KernelHead *pKernelHead[8];
    const KernelDisplayStaticInfo *pStaticInfo;
    NvBool bWarPurgeSatellitesOnCoreFree;
    struct RgLineCallback *rgLineCallbackPerHead[8][2];
    NvU32 isrVblankHeads;
    NvBool bExtdevIntrSupported;
    NvU32 numHeads;
    NvU32 deferredVblankHeadMask;
    NvHandle hInternalClient;
    NvHandle hInternalDevice;
    NvHandle hInternalSubdevice;
    NvHandle hDispCommonHandle;
    MEMORY_DESCRIPTOR *pSharedMemDesc;
    KernelDisplaySharedMem *pSharedData;
    NvBool bFeatureStretchVblankCapable;
};

#ifndef __NVOC_CLASS_KernelDisplay_TYPEDEF__
#define __NVOC_CLASS_KernelDisplay_TYPEDEF__
typedef struct KernelDisplay KernelDisplay;
#endif /* __NVOC_CLASS_KernelDisplay_TYPEDEF__ */

#ifndef __nvoc_class_id_KernelDisplay
#define __nvoc_class_id_KernelDisplay 0x55952e
#endif /* __nvoc_class_id_KernelDisplay */

// Casting support
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelDisplay;

#define __staticCast_KernelDisplay(pThis) \
    ((pThis)->__nvoc_pbase_KernelDisplay)

#ifdef __nvoc_kern_disp_h_disabled
#define __dynamicCast_KernelDisplay(pThis) ((KernelDisplay*)NULL)
#else //__nvoc_kern_disp_h_disabled
#define __dynamicCast_KernelDisplay(pThis) \
    ((KernelDisplay*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelDisplay)))
#endif //__nvoc_kern_disp_h_disabled
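/*
 * Cast sketch (editorial addition): given some NVOC object pointer
 * (pSomeObject below is illustrative), the generated helper above yields a
 * KernelDisplay pointer, or NULL when the object is not a KernelDisplay or
 * the module is compiled out. Callers usually go through the generic
 * dynamicCast macro; the direct form is shown to stay within names defined
 * in this header.
 *
 *     KernelDisplay *pKernelDisplay = __dynamicCast_KernelDisplay(pSomeObject);
 *     if (pKernelDisplay == NULL)
 *         return NV_ERR_INVALID_OBJECT;
 */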

// Property macros
#define PDB_PROP_KDISP_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KDISP_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
#define PDB_PROP_KDISP_IN_AWAKEN_INTR_BASE_CAST
#define PDB_PROP_KDISP_IN_AWAKEN_INTR_BASE_NAME PDB_PROP_KDISP_IN_AWAKEN_INTR
#define PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANK_ALWAYS_BASE_CAST
#define PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANK_ALWAYS_BASE_NAME PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANK_ALWAYS
#define PDB_PROP_KDISP_IMP_ENABLE_BASE_CAST
#define PDB_PROP_KDISP_IMP_ENABLE_BASE_NAME PDB_PROP_KDISP_IMP_ENABLE
#define PDB_PROP_KDISP_FEATURE_STRETCH_VBLANK_CAPABLE_BASE_CAST
#define PDB_PROP_KDISP_FEATURE_STRETCH_VBLANK_CAPABLE_BASE_NAME PDB_PROP_KDISP_FEATURE_STRETCH_VBLANK_CAPABLE
#define PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANKS_ONLY_ON_HMD_ACTIVE_BASE_CAST
#define PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANKS_ONLY_ON_HMD_ACTIVE_BASE_NAME PDB_PROP_KDISP_BUG_2089053_SERIALIZE_AGGRESSIVE_VBLANKS_ONLY_ON_HMD_ACTIVE

NV_STATUS __nvoc_objCreateDynamic_KernelDisplay(KernelDisplay**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelDisplay(KernelDisplay**, Dynamic*, NvU32);
#define __objCreate_KernelDisplay(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelDisplay((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
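/*
 * Creation sketch (editorial addition): a minimal example of instantiating
 * this class through the generated constructor wrapper. The parent object
 * (pGpu here) and the zero create flags are assumptions for illustration.
 *
 *     KernelDisplay *pKernelDisplay = NULL;
 *     NV_STATUS status = __objCreate_KernelDisplay(&pKernelDisplay, pGpu, 0);
 *     if (status == NV_OK)
 *     {
 *         // The new object can be viewed as its OBJENGSTATE base (or its
 *         // IntrService base) through the NVOC cast helpers.
 *         struct OBJENGSTATE *pEngstate = staticCast(pKernelDisplay, OBJENGSTATE);
 *     }
 */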


// Wrapper macros
#define kdispConstructEngine_FNPTR(pKernelDisplay) pKernelDisplay->__kdispConstructEngine__
#define kdispConstructEngine(pGpu, pKernelDisplay, engDesc) kdispConstructEngine_DISPATCH(pGpu, pKernelDisplay, engDesc)
#define kdispStatePreInitLocked_FNPTR(pKernelDisplay) pKernelDisplay->__kdispStatePreInitLocked__
#define kdispStatePreInitLocked(pGpu, pKernelDisplay) kdispStatePreInitLocked_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateInitLocked_FNPTR(pKernelDisplay) pKernelDisplay->__kdispStateInitLocked__
#define kdispStateInitLocked(pGpu, pKernelDisplay) kdispStateInitLocked_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateDestroy_FNPTR(pKernelDisplay) pKernelDisplay->__kdispStateDestroy__
#define kdispStateDestroy(pGpu, pKernelDisplay) kdispStateDestroy_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateLoad_FNPTR(pKernelDisplay) pKernelDisplay->__kdispStateLoad__
#define kdispStateLoad(pGpu, pKernelDisplay, flags) kdispStateLoad_DISPATCH(pGpu, pKernelDisplay, flags)
#define kdispStateUnload_FNPTR(pKernelDisplay) pKernelDisplay->__kdispStateUnload__
#define kdispStateUnload(pGpu, pKernelDisplay, flags) kdispStateUnload_DISPATCH(pGpu, pKernelDisplay, flags)
#define kdispRegisterIntrService_FNPTR(pKernelDisplay) pKernelDisplay->__kdispRegisterIntrService__
#define kdispRegisterIntrService(pGpu, pKernelDisplay, pRecords) kdispRegisterIntrService_DISPATCH(pGpu, pKernelDisplay, pRecords)
#define kdispServiceInterrupt_FNPTR(pKernelDisplay) pKernelDisplay->__kdispServiceInterrupt__
#define kdispServiceInterrupt(pGpu, pKernelDisplay, pParams) kdispServiceInterrupt_DISPATCH(pGpu, pKernelDisplay, pParams)
#define kdispServiceInterrupt_HAL(pGpu, pKernelDisplay, pParams) kdispServiceInterrupt_DISPATCH(pGpu, pKernelDisplay, pParams)
#define kdispSelectClass_FNPTR(pKernelDisplay) pKernelDisplay->__kdispSelectClass__
#define kdispSelectClass(pGpu, pKernelDisplay, swClass) kdispSelectClass_DISPATCH(pGpu, pKernelDisplay, swClass)
#define kdispSelectClass_HAL(pGpu, pKernelDisplay, swClass) kdispSelectClass_DISPATCH(pGpu, pKernelDisplay, swClass)
#define kdispGetChannelNum_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetChannelNum__
#define kdispGetChannelNum(pKernelDisplay, channelClass, channelInstance, pChannelNum) kdispGetChannelNum_DISPATCH(pKernelDisplay, channelClass, channelInstance, pChannelNum)
#define kdispGetChannelNum_HAL(pKernelDisplay, channelClass, channelInstance, pChannelNum) kdispGetChannelNum_DISPATCH(pKernelDisplay, channelClass, channelInstance, pChannelNum)
#define kdispGetDisplayCapsBaseAndSize_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetDisplayCapsBaseAndSize__
#define kdispGetDisplayCapsBaseAndSize(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplayCapsBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplayCapsBaseAndSize_HAL(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplayCapsBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplaySfUserBaseAndSize_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetDisplaySfUserBaseAndSize__
#define kdispGetDisplaySfUserBaseAndSize(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplaySfUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplaySfUserBaseAndSize_HAL(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplaySfUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplayChannelUserBaseAndSize_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetDisplayChannelUserBaseAndSize__
#define kdispGetDisplayChannelUserBaseAndSize(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize) kdispGetDisplayChannelUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize)
#define kdispGetDisplayChannelUserBaseAndSize_HAL(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize) kdispGetDisplayChannelUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize)
#define kdispGetVgaWorkspaceBase_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetVgaWorkspaceBase__
#define kdispGetVgaWorkspaceBase(pGpu, pKernelDisplay, pOffset) kdispGetVgaWorkspaceBase_DISPATCH(pGpu, pKernelDisplay, pOffset)
#define kdispGetVgaWorkspaceBase_HAL(pGpu, pKernelDisplay, pOffset) kdispGetVgaWorkspaceBase_DISPATCH(pGpu, pKernelDisplay, pOffset)
#define kdispReadRgLineCountAndFrameCount_FNPTR(pKernelDisplay) pKernelDisplay->__kdispReadRgLineCountAndFrameCount__
#define kdispReadRgLineCountAndFrameCount(pGpu, pKernelDisplay, head, pLineCount, pFrameCount) kdispReadRgLineCountAndFrameCount_DISPATCH(pGpu, pKernelDisplay, head, pLineCount, pFrameCount)
#define kdispReadRgLineCountAndFrameCount_HAL(pGpu, pKernelDisplay, head, pLineCount, pFrameCount) kdispReadRgLineCountAndFrameCount_DISPATCH(pGpu, pKernelDisplay, head, pLineCount, pFrameCount)
#define kdispRestoreOriginalLsrMinTime_FNPTR(pKernelDisplay) pKernelDisplay->__kdispRestoreOriginalLsrMinTime__
#define kdispRestoreOriginalLsrMinTime(pGpu, pKernelDisplay, head, origLsrMinTime) kdispRestoreOriginalLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, origLsrMinTime)
#define kdispRestoreOriginalLsrMinTime_HAL(pGpu, pKernelDisplay, head, origLsrMinTime) kdispRestoreOriginalLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, origLsrMinTime)
#define kdispComputeLsrMinTimeValue_FNPTR(pKernelDisplay) pKernelDisplay->__kdispComputeLsrMinTimeValue__
#define kdispComputeLsrMinTimeValue(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime) kdispComputeLsrMinTimeValue_DISPATCH(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime)
#define kdispComputeLsrMinTimeValue_HAL(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime) kdispComputeLsrMinTimeValue_DISPATCH(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime)
#define kdispSetSwapBarrierLsrMinTime_FNPTR(pKernelDisplay) pKernelDisplay->__kdispSetSwapBarrierLsrMinTime__
#define kdispSetSwapBarrierLsrMinTime(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime) kdispSetSwapBarrierLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime)
#define kdispSetSwapBarrierLsrMinTime_HAL(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime) kdispSetSwapBarrierLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime)
#define kdispGetRgScanLock_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetRgScanLock__
#define kdispGetRgScanLock(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin) kdispGetRgScanLock_DISPATCH(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin)
#define kdispGetRgScanLock_HAL(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin) kdispGetRgScanLock_DISPATCH(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin)
#define kdispDetectSliLink_FNPTR(pKernelDisplay) pKernelDisplay->__kdispDetectSliLink__
#define kdispDetectSliLink(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort) kdispDetectSliLink_DISPATCH(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort)
#define kdispDetectSliLink_HAL(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort) kdispDetectSliLink_DISPATCH(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort)
#define kdispInitRegistryOverrides_FNPTR(pKernelDisplay) pKernelDisplay->__kdispInitRegistryOverrides__
#define kdispInitRegistryOverrides(pGpu, pKernelDisplay) kdispInitRegistryOverrides_DISPATCH(pGpu, pKernelDisplay)
#define kdispInitRegistryOverrides_HAL(pGpu, pKernelDisplay) kdispInitRegistryOverrides_DISPATCH(pGpu, pKernelDisplay)
#define kdispGetPBTargetAperture_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetPBTargetAperture__
#define kdispGetPBTargetAperture(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop) kdispGetPBTargetAperture_DISPATCH(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop)
#define kdispGetPBTargetAperture_HAL(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop) kdispGetPBTargetAperture_DISPATCH(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop)
#define kdispInitMissing_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateInitMissing__
#define kdispInitMissing(pGpu, pEngstate) kdispInitMissing_DISPATCH(pGpu, pEngstate)
#define kdispStatePreInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreInitUnlocked__
#define kdispStatePreInitUnlocked(pGpu, pEngstate) kdispStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kdispStateInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStateInitUnlocked__
#define kdispStateInitUnlocked(pGpu, pEngstate) kdispStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kdispStatePreLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreLoad__
#define kdispStatePreLoad(pGpu, pEngstate, arg3) kdispStatePreLoad_DISPATCH(pGpu, pEngstate, arg3)
#define kdispStatePostLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePostLoad__
#define kdispStatePostLoad(pGpu, pEngstate, arg3) kdispStatePostLoad_DISPATCH(pGpu, pEngstate, arg3)
#define kdispStatePreUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreUnload__
#define kdispStatePreUnload(pGpu, pEngstate, arg3) kdispStatePreUnload_DISPATCH(pGpu, pEngstate, arg3)
#define kdispStatePostUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePostUnload__
#define kdispStatePostUnload(pGpu, pEngstate, arg3) kdispStatePostUnload_DISPATCH(pGpu, pEngstate, arg3)
#define kdispIsPresent_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateIsPresent__
#define kdispIsPresent(pGpu, pEngstate) kdispIsPresent_DISPATCH(pGpu, pEngstate)
#define kdispClearInterrupt_FNPTR(pIntrService) pIntrService->__nvoc_base_IntrService.__intrservClearInterrupt__
#define kdispClearInterrupt(pGpu, pIntrService, pParams) kdispClearInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define kdispServiceNotificationInterrupt_FNPTR(pIntrService) pIntrService->__nvoc_base_IntrService.__intrservServiceNotificationInterrupt__
#define kdispServiceNotificationInterrupt(pGpu, pIntrService, pParams) kdispServiceNotificationInterrupt_DISPATCH(pGpu, pIntrService, pParams)

// Dispatch functions
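/*
 * Editorial note: each wrapper macro above resolves to one of the _DISPATCH
 * inlines below, which simply calls through the per-object vtable pointer
 * filled in by NVOC at construction time. For example,
 *
 *     status = kdispStateLoad(pGpu, pKernelDisplay, flags);
 *
 * expands to kdispStateLoad_DISPATCH(pGpu, pKernelDisplay, flags), which in
 * turn invokes pKernelDisplay->__kdispStateLoad__, i.e. the engstate override
 * bound for this GPU.
 */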
static inline NV_STATUS kdispConstructEngine_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, ENGDESCRIPTOR engDesc) {
    return pKernelDisplay->__kdispConstructEngine__(pGpu, pKernelDisplay, engDesc);
}

static inline NV_STATUS kdispStatePreInitLocked_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    return pKernelDisplay->__kdispStatePreInitLocked__(pGpu, pKernelDisplay);
}

static inline NV_STATUS kdispStateInitLocked_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    return pKernelDisplay->__kdispStateInitLocked__(pGpu, pKernelDisplay);
}

static inline void kdispStateDestroy_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    pKernelDisplay->__kdispStateDestroy__(pGpu, pKernelDisplay);
}

static inline NV_STATUS kdispStateLoad_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags) {
    return pKernelDisplay->__kdispStateLoad__(pGpu, pKernelDisplay, flags);
}

static inline NV_STATUS kdispStateUnload_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags) {
    return pKernelDisplay->__kdispStateUnload__(pGpu, pKernelDisplay, flags);
}

static inline void kdispRegisterIntrService_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceRecord pRecords[175]) {
    pKernelDisplay->__kdispRegisterIntrService__(pGpu, pKernelDisplay, pRecords);
}

static inline NvU32 kdispServiceInterrupt_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceServiceInterruptArguments *pParams) {
    return pKernelDisplay->__kdispServiceInterrupt__(pGpu, pKernelDisplay, pParams);
}

static inline NV_STATUS kdispSelectClass_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass) {
    return pKernelDisplay->__kdispSelectClass__(pGpu, pKernelDisplay, swClass);
}

static inline NV_STATUS kdispGetChannelNum_DISPATCH(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum) {
    return pKernelDisplay->__kdispGetChannelNum__(pKernelDisplay, channelClass, channelInstance, pChannelNum);
}

static inline void kdispGetDisplayCapsBaseAndSize_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    pKernelDisplay->__kdispGetDisplayCapsBaseAndSize__(pGpu, pKernelDisplay, pOffset, pSize);
}

static inline void kdispGetDisplaySfUserBaseAndSize_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    pKernelDisplay->__kdispGetDisplaySfUserBaseAndSize__(pGpu, pKernelDisplay, pOffset, pSize);
}

static inline NV_STATUS kdispGetDisplayChannelUserBaseAndSize_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize) {
    return pKernelDisplay->__kdispGetDisplayChannelUserBaseAndSize__(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize);
}

static inline NvBool kdispGetVgaWorkspaceBase_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset) {
    return pKernelDisplay->__kdispGetVgaWorkspaceBase__(pGpu, pKernelDisplay, pOffset);
}

static inline NV_STATUS kdispReadRgLineCountAndFrameCount_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount) {
    return pKernelDisplay->__kdispReadRgLineCountAndFrameCount__(pGpu, pKernelDisplay, head, pLineCount, pFrameCount);
}

static inline void kdispRestoreOriginalLsrMinTime_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 origLsrMinTime) {
    pKernelDisplay->__kdispRestoreOriginalLsrMinTime__(pGpu, pKernelDisplay, head, origLsrMinTime);
}

static inline NV_STATUS kdispComputeLsrMinTimeValue_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 swapRdyHiLsrMinTime, NvU32 *pComputedLsrMinTime) {
    return pKernelDisplay->__kdispComputeLsrMinTimeValue__(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime);
}

static inline void kdispSetSwapBarrierLsrMinTime_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pOrigLsrMinTime, NvU32 newLsrMinTime) {
    pKernelDisplay->__kdispSetSwapBarrierLsrMinTime__(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime);
}

static inline NV_STATUS kdispGetRgScanLock_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head0, OBJGPU *pPeerGpu, NvU32 head1, NvBool *pMasterScanLock, NvU32 *pMasterScanLockPin, NvBool *pSlaveScanLock, NvU32 *pSlaveScanLockPin) {
    return pKernelDisplay->__kdispGetRgScanLock__(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin);
}

static inline NV_STATUS kdispDetectSliLink_DISPATCH(struct KernelDisplay *pKernelDisplay, OBJGPU *pParentGpu, OBJGPU *pChildGpu, NvU32 ParentDrPort, NvU32 ChildDrPort) {
    return pKernelDisplay->__kdispDetectSliLink__(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort);
}

static inline void kdispInitRegistryOverrides_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    pKernelDisplay->__kdispInitRegistryOverrides__(pGpu, pKernelDisplay);
}

static inline NvU32 kdispGetPBTargetAperture_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 memAddrSpace, NvU32 cacheSnoop) {
    return pKernelDisplay->__kdispGetPBTargetAperture__(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop);
}

static inline void kdispInitMissing_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate) {
    pEngstate->__kdispInitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS kdispStatePreInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate) {
    return pEngstate->__kdispStatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS kdispStateInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate) {
    return pEngstate->__kdispStateInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS kdispStatePreLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate, NvU32 arg3) {
    return pEngstate->__kdispStatePreLoad__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kdispStatePostLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate, NvU32 arg3) {
    return pEngstate->__kdispStatePostLoad__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kdispStatePreUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate, NvU32 arg3) {
    return pEngstate->__kdispStatePreUnload__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kdispStatePostUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate, NvU32 arg3) {
    return pEngstate->__kdispStatePostUnload__(pGpu, pEngstate, arg3);
}

static inline NvBool kdispIsPresent_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate) {
    return pEngstate->__kdispIsPresent__(pGpu, pEngstate);
}

static inline NvBool kdispClearInterrupt_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pIntrService, IntrServiceClearInterruptArguments *pParams) {
    return pIntrService->__kdispClearInterrupt__(pGpu, pIntrService, pParams);
}

static inline NV_STATUS kdispServiceNotificationInterrupt_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pIntrService, IntrServiceServiceNotificationInterruptArguments *pParams) {
    return pIntrService->__kdispServiceNotificationInterrupt__(pGpu, pIntrService, pParams);
}

void kdispServiceVblank_KERNEL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispServiceVblank(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispServiceVblank(pGpu, pKernelDisplay, arg3, arg4, arg5) kdispServiceVblank_KERNEL(pGpu, pKernelDisplay, arg3, arg4, arg5)
#endif //__nvoc_kern_disp_h_disabled

#define kdispServiceVblank_HAL(pGpu, pKernelDisplay, arg3, arg4, arg5) kdispServiceVblank(pGpu, pKernelDisplay, arg3, arg4, arg5)
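/*
 * Editorial note on the pattern used by the remaining functions in this
 * header: when __nvoc_kern_disp_h_disabled is defined (KernelDisplay compiled
 * out), each kdisp* name binds to an inline stub that asserts and returns an
 * error or default value; otherwise it binds directly to the _IMPL, _KERNEL,
 * or per-HAL routine. Callers are written the same way in both
 * configurations, e.g.
 *
 *     NV_STATUS status = kdispConstructInstMem_HAL(pKernelDisplay);
 *     if (status != NV_OK)
 *         return status;
 */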

NV_STATUS kdispConstructInstMem_IMPL(struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispConstructInstMem(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispConstructInstMem(pKernelDisplay) kdispConstructInstMem_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispConstructInstMem_HAL(pKernelDisplay) kdispConstructInstMem(pKernelDisplay)

void kdispDestructInstMem_IMPL(struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispDestructInstMem(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDestructInstMem(pKernelDisplay) kdispDestructInstMem_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispDestructInstMem_HAL(pKernelDisplay) kdispDestructInstMem(pKernelDisplay)

static inline NvS32 kdispGetBaseOffset_4a4dee(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    return 0;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NvS32 kdispGetBaseOffset(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return 0;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetBaseOffset(pGpu, pKernelDisplay) kdispGetBaseOffset_4a4dee(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispGetBaseOffset_HAL(pGpu, pKernelDisplay) kdispGetBaseOffset(pGpu, pKernelDisplay)

static inline NV_STATUS kdispImportImpData_56cd7a(struct KernelDisplay *pKernelDisplay) {
    return NV_OK;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispImportImpData(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispImportImpData(pKernelDisplay) kdispImportImpData_56cd7a(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispImportImpData_HAL(pKernelDisplay) kdispImportImpData(pKernelDisplay)

static inline NV_STATUS kdispArbAndAllocDisplayBandwidth_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, enum DISPLAY_ICC_BW_CLIENT iccBwClient, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispArbAndAllocDisplayBandwidth(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, enum DISPLAY_ICC_BW_CLIENT iccBwClient, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispArbAndAllocDisplayBandwidth(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispArbAndAllocDisplayBandwidth_46f6a7(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
#endif //__nvoc_kern_disp_h_disabled

#define kdispArbAndAllocDisplayBandwidth_HAL(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispArbAndAllocDisplayBandwidth(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)

NV_STATUS kdispSetPushBufferParamsToPhysical_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvHandle hObjectBuffer, struct ContextDma *pBufferContextDma, NvU32 hClass, NvU32 channelInstance, DISPCHNCLASS internalDispChnClass);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispSetPushBufferParamsToPhysical(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvHandle hObjectBuffer, struct ContextDma *pBufferContextDma, NvU32 hClass, NvU32 channelInstance, DISPCHNCLASS internalDispChnClass) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispSetPushBufferParamsToPhysical(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass) kdispSetPushBufferParamsToPhysical_IMPL(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass)
#endif //__nvoc_kern_disp_h_disabled

#define kdispSetPushBufferParamsToPhysical_HAL(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass) kdispSetPushBufferParamsToPhysical(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass)

static inline NV_STATUS kdispAcquireDispChannelHw_56cd7a(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvU32 channelInstance, NvHandle hObjectBuffer, NvU32 initialGetPutOffset, NvBool allowGrabWithinSameClient, NvBool connectPbAtGrab) {
    return NV_OK;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispAcquireDispChannelHw(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvU32 channelInstance, NvHandle hObjectBuffer, NvU32 initialGetPutOffset, NvBool allowGrabWithinSameClient, NvBool connectPbAtGrab) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispAcquireDispChannelHw(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab) kdispAcquireDispChannelHw_56cd7a(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab)
#endif //__nvoc_kern_disp_h_disabled

#define kdispAcquireDispChannelHw_HAL(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab) kdispAcquireDispChannelHw(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab)

static inline NV_STATUS kdispReleaseDispChannelHw_56cd7a(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
    return NV_OK;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispReleaseDispChannelHw(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispReleaseDispChannelHw(pKernelDisplay, pDispChannel) kdispReleaseDispChannelHw_56cd7a(pKernelDisplay, pDispChannel)
#endif //__nvoc_kern_disp_h_disabled

#define kdispReleaseDispChannelHw_HAL(pKernelDisplay, pDispChannel) kdispReleaseDispChannelHw(pKernelDisplay, pDispChannel)

NV_STATUS kdispMapDispChannel_IMPL(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispMapDispChannel(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispMapDispChannel(pKernelDisplay, pDispChannel) kdispMapDispChannel_IMPL(pKernelDisplay, pDispChannel)
#endif //__nvoc_kern_disp_h_disabled

#define kdispMapDispChannel_HAL(pKernelDisplay, pDispChannel) kdispMapDispChannel(pKernelDisplay, pDispChannel)

void kdispUnbindUnmapDispChannel_IMPL(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispUnbindUnmapDispChannel(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispUnbindUnmapDispChannel(pKernelDisplay, pDispChannel) kdispUnbindUnmapDispChannel_IMPL(pKernelDisplay, pDispChannel)
#endif //__nvoc_kern_disp_h_disabled

#define kdispUnbindUnmapDispChannel_HAL(pKernelDisplay, pDispChannel) kdispUnbindUnmapDispChannel(pKernelDisplay, pDispChannel)

NV_STATUS kdispRegisterRgLineCallback_IMPL(struct KernelDisplay *pKernelDisplay, struct RgLineCallback *pRgLineCallback, NvU32 head, NvU32 rgIntrLine, NvBool bEnable);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispRegisterRgLineCallback(struct KernelDisplay *pKernelDisplay, struct RgLineCallback *pRgLineCallback, NvU32 head, NvU32 rgIntrLine, NvBool bEnable) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispRegisterRgLineCallback(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable) kdispRegisterRgLineCallback_IMPL(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable)
#endif //__nvoc_kern_disp_h_disabled

#define kdispRegisterRgLineCallback_HAL(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable) kdispRegisterRgLineCallback(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable)

void kdispInvokeRgLineCallback_KERNEL(struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 rgIntrLine, NvBool bIsIrqlIsr);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispInvokeRgLineCallback(struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 rgIntrLine, NvBool bIsIrqlIsr) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispInvokeRgLineCallback(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr) kdispInvokeRgLineCallback_KERNEL(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr)
#endif //__nvoc_kern_disp_h_disabled

#define kdispInvokeRgLineCallback_HAL(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr) kdispInvokeRgLineCallback(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr)

632 NvU32 kdispReadPendingVblank_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct THREAD_STATE_NODE *arg3);
633 
634 
635 #ifdef __nvoc_kern_disp_h_disabled
kdispReadPendingVblank(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,struct THREAD_STATE_NODE * arg3)636 static inline NvU32 kdispReadPendingVblank(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct THREAD_STATE_NODE *arg3) {
637     NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
638     return 0;
639 }
640 #else //__nvoc_kern_disp_h_disabled
641 #define kdispReadPendingVblank(pGpu, pKernelDisplay, arg3) kdispReadPendingVblank_IMPL(pGpu, pKernelDisplay, arg3)
642 #endif //__nvoc_kern_disp_h_disabled
643 
644 #define kdispReadPendingVblank_HAL(pGpu, pKernelDisplay, arg3) kdispReadPendingVblank(pGpu, pKernelDisplay, arg3)
645 
kdispInvokeDisplayModesetCallback_b3696a(struct KernelDisplay * pKernelDisplay,NvBool bModesetStart,NvU32 minRequiredIsoBandwidthKBPS,NvU32 minRequiredFloorBandwidthKBPS)646 static inline void kdispInvokeDisplayModesetCallback_b3696a(struct KernelDisplay *pKernelDisplay, NvBool bModesetStart, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
647     return;
648 }
649 
650 
651 #ifdef __nvoc_kern_disp_h_disabled
kdispInvokeDisplayModesetCallback(struct KernelDisplay * pKernelDisplay,NvBool bModesetStart,NvU32 minRequiredIsoBandwidthKBPS,NvU32 minRequiredFloorBandwidthKBPS)652 static inline void kdispInvokeDisplayModesetCallback(struct KernelDisplay *pKernelDisplay, NvBool bModesetStart, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
653     NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
654 }
655 #else //__nvoc_kern_disp_h_disabled
656 #define kdispInvokeDisplayModesetCallback(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispInvokeDisplayModesetCallback_b3696a(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
657 #endif //__nvoc_kern_disp_h_disabled
658 
659 #define kdispInvokeDisplayModesetCallback_HAL(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispInvokeDisplayModesetCallback(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
660 
kdispDsmMxmMxcbExecuteAcpi_92bfc3(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,void * pInOutData,NvU16 * outDataSize)661 static inline NV_STATUS kdispDsmMxmMxcbExecuteAcpi_92bfc3(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, void *pInOutData, NvU16 *outDataSize) {
662     NV_ASSERT_PRECOMP(0);
663     return NV_ERR_NOT_SUPPORTED;
664 }
665 
666 
667 #ifdef __nvoc_kern_disp_h_disabled
kdispDsmMxmMxcbExecuteAcpi(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,void * pInOutData,NvU16 * outDataSize)668 static inline NV_STATUS kdispDsmMxmMxcbExecuteAcpi(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, void *pInOutData, NvU16 *outDataSize) {
669     NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
670     return NV_ERR_NOT_SUPPORTED;
671 }
672 #else //__nvoc_kern_disp_h_disabled
673 #define kdispDsmMxmMxcbExecuteAcpi(pGpu, pKernelDisplay, pInOutData, outDataSize) kdispDsmMxmMxcbExecuteAcpi_92bfc3(pGpu, pKernelDisplay, pInOutData, outDataSize)
674 #endif //__nvoc_kern_disp_h_disabled
675 
676 #define kdispDsmMxmMxcbExecuteAcpi_HAL(pGpu, pKernelDisplay, pInOutData, outDataSize) kdispDsmMxmMxcbExecuteAcpi(pGpu, pKernelDisplay, pInOutData, outDataSize)
677 
678 NV_STATUS kdispInitBrightcStateLoad_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);
679 
680 
681 #ifdef __nvoc_kern_disp_h_disabled
kdispInitBrightcStateLoad(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay)682 static inline NV_STATUS kdispInitBrightcStateLoad(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
683     NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
684     return NV_ERR_NOT_SUPPORTED;
685 }
686 #else //__nvoc_kern_disp_h_disabled
687 #define kdispInitBrightcStateLoad(pGpu, pKernelDisplay) kdispInitBrightcStateLoad_IMPL(pGpu, pKernelDisplay)
688 #endif //__nvoc_kern_disp_h_disabled
689 
690 #define kdispInitBrightcStateLoad_HAL(pGpu, pKernelDisplay) kdispInitBrightcStateLoad(pGpu, pKernelDisplay)
691 
692 NV_STATUS kdispSetupAcpiEdid_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);
693 
694 
695 #ifdef __nvoc_kern_disp_h_disabled
kdispSetupAcpiEdid(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay)696 static inline NV_STATUS kdispSetupAcpiEdid(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
697     NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
698     return NV_ERR_NOT_SUPPORTED;
699 }
700 #else //__nvoc_kern_disp_h_disabled
701 #define kdispSetupAcpiEdid(pGpu, pKernelDisplay) kdispSetupAcpiEdid_IMPL(pGpu, pKernelDisplay)
702 #endif //__nvoc_kern_disp_h_disabled
703 
704 #define kdispSetupAcpiEdid_HAL(pGpu, pKernelDisplay) kdispSetupAcpiEdid(pGpu, pKernelDisplay)
705 
kdispReadPendingAwakenIntr_ceaee8(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,NvU32 * pCachedIntr,struct THREAD_STATE_NODE * arg4)706 static inline NvBool kdispReadPendingAwakenIntr_ceaee8(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg4) {
707     NV_ASSERT_PRECOMP(0);
708     return ((NvBool)(0 != 0));
709 }
710 
711 NvBool kdispReadPendingAwakenIntr_v03_00_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg4);
712 
kdispReadPendingAwakenIntr_491d52(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,NvU32 * pCachedIntr,struct THREAD_STATE_NODE * arg4)713 static inline NvBool kdispReadPendingAwakenIntr_491d52(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg4) {
714     return ((NvBool)(0 != 0));
715 }
716 
717 
718 #ifdef __nvoc_kern_disp_h_disabled
kdispReadPendingAwakenIntr(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,NvU32 * pCachedIntr,struct THREAD_STATE_NODE * arg4)719 static inline NvBool kdispReadPendingAwakenIntr(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg4) {
720     NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
721     return NV_FALSE;
722 }
723 #else //__nvoc_kern_disp_h_disabled
724 #define kdispReadPendingAwakenIntr(pGpu, pKernelDisplay, pCachedIntr, arg4) kdispReadPendingAwakenIntr_ceaee8(pGpu, pKernelDisplay, pCachedIntr, arg4)
725 #endif //__nvoc_kern_disp_h_disabled
726 
727 #define kdispReadPendingAwakenIntr_HAL(pGpu, pKernelDisplay, pCachedIntr, arg4) kdispReadPendingAwakenIntr(pGpu, pKernelDisplay, pCachedIntr, arg4)

static inline NV_STATUS kdispReadAwakenChannelNumMask_92bfc3(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg3, DISPCHNCLASS arg4, struct THREAD_STATE_NODE *arg5) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispReadAwakenChannelNumMask_v03_00_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg3, DISPCHNCLASS arg4, struct THREAD_STATE_NODE *arg5);

static inline NV_STATUS kdispReadAwakenChannelNumMask_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg3, DISPCHNCLASS arg4, struct THREAD_STATE_NODE *arg5) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispReadAwakenChannelNumMask(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg3, DISPCHNCLASS arg4, struct THREAD_STATE_NODE *arg5) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispReadAwakenChannelNumMask(pGpu, pKernelDisplay, arg3, arg4, arg5) kdispReadAwakenChannelNumMask_92bfc3(pGpu, pKernelDisplay, arg3, arg4, arg5)
#endif //__nvoc_kern_disp_h_disabled

#define kdispReadAwakenChannelNumMask_HAL(pGpu, pKernelDisplay, arg3, arg4, arg5) kdispReadAwakenChannelNumMask(pGpu, pKernelDisplay, arg3, arg4, arg5)
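
//
// Non-normative sketch: once an awaken interrupt is pending, the query above can
// report which channel instances of a given class raised it. The channel-class
// enumerant used below is illustrative only:
//
//     NvU32 channelMask = 0;
//     if (kdispReadAwakenChannelNumMask_HAL(pGpu, pKernelDisplay, &channelMask,
//                                           dispChnClass_Core, pThreadState) == NV_OK)
//     {
//         // Each set bit corresponds to an awakened channel instance.
//     }
//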

NV_STATUS kdispAllocateCommonHandle_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispAllocateCommonHandle(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispAllocateCommonHandle(pGpu, pKernelDisplay) kdispAllocateCommonHandle_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispAllocateCommonHandle_HAL(pGpu, pKernelDisplay) kdispAllocateCommonHandle(pGpu, pKernelDisplay)

void kdispDestroyCommonHandle_IMPL(struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispDestroyCommonHandle(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDestroyCommonHandle(pKernelDisplay) kdispDestroyCommonHandle_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispDestroyCommonHandle_HAL(pKernelDisplay) kdispDestroyCommonHandle(pKernelDisplay)
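
//
// Non-normative note: the allocate/destroy pair above is expected to be used
// symmetrically; the common handle can then be retrieved through
// kdispGetDispCommonHandle() (defined near the end of this header). Illustrative
// ordering only:
//
//     NV_ASSERT_OK_OR_RETURN(kdispAllocateCommonHandle_HAL(pGpu, pKernelDisplay));
//     // ... the common display handle is live here ...
//     kdispDestroyCommonHandle_HAL(pKernelDisplay);
//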

NV_STATUS kdispAllocateSharedMem_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispAllocateSharedMem(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispAllocateSharedMem(pGpu, pKernelDisplay) kdispAllocateSharedMem_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispAllocateSharedMem_HAL(pGpu, pKernelDisplay) kdispAllocateSharedMem(pGpu, pKernelDisplay)

void kdispFreeSharedMem_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispFreeSharedMem(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispFreeSharedMem(pGpu, pKernelDisplay) kdispFreeSharedMem_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispFreeSharedMem_HAL(pGpu, pKernelDisplay) kdispFreeSharedMem(pGpu, pKernelDisplay)

NvBool kdispIsDisplayConnected_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NvBool kdispIsDisplayConnected(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispIsDisplayConnected(pGpu, pKernelDisplay) kdispIsDisplayConnected_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispIsDisplayConnected_HAL(pGpu, pKernelDisplay) kdispIsDisplayConnected(pGpu, pKernelDisplay)

NvU32 kdispGetSupportedDisplayMask_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NvU32 kdispGetSupportedDisplayMask(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return 0;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetSupportedDisplayMask(pGpu, pKernelDisplay) kdispGetSupportedDisplayMask_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispGetSupportedDisplayMask_HAL(pGpu, pKernelDisplay) kdispGetSupportedDisplayMask(pGpu, pKernelDisplay)
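
//
// Non-normative sketch: the two queries above might be combined to decide
// whether any display device can be driven (illustrative only):
//
//     if (kdispIsDisplayConnected_HAL(pGpu, pKernelDisplay))
//     {
//         NvU32 displayMask = kdispGetSupportedDisplayMask_HAL(pGpu, pKernelDisplay);
//         // Each set bit identifies a supported display device.
//     }
//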

static inline void kdispUpdatePdbAfterIpHalInit_b3696a(struct KernelDisplay *pKernelDisplay) {
    return;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispUpdatePdbAfterIpHalInit(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispUpdatePdbAfterIpHalInit(pKernelDisplay) kdispUpdatePdbAfterIpHalInit_b3696a(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispUpdatePdbAfterIpHalInit_HAL(pKernelDisplay) kdispUpdatePdbAfterIpHalInit(pKernelDisplay)

NV_STATUS kdispConstructEngine_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, ENGDESCRIPTOR engDesc);

NV_STATUS kdispStatePreInitLocked_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

NV_STATUS kdispStateInitLocked_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

void kdispStateDestroy_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

NV_STATUS kdispStateLoad_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags);

NV_STATUS kdispStateUnload_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags);
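
//
// Non-normative note: the engine-state entry points above follow the generic
// OBJENGSTATE lifecycle; the driver framework, not client code, drives them. A
// hedged, simplified ordering sketch:
//
//     kdispConstructEngine_IMPL(pGpu, pKernelDisplay, engDesc);
//     kdispStatePreInitLocked_IMPL(pGpu, pKernelDisplay);
//     kdispStateInitLocked_IMPL(pGpu, pKernelDisplay);
//     kdispStateLoad_IMPL(pGpu, pKernelDisplay, flags);
//     // ... display engine in use ...
//     kdispStateUnload_IMPL(pGpu, pKernelDisplay, flags);
//     kdispStateDestroy_IMPL(pGpu, pKernelDisplay);
//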

void kdispRegisterIntrService_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceRecord pRecords[175]);

static inline NvU32 kdispServiceInterrupt_acff5e(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceServiceInterruptArguments *pParams) {
    kdispServiceVblank(pGpu, pKernelDisplay, 0, (4), ((void *)0));
    return NV_OK;
}

static inline NV_STATUS kdispSelectClass_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispSelectClass_v03_00_KERNEL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass);

static inline NV_STATUS kdispGetChannelNum_46f6a7(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispGetChannelNum_v03_00(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum);

static inline void kdispGetDisplayCapsBaseAndSize_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    return;
}

void kdispGetDisplayCapsBaseAndSize_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize);

static inline void kdispGetDisplaySfUserBaseAndSize_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    return;
}

void kdispGetDisplaySfUserBaseAndSize_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize);

static inline NV_STATUS kdispGetDisplayChannelUserBaseAndSize_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispGetDisplayChannelUserBaseAndSize_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize);
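
//
// Non-normative sketch: a display channel's user-accessible window can be
// located by combining the channel-number and base/size queries above. The
// *_HAL dispatch forms used here are assumed to be defined earlier in this
// header; error handling is elided:
//
//     NvU32 channelNum, offset, size;
//     kdispGetChannelNum_HAL(pKernelDisplay, channelClass, channelInstance, &channelNum);
//     kdispGetDisplayChannelUserBaseAndSize_HAL(pGpu, pKernelDisplay, channelClass,
//                                               channelInstance, &offset, &size);
//     // (offset, size) describe that channel's user region within the display
//     // register space.
//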

NvBool kdispGetVgaWorkspaceBase_v04_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset);

static inline NvBool kdispGetVgaWorkspaceBase_491d52(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset) {
    return ((NvBool)(0 != 0));
}

NV_STATUS kdispReadRgLineCountAndFrameCount_v03_00_PHYSICAL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount);

static inline NV_STATUS kdispReadRgLineCountAndFrameCount_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispReadRgLineCountAndFrameCount_v03_00_KERNEL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount);

void kdispRestoreOriginalLsrMinTime_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 origLsrMinTime);

static inline void kdispRestoreOriginalLsrMinTime_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 origLsrMinTime) {
    return;
}

NV_STATUS kdispComputeLsrMinTimeValue_v02_07(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 swapRdyHiLsrMinTime, NvU32 *pComputedLsrMinTime);

static inline NV_STATUS kdispComputeLsrMinTimeValue_56cd7a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 swapRdyHiLsrMinTime, NvU32 *pComputedLsrMinTime) {
    return NV_OK;
}

void kdispSetSwapBarrierLsrMinTime_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pOrigLsrMinTime, NvU32 newLsrMinTime);

static inline void kdispSetSwapBarrierLsrMinTime_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pOrigLsrMinTime, NvU32 newLsrMinTime) {
    return;
}
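
//
// Non-normative note: the set/restore pair above brackets a swap-barrier
// operation; the setter hands back the original LSR_MIN_TIME, which is then
// passed to the restore call afterwards. The *_HAL dispatch forms used here are
// assumed to be defined earlier in this header:
//
//     NvU32 origLsrMinTime;
//     kdispSetSwapBarrierLsrMinTime_HAL(pGpu, pKernelDisplay, head, &origLsrMinTime, newLsrMinTime);
//     // ... swap-barrier window ...
//     kdispRestoreOriginalLsrMinTime_HAL(pGpu, pKernelDisplay, head, origLsrMinTime);
//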

NV_STATUS kdispGetRgScanLock_v02_01(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head0, OBJGPU *pPeerGpu, NvU32 head1, NvBool *pMasterScanLock, NvU32 *pMasterScanLockPin, NvBool *pSlaveScanLock, NvU32 *pSlaveScanLockPin);

static inline NV_STATUS kdispGetRgScanLock_92bfc3(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head0, OBJGPU *pPeerGpu, NvU32 head1, NvBool *pMasterScanLock, NvU32 *pMasterScanLockPin, NvBool *pSlaveScanLock, NvU32 *pSlaveScanLockPin) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispDetectSliLink_v04_00(struct KernelDisplay *pKernelDisplay, OBJGPU *pParentGpu, OBJGPU *pChildGpu, NvU32 ParentDrPort, NvU32 ChildDrPort);

static inline NV_STATUS kdispDetectSliLink_92bfc3(struct KernelDisplay *pKernelDisplay, OBJGPU *pParentGpu, OBJGPU *pChildGpu, NvU32 ParentDrPort, NvU32 ChildDrPort) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}

static inline void kdispInitRegistryOverrides_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    return;
}

void kdispInitRegistryOverrides_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

NvU32 kdispGetPBTargetAperture_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 memAddrSpace, NvU32 cacheSnoop);

static inline NvU32 kdispGetPBTargetAperture_15a734(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 memAddrSpace, NvU32 cacheSnoop) {
    return 0U;
}

void kdispDestruct_IMPL(struct KernelDisplay *pKernelDisplay);

#define __nvoc_kdispDestruct(pKernelDisplay) kdispDestruct_IMPL(pKernelDisplay)
NV_STATUS kdispConstructKhead_IMPL(struct KernelDisplay *pKernelDisplay);

#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispConstructKhead(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispConstructKhead(pKernelDisplay) kdispConstructKhead_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

void kdispDestructKhead_IMPL(struct KernelDisplay *pKernelDisplay);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispDestructKhead(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDestructKhead(pKernelDisplay) kdispDestructKhead_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

NV_STATUS kdispGetIntChnClsForHwCls_IMPL(struct KernelDisplay *pKernelDisplay, NvU32 hwClass, DISPCHNCLASS *pDispChnClass);

#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispGetIntChnClsForHwCls(struct KernelDisplay *pKernelDisplay, NvU32 hwClass, DISPCHNCLASS *pDispChnClass) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetIntChnClsForHwCls(pKernelDisplay, hwClass, pDispChnClass) kdispGetIntChnClsForHwCls_IMPL(pKernelDisplay, hwClass, pDispChnClass)
#endif //__nvoc_kern_disp_h_disabled

void kdispNotifyCommonEvent_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispNotifyCommonEvent(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispNotifyCommonEvent(pGpu, pKernelDisplay, notifyIndex, pNotifyParams) kdispNotifyCommonEvent_IMPL(pGpu, pKernelDisplay, notifyIndex, pNotifyParams)
#endif //__nvoc_kern_disp_h_disabled

void kdispNotifyEvent_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams, NvU32 notifyParamsSize, NvV32 info32, NvV16 info16);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispNotifyEvent(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams, NvU32 notifyParamsSize, NvV32 info32, NvV16 info16) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispNotifyEvent(pGpu, pKernelDisplay, notifyIndex, pNotifyParams, notifyParamsSize, info32, info16) kdispNotifyEvent_IMPL(pGpu, pKernelDisplay, notifyIndex, pNotifyParams, notifyParamsSize, info32, info16)
#endif //__nvoc_kern_disp_h_disabled

void kdispSetWarPurgeSatellitesOnCoreFree_IMPL(struct KernelDisplay *pKernelDisplay, NvBool value);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispSetWarPurgeSatellitesOnCoreFree(struct KernelDisplay *pKernelDisplay, NvBool value) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispSetWarPurgeSatellitesOnCoreFree(pKernelDisplay, value) kdispSetWarPurgeSatellitesOnCoreFree_IMPL(pKernelDisplay, value)
#endif //__nvoc_kern_disp_h_disabled

#undef PRIVATE_FIELD


void
dispdeviceFillVgaSavedDisplayState( OBJGPU *pGpu,
    NvU64   vgaAddr,
    NvU8    vgaMemType,
    NvBool  vgaValid,
    NvU64   workspaceAddr,
    NvU8    workspaceMemType,
    NvBool  workspaceValid,
    NvBool  baseValid,
    NvBool  workspaceBaseValid
);
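
//
// Non-normative sketch of a call to the helper above; all argument values are
// illustrative placeholders:
//
//     dispdeviceFillVgaSavedDisplayState(pGpu,
//                                        vgaAddr,          // saved VGA surface address
//                                        vgaMemType,       // memory type of that surface
//                                        NV_TRUE,          // vgaValid
//                                        workspaceAddr,    // saved VGA workspace address
//                                        workspaceMemType, // memory type of the workspace
//                                        NV_TRUE,          // workspaceValid
//                                        NV_TRUE,          // baseValid
//                                        NV_TRUE);         // workspaceBaseValid
//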

/*! PushBuffer Target Aperture Types */
typedef enum
{
    IOVA,
    PHYS_NVM,
    PHYS_PCI,
    PHYS_PCI_COHERENT
} PBTARGETAPERTURE;
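
//
// Non-normative note: kdispGetPBTargetAperture_v03_00() (declared above) returns
// an NvU32 that is assumed here to correspond to this enum, selecting which
// aperture a display pushbuffer should target for a given memory address space
// and cache-snoop setting. Hedged consumer sketch (mapping is illustrative only):
//
//     NvU32 pbTarget = kdispGetPBTargetAperture_v03_00(pGpu, pKernelDisplay,
//                                                      memAddrSpace, cacheSnoop);
//     NvBool bSysmemTarget = (pbTarget == PHYS_PCI) || (pbTarget == PHYS_PCI_COHERENT);
//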

static NV_INLINE struct KernelHead*
kdispGetHead
(
    struct KernelDisplay *pKernelDisplay,
    NvU32 head
)
{
    if (head >= OBJ_MAX_HEADS)
    {
        return NULL;
    }

    return pKernelDisplay->pKernelHead[head];
}

static NV_INLINE NvU32
kdispGetNumHeads(struct KernelDisplay *pKernelDisplay)
{
    NV_ASSERT(pKernelDisplay != NULL);
    return pKernelDisplay->numHeads;
}

static NV_INLINE NvU32
kdispGetDeferredVblankHeadMask(struct KernelDisplay *pKernelDisplay)
{
    return pKernelDisplay->deferredVblankHeadMask;
}

static NV_INLINE void
kdispSetDeferredVblankHeadMask(struct KernelDisplay *pKernelDisplay, NvU32 vblankHeadMask)
{
    pKernelDisplay->deferredVblankHeadMask = vblankHeadMask;
}

static NV_INLINE NvHandle
kdispGetInternalClientHandle(struct KernelDisplay *pKernelDisplay)
{
    return pKernelDisplay->hInternalClient;
}

static NV_INLINE NvHandle
kdispGetDispCommonHandle(struct KernelDisplay *pKernelDisplay)
{
    return pKernelDisplay->hDispCommonHandle;
}
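
//
// Non-normative usage sketch for the inline accessors above: building a vblank
// head mask from the populated heads (illustrative only):
//
//     NvU32 headMask = 0;
//     NvU32 head;
//     for (head = 0; head < kdispGetNumHeads(pKernelDisplay); head++)
//     {
//         if (kdispGetHead(pKernelDisplay, head) != NULL)
//         {
//             headMask |= NVBIT(head);
//         }
//     }
//     kdispSetDeferredVblankHeadMask(pKernelDisplay, headMask);
//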

#endif // KERN_DISP_H

#ifdef __cplusplus
} // extern "C"
#endif

#endif // _G_KERN_DISP_NVOC_H_