
#ifndef _G_KERN_DISP_NVOC_H_
#define _G_KERN_DISP_NVOC_H_
#include "nvoc/runtime.h"

// Version of generated metadata structures
#ifdef NVOC_METADATA_VERSION
#undef NVOC_METADATA_VERSION
#endif
#define NVOC_METADATA_VERSION 1

#ifdef __cplusplus
extern "C" {
#endif

/*
 * SPDX-FileCopyrightText: Copyright (c) 2020-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#pragma once
#include "g_kern_disp_nvoc.h"

#ifndef KERN_DISP_H
#define KERN_DISP_H

/******************************************************************************
*
* Kernel Display module header
* This file contains functions managing display on CPU RM
*
******************************************************************************/

#include "gpu/eng_state.h"
#include "gpu/gpu_halspec.h"
#include "gpu/disp/kern_disp_type.h"
#include "gpu/disp/kern_disp_max.h"
#include "gpu/mem_mgr/context_dma.h"
#include "gpu/disp/vblank_callback/vblank.h"

#include "kernel/gpu/intr/intr_service.h"

#include "ctrl/ctrl2080/ctrl2080internal.h"

typedef NV2080_CTRL_INTERNAL_DISPLAY_GET_STATIC_INFO_PARAMS KernelDisplayStaticInfo;

typedef struct
{
    NvU32 kHeadVblankCount[OBJ_MAX_HEADS];
} KernelDisplaySharedMem;


struct DispChannel;

#ifndef __NVOC_CLASS_DispChannel_TYPEDEF__
#define __NVOC_CLASS_DispChannel_TYPEDEF__
typedef struct DispChannel DispChannel;
#endif /* __NVOC_CLASS_DispChannel_TYPEDEF__ */

#ifndef __nvoc_class_id_DispChannel
#define __nvoc_class_id_DispChannel 0xbd2ff3
#endif /* __nvoc_class_id_DispChannel */



struct RgLineCallback;

#ifndef __NVOC_CLASS_RgLineCallback_TYPEDEF__
#define __NVOC_CLASS_RgLineCallback_TYPEDEF__
typedef struct RgLineCallback RgLineCallback;
#endif /* __NVOC_CLASS_RgLineCallback_TYPEDEF__ */

#ifndef __nvoc_class_id_RgLineCallback
#define __nvoc_class_id_RgLineCallback 0xa3ff1c
#endif /* __nvoc_class_id_RgLineCallback */



#define KDISP_GET_HEAD(pKernelDisplay, headID) (RMCFG_MODULE_KERNEL_HEAD ? kdispGetHead(pKernelDisplay, headID) : NULL)

/*!
 * KernelDisp is a logical abstraction of the GPU Display Engine. The
 * Public API of the Display Engine is exposed through this object, and any
 * interfaces which do not manage the underlying Display hardware can be
 * managed by this object.
 */

// Private field names are wrapped in PRIVATE_FIELD, which does nothing for
// the matching C source file, but causes diagnostics to be issued if another
// source file references the field.
#ifdef NVOC_KERN_DISP_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
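
//
// Illustrative sketch (an assumption for clarity, not generated NVOC output):
// PRIVATE_FIELD is intended to wrap private member names inside an NVOC class
// body. The field name below is hypothetical.
//
//     struct SomeNvocClass {
//         NvU32 PRIVATE_FIELD(privCounter);  // expands to plain "privCounter" only in the
//                                            // .c file that defines
//                                            // NVOC_KERN_DISP_H_PRIVATE_ACCESS_ALLOWED
//     };
//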


// Metadata including vtable
struct NVOC_VTABLE__KernelDisplay;


struct KernelDisplay {

    // Metadata
    const struct NVOC_RTTI *__nvoc_rtti;
    const struct NVOC_VTABLE__KernelDisplay *__nvoc_vtable;

    // Parent (i.e. superclass or base class) object pointers
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct IntrService __nvoc_base_IntrService;

    // Ancestor object pointers for `staticCast` feature
    struct Object *__nvoc_pbase_Object; // obj super^2
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE; // engstate super
    struct IntrService *__nvoc_pbase_IntrService; // intrserv super
    struct KernelDisplay *__nvoc_pbase_KernelDisplay; // kdisp

    // Vtable with 13 per-object function pointers
    NV_STATUS (*__kdispSelectClass__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32); // halified (2 hals) body
    NV_STATUS (*__kdispGetChannelNum__)(struct KernelDisplay * /*this*/, DISPCHNCLASS, NvU32, NvU32 *); // halified (2 hals) body
    void (*__kdispGetDisplayCapsBaseAndSize__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32 *, NvU32 *); // halified (2 hals) body
    void (*__kdispGetDisplaySfUserBaseAndSize__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32 *, NvU32 *); // halified (2 hals) body
    NV_STATUS (*__kdispGetDisplayChannelUserBaseAndSize__)(OBJGPU *, struct KernelDisplay * /*this*/, DISPCHNCLASS, NvU32, NvU32 *, NvU32 *); // halified (2 hals) body
    NvBool (*__kdispGetVgaWorkspaceBase__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU64 *); // halified (2 hals) body
    NV_STATUS (*__kdispReadRgLineCountAndFrameCount__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, NvU32 *, NvU32 *); // halified (2 hals) body
    void (*__kdispRestoreOriginalLsrMinTime__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, NvU32); // halified (2 hals) body
    NV_STATUS (*__kdispComputeLsrMinTimeValue__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, NvU32, NvU32 *); // halified (2 hals) body
    void (*__kdispSetSwapBarrierLsrMinTime__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, NvU32 *, NvU32); // halified (2 hals) body
    NV_STATUS (*__kdispGetRgScanLock__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, OBJGPU *, NvU32, NvBool *, NvU32 *, NvBool *, NvU32 *); // halified (2 hals) body
    NV_STATUS (*__kdispDetectSliLink__)(struct KernelDisplay * /*this*/, OBJGPU *, OBJGPU *, NvU32, NvU32); // halified (2 hals) body
    NvU32 (*__kdispGetPBTargetAperture__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32, NvU32); // halified (2 hals) body

    // 4 PDB properties
    NvBool PDB_PROP_KDISP_IMP_ENABLE;
    NvBool PDB_PROP_KDISP_IMP_ALLOC_BW_IN_KERNEL_RM_DEF;
    NvBool PDB_PROP_KDISP_FEATURE_STRETCH_VBLANK_CAPABLE;
    NvBool PDB_PROP_KDISP_IN_AWAKEN_INTR;

    // Data members
    struct DisplayInstanceMemory *pInst;
    struct KernelHead *pKernelHead[8];
    const KernelDisplayStaticInfo *pStaticInfo;
    NvBool bWarPurgeSatellitesOnCoreFree;
    struct RgLineCallback *rgLineCallbackPerHead[8][2];
    NvU32 isrVblankHeads;
    NvBool bExtdevIntrSupported;
    NvU32 numHeads;
    NvU32 deferredVblankHeadMask;
    NvHandle hInternalClient;
    NvHandle hInternalDevice;
    NvHandle hInternalSubdevice;
    NvHandle hDispCommonHandle;
    MEMORY_DESCRIPTOR *pSharedMemDesc;
    KernelDisplaySharedMem *pSharedData;
    NvBool bFeatureStretchVblankCapable;
    PORT_SPINLOCK *pVblankSpinLock;
};


// Metadata including vtable with 18 function pointers plus superclass metadata
struct NVOC_VTABLE__KernelDisplay {
    const struct NVOC_VTABLE__OBJENGSTATE OBJENGSTATE; // (engstate) 14 function pointers
    const struct NVOC_VTABLE__IntrService IntrService; // (intrserv) 4 function pointers

    NV_STATUS (*__kdispConstructEngine__)(OBJGPU *, struct KernelDisplay * /*this*/, ENGDESCRIPTOR); // virtual override (engstate) base (engstate)
    NV_STATUS (*__kdispStatePreInitLocked__)(OBJGPU *, struct KernelDisplay * /*this*/); // virtual override (engstate) base (engstate)
    NV_STATUS (*__kdispStateInitLocked__)(OBJGPU *, struct KernelDisplay * /*this*/); // virtual override (engstate) base (engstate)
    void (*__kdispStateDestroy__)(OBJGPU *, struct KernelDisplay * /*this*/); // virtual override (engstate) base (engstate)
    NV_STATUS (*__kdispStateLoad__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32); // virtual override (engstate) base (engstate)
    NV_STATUS (*__kdispStateUnload__)(OBJGPU *, struct KernelDisplay * /*this*/, NvU32); // virtual override (engstate) base (engstate)
    void (*__kdispRegisterIntrService__)(OBJGPU *, struct KernelDisplay * /*this*/, IntrServiceRecord *); // virtual override (intrserv) base (intrserv)
    NvU32 (*__kdispServiceInterrupt__)(OBJGPU *, struct KernelDisplay * /*this*/, IntrServiceServiceInterruptArguments *); // virtual halified (singleton optimized) override (intrserv) base (intrserv) body
    void (*__kdispInitMissing__)(struct OBJGPU *, struct KernelDisplay * /*this*/); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStatePreInitUnlocked__)(struct OBJGPU *, struct KernelDisplay * /*this*/); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStateInitUnlocked__)(struct OBJGPU *, struct KernelDisplay * /*this*/); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStatePreLoad__)(struct OBJGPU *, struct KernelDisplay * /*this*/, NvU32); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStatePostLoad__)(struct OBJGPU *, struct KernelDisplay * /*this*/, NvU32); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStatePreUnload__)(struct OBJGPU *, struct KernelDisplay * /*this*/, NvU32); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kdispStatePostUnload__)(struct OBJGPU *, struct KernelDisplay * /*this*/, NvU32); // virtual inherited (engstate) base (engstate)
    NvBool (*__kdispIsPresent__)(struct OBJGPU *, struct KernelDisplay * /*this*/); // virtual inherited (engstate) base (engstate)
    NvBool (*__kdispClearInterrupt__)(OBJGPU *, struct KernelDisplay * /*this*/, IntrServiceClearInterruptArguments *); // virtual inherited (intrserv) base (intrserv)
    NV_STATUS (*__kdispServiceNotificationInterrupt__)(OBJGPU *, struct KernelDisplay * /*this*/, IntrServiceServiceNotificationInterruptArguments *); // virtual inherited (intrserv) base (intrserv)
};

#ifndef __NVOC_CLASS_KernelDisplay_TYPEDEF__
#define __NVOC_CLASS_KernelDisplay_TYPEDEF__
typedef struct KernelDisplay KernelDisplay;
#endif /* __NVOC_CLASS_KernelDisplay_TYPEDEF__ */

#ifndef __nvoc_class_id_KernelDisplay
#define __nvoc_class_id_KernelDisplay 0x55952e
#endif /* __nvoc_class_id_KernelDisplay */

// Casting support
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelDisplay;

#define __staticCast_KernelDisplay(pThis) \
    ((pThis)->__nvoc_pbase_KernelDisplay)

#ifdef __nvoc_kern_disp_h_disabled
#define __dynamicCast_KernelDisplay(pThis) ((KernelDisplay*)NULL)
#else //__nvoc_kern_disp_h_disabled
#define __dynamicCast_KernelDisplay(pThis) \
    ((KernelDisplay*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelDisplay)))
#endif //__nvoc_kern_disp_h_disabled
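
//
// Illustrative usage sketch (assumption, not generated output): casting a
// generic NVOC object pointer to KernelDisplay; pSomeObject is hypothetical.
//
//     KernelDisplay *pKernelDisplay = __dynamicCast_KernelDisplay(pSomeObject);
//     if (pKernelDisplay == NULL)
//         return NV_ERR_INVALID_OBJECT;  // not a KernelDisplay, or class disabled
//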

// Property macros
#define PDB_PROP_KDISP_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KDISP_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
#define PDB_PROP_KDISP_IN_AWAKEN_INTR_BASE_CAST
#define PDB_PROP_KDISP_IN_AWAKEN_INTR_BASE_NAME PDB_PROP_KDISP_IN_AWAKEN_INTR
#define PDB_PROP_KDISP_IMP_ALLOC_BW_IN_KERNEL_RM_DEF_BASE_CAST
#define PDB_PROP_KDISP_IMP_ALLOC_BW_IN_KERNEL_RM_DEF_BASE_NAME PDB_PROP_KDISP_IMP_ALLOC_BW_IN_KERNEL_RM_DEF
#define PDB_PROP_KDISP_IMP_ENABLE_BASE_CAST
#define PDB_PROP_KDISP_IMP_ENABLE_BASE_NAME PDB_PROP_KDISP_IMP_ENABLE
#define PDB_PROP_KDISP_FEATURE_STRETCH_VBLANK_CAPABLE_BASE_CAST
#define PDB_PROP_KDISP_FEATURE_STRETCH_VBLANK_CAPABLE_BASE_NAME PDB_PROP_KDISP_FEATURE_STRETCH_VBLANK_CAPABLE
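
//
// Illustrative sketch (assumption): the *_BASE_CAST / *_BASE_NAME pairs above
// let the generic object property accessors resolve each property to the
// structure that actually stores it, e.g.
//
//     if (pKernelDisplay->getProperty(pKernelDisplay, PDB_PROP_KDISP_IMP_ENABLE))
//     {
//         // IMP is enabled for this display engine instance
//     }
//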

NV_STATUS __nvoc_objCreateDynamic_KernelDisplay(KernelDisplay**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelDisplay(KernelDisplay**, Dynamic*, NvU32);
#define __objCreate_KernelDisplay(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelDisplay((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
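
//
// Illustrative usage sketch (assumption): KernelDisplay is normally created by
// the engine infrastructure, but direct creation would follow the standard
// NVOC pattern; pParentObj and the zero flags value are hypothetical.
//
//     KernelDisplay *pKernelDisplay = NULL;
//     NV_STATUS status = __objCreate_KernelDisplay(&pKernelDisplay, pParentObj, 0);
//     if (status != NV_OK)
//         return status;
//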


// Wrapper macros
#define kdispConstructEngine_FNPTR(pKernelDisplay) pKernelDisplay->__nvoc_vtable->__kdispConstructEngine__
#define kdispConstructEngine(pGpu, pKernelDisplay, engDesc) kdispConstructEngine_DISPATCH(pGpu, pKernelDisplay, engDesc)
#define kdispStatePreInitLocked_FNPTR(pKernelDisplay) pKernelDisplay->__nvoc_vtable->__kdispStatePreInitLocked__
#define kdispStatePreInitLocked(pGpu, pKernelDisplay) kdispStatePreInitLocked_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateInitLocked_FNPTR(pKernelDisplay) pKernelDisplay->__nvoc_vtable->__kdispStateInitLocked__
#define kdispStateInitLocked(pGpu, pKernelDisplay) kdispStateInitLocked_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateDestroy_FNPTR(pKernelDisplay) pKernelDisplay->__nvoc_vtable->__kdispStateDestroy__
#define kdispStateDestroy(pGpu, pKernelDisplay) kdispStateDestroy_DISPATCH(pGpu, pKernelDisplay)
#define kdispStateLoad_FNPTR(pKernelDisplay) pKernelDisplay->__nvoc_vtable->__kdispStateLoad__
#define kdispStateLoad(pGpu, pKernelDisplay, flags) kdispStateLoad_DISPATCH(pGpu, pKernelDisplay, flags)
#define kdispStateUnload_FNPTR(pKernelDisplay) pKernelDisplay->__nvoc_vtable->__kdispStateUnload__
#define kdispStateUnload(pGpu, pKernelDisplay, flags) kdispStateUnload_DISPATCH(pGpu, pKernelDisplay, flags)
#define kdispRegisterIntrService_FNPTR(pKernelDisplay) pKernelDisplay->__nvoc_vtable->__kdispRegisterIntrService__
#define kdispRegisterIntrService(pGpu, pKernelDisplay, pRecords) kdispRegisterIntrService_DISPATCH(pGpu, pKernelDisplay, pRecords)
#define kdispServiceInterrupt_FNPTR(pKernelDisplay) pKernelDisplay->__nvoc_vtable->__kdispServiceInterrupt__
#define kdispServiceInterrupt(pGpu, pKernelDisplay, pParams) kdispServiceInterrupt_DISPATCH(pGpu, pKernelDisplay, pParams)
#define kdispServiceInterrupt_HAL(pGpu, pKernelDisplay, pParams) kdispServiceInterrupt_DISPATCH(pGpu, pKernelDisplay, pParams)
#define kdispSelectClass_FNPTR(pKernelDisplay) pKernelDisplay->__kdispSelectClass__
#define kdispSelectClass(pGpu, pKernelDisplay, swClass) kdispSelectClass_DISPATCH(pGpu, pKernelDisplay, swClass)
#define kdispSelectClass_HAL(pGpu, pKernelDisplay, swClass) kdispSelectClass_DISPATCH(pGpu, pKernelDisplay, swClass)
#define kdispGetChannelNum_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetChannelNum__
#define kdispGetChannelNum(pKernelDisplay, channelClass, channelInstance, pChannelNum) kdispGetChannelNum_DISPATCH(pKernelDisplay, channelClass, channelInstance, pChannelNum)
#define kdispGetChannelNum_HAL(pKernelDisplay, channelClass, channelInstance, pChannelNum) kdispGetChannelNum_DISPATCH(pKernelDisplay, channelClass, channelInstance, pChannelNum)
#define kdispGetDisplayCapsBaseAndSize_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetDisplayCapsBaseAndSize__
#define kdispGetDisplayCapsBaseAndSize(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplayCapsBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplayCapsBaseAndSize_HAL(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplayCapsBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplaySfUserBaseAndSize_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetDisplaySfUserBaseAndSize__
#define kdispGetDisplaySfUserBaseAndSize(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplaySfUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplaySfUserBaseAndSize_HAL(pGpu, pKernelDisplay, pOffset, pSize) kdispGetDisplaySfUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, pOffset, pSize)
#define kdispGetDisplayChannelUserBaseAndSize_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetDisplayChannelUserBaseAndSize__
#define kdispGetDisplayChannelUserBaseAndSize(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize) kdispGetDisplayChannelUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize)
#define kdispGetDisplayChannelUserBaseAndSize_HAL(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize) kdispGetDisplayChannelUserBaseAndSize_DISPATCH(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize)
#define kdispGetVgaWorkspaceBase_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetVgaWorkspaceBase__
#define kdispGetVgaWorkspaceBase(pGpu, pKernelDisplay, pOffset) kdispGetVgaWorkspaceBase_DISPATCH(pGpu, pKernelDisplay, pOffset)
#define kdispGetVgaWorkspaceBase_HAL(pGpu, pKernelDisplay, pOffset) kdispGetVgaWorkspaceBase_DISPATCH(pGpu, pKernelDisplay, pOffset)
#define kdispReadRgLineCountAndFrameCount_FNPTR(pKernelDisplay) pKernelDisplay->__kdispReadRgLineCountAndFrameCount__
#define kdispReadRgLineCountAndFrameCount(pGpu, pKernelDisplay, head, pLineCount, pFrameCount) kdispReadRgLineCountAndFrameCount_DISPATCH(pGpu, pKernelDisplay, head, pLineCount, pFrameCount)
#define kdispReadRgLineCountAndFrameCount_HAL(pGpu, pKernelDisplay, head, pLineCount, pFrameCount) kdispReadRgLineCountAndFrameCount_DISPATCH(pGpu, pKernelDisplay, head, pLineCount, pFrameCount)
#define kdispRestoreOriginalLsrMinTime_FNPTR(pKernelDisplay) pKernelDisplay->__kdispRestoreOriginalLsrMinTime__
#define kdispRestoreOriginalLsrMinTime(pGpu, pKernelDisplay, head, origLsrMinTime) kdispRestoreOriginalLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, origLsrMinTime)
#define kdispRestoreOriginalLsrMinTime_HAL(pGpu, pKernelDisplay, head, origLsrMinTime) kdispRestoreOriginalLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, origLsrMinTime)
#define kdispComputeLsrMinTimeValue_FNPTR(pKernelDisplay) pKernelDisplay->__kdispComputeLsrMinTimeValue__
#define kdispComputeLsrMinTimeValue(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime) kdispComputeLsrMinTimeValue_DISPATCH(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime)
#define kdispComputeLsrMinTimeValue_HAL(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime) kdispComputeLsrMinTimeValue_DISPATCH(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime)
#define kdispSetSwapBarrierLsrMinTime_FNPTR(pKernelDisplay) pKernelDisplay->__kdispSetSwapBarrierLsrMinTime__
#define kdispSetSwapBarrierLsrMinTime(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime) kdispSetSwapBarrierLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime)
#define kdispSetSwapBarrierLsrMinTime_HAL(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime) kdispSetSwapBarrierLsrMinTime_DISPATCH(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime)
#define kdispGetRgScanLock_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetRgScanLock__
#define kdispGetRgScanLock(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin) kdispGetRgScanLock_DISPATCH(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin)
#define kdispGetRgScanLock_HAL(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin) kdispGetRgScanLock_DISPATCH(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin)
#define kdispDetectSliLink_FNPTR(pKernelDisplay) pKernelDisplay->__kdispDetectSliLink__
#define kdispDetectSliLink(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort) kdispDetectSliLink_DISPATCH(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort)
#define kdispDetectSliLink_HAL(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort) kdispDetectSliLink_DISPATCH(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort)
#define kdispGetPBTargetAperture_FNPTR(pKernelDisplay) pKernelDisplay->__kdispGetPBTargetAperture__
#define kdispGetPBTargetAperture(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop) kdispGetPBTargetAperture_DISPATCH(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop)
#define kdispGetPBTargetAperture_HAL(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop) kdispGetPBTargetAperture_DISPATCH(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop)
#define kdispInitMissing_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__nvoc_vtable->__engstateInitMissing__
#define kdispInitMissing(pGpu, pEngstate) kdispInitMissing_DISPATCH(pGpu, pEngstate)
#define kdispStatePreInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__nvoc_vtable->__engstateStatePreInitUnlocked__
#define kdispStatePreInitUnlocked(pGpu, pEngstate) kdispStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kdispStateInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__nvoc_vtable->__engstateStateInitUnlocked__
#define kdispStateInitUnlocked(pGpu, pEngstate) kdispStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kdispStatePreLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__nvoc_vtable->__engstateStatePreLoad__
#define kdispStatePreLoad(pGpu, pEngstate, arg3) kdispStatePreLoad_DISPATCH(pGpu, pEngstate, arg3)
#define kdispStatePostLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__nvoc_vtable->__engstateStatePostLoad__
#define kdispStatePostLoad(pGpu, pEngstate, arg3) kdispStatePostLoad_DISPATCH(pGpu, pEngstate, arg3)
#define kdispStatePreUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__nvoc_vtable->__engstateStatePreUnload__
#define kdispStatePreUnload(pGpu, pEngstate, arg3) kdispStatePreUnload_DISPATCH(pGpu, pEngstate, arg3)
#define kdispStatePostUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__nvoc_vtable->__engstateStatePostUnload__
#define kdispStatePostUnload(pGpu, pEngstate, arg3) kdispStatePostUnload_DISPATCH(pGpu, pEngstate, arg3)
#define kdispIsPresent_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__nvoc_vtable->__engstateIsPresent__
#define kdispIsPresent(pGpu, pEngstate) kdispIsPresent_DISPATCH(pGpu, pEngstate)
#define kdispClearInterrupt_FNPTR(pIntrService) pIntrService->__nvoc_base_IntrService.__nvoc_vtable->__intrservClearInterrupt__
#define kdispClearInterrupt(pGpu, pIntrService, pParams) kdispClearInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define kdispServiceNotificationInterrupt_FNPTR(pIntrService) pIntrService->__nvoc_base_IntrService.__nvoc_vtable->__intrservServiceNotificationInterrupt__
#define kdispServiceNotificationInterrupt(pGpu, pIntrService, pParams) kdispServiceNotificationInterrupt_DISPATCH(pGpu, pIntrService, pParams)

// Dispatch functions
static inline NV_STATUS kdispConstructEngine_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, ENGDESCRIPTOR engDesc) {
    return pKernelDisplay->__nvoc_vtable->__kdispConstructEngine__(pGpu, pKernelDisplay, engDesc);
}

static inline NV_STATUS kdispStatePreInitLocked_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    return pKernelDisplay->__nvoc_vtable->__kdispStatePreInitLocked__(pGpu, pKernelDisplay);
}

static inline NV_STATUS kdispStateInitLocked_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    return pKernelDisplay->__nvoc_vtable->__kdispStateInitLocked__(pGpu, pKernelDisplay);
}

static inline void kdispStateDestroy_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    pKernelDisplay->__nvoc_vtable->__kdispStateDestroy__(pGpu, pKernelDisplay);
}

static inline NV_STATUS kdispStateLoad_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags) {
    return pKernelDisplay->__nvoc_vtable->__kdispStateLoad__(pGpu, pKernelDisplay, flags);
}

static inline NV_STATUS kdispStateUnload_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags) {
    return pKernelDisplay->__nvoc_vtable->__kdispStateUnload__(pGpu, pKernelDisplay, flags);
}

static inline void kdispRegisterIntrService_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceRecord pRecords[175]) {
    pKernelDisplay->__nvoc_vtable->__kdispRegisterIntrService__(pGpu, pKernelDisplay, pRecords);
}

static inline NvU32 kdispServiceInterrupt_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceServiceInterruptArguments *pParams) {
    return pKernelDisplay->__nvoc_vtable->__kdispServiceInterrupt__(pGpu, pKernelDisplay, pParams);
}

static inline NV_STATUS kdispSelectClass_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass) {
    return pKernelDisplay->__kdispSelectClass__(pGpu, pKernelDisplay, swClass);
}

static inline NV_STATUS kdispGetChannelNum_DISPATCH(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum) {
    return pKernelDisplay->__kdispGetChannelNum__(pKernelDisplay, channelClass, channelInstance, pChannelNum);
}

static inline void kdispGetDisplayCapsBaseAndSize_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    pKernelDisplay->__kdispGetDisplayCapsBaseAndSize__(pGpu, pKernelDisplay, pOffset, pSize);
}

static inline void kdispGetDisplaySfUserBaseAndSize_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    pKernelDisplay->__kdispGetDisplaySfUserBaseAndSize__(pGpu, pKernelDisplay, pOffset, pSize);
}

static inline NV_STATUS kdispGetDisplayChannelUserBaseAndSize_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize) {
    return pKernelDisplay->__kdispGetDisplayChannelUserBaseAndSize__(pGpu, pKernelDisplay, channelClass, channelInstance, pOffset, pSize);
}

static inline NvBool kdispGetVgaWorkspaceBase_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset) {
    return pKernelDisplay->__kdispGetVgaWorkspaceBase__(pGpu, pKernelDisplay, pOffset);
}

static inline NV_STATUS kdispReadRgLineCountAndFrameCount_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount) {
    return pKernelDisplay->__kdispReadRgLineCountAndFrameCount__(pGpu, pKernelDisplay, head, pLineCount, pFrameCount);
}

static inline void kdispRestoreOriginalLsrMinTime_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 origLsrMinTime) {
    pKernelDisplay->__kdispRestoreOriginalLsrMinTime__(pGpu, pKernelDisplay, head, origLsrMinTime);
}

static inline NV_STATUS kdispComputeLsrMinTimeValue_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 swapRdyHiLsrMinTime, NvU32 *pComputedLsrMinTime) {
    return pKernelDisplay->__kdispComputeLsrMinTimeValue__(pGpu, pKernelDisplay, head, swapRdyHiLsrMinTime, pComputedLsrMinTime);
}

static inline void kdispSetSwapBarrierLsrMinTime_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pOrigLsrMinTime, NvU32 newLsrMinTime) {
    pKernelDisplay->__kdispSetSwapBarrierLsrMinTime__(pGpu, pKernelDisplay, head, pOrigLsrMinTime, newLsrMinTime);
}

static inline NV_STATUS kdispGetRgScanLock_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head0, OBJGPU *pPeerGpu, NvU32 head1, NvBool *pMasterScanLock, NvU32 *pMasterScanLockPin, NvBool *pSlaveScanLock, NvU32 *pSlaveScanLockPin) {
    return pKernelDisplay->__kdispGetRgScanLock__(pGpu, pKernelDisplay, head0, pPeerGpu, head1, pMasterScanLock, pMasterScanLockPin, pSlaveScanLock, pSlaveScanLockPin);
}

static inline NV_STATUS kdispDetectSliLink_DISPATCH(struct KernelDisplay *pKernelDisplay, OBJGPU *pParentGpu, OBJGPU *pChildGpu, NvU32 ParentDrPort, NvU32 ChildDrPort) {
    return pKernelDisplay->__kdispDetectSliLink__(pKernelDisplay, pParentGpu, pChildGpu, ParentDrPort, ChildDrPort);
}

static inline NvU32 kdispGetPBTargetAperture_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 memAddrSpace, NvU32 cacheSnoop) {
    return pKernelDisplay->__kdispGetPBTargetAperture__(pGpu, pKernelDisplay, memAddrSpace, cacheSnoop);
}

static inline void kdispInitMissing_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate) {
    pEngstate->__nvoc_vtable->__kdispInitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS kdispStatePreInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate) {
    return pEngstate->__nvoc_vtable->__kdispStatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS kdispStateInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate) {
    return pEngstate->__nvoc_vtable->__kdispStateInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS kdispStatePreLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate, NvU32 arg3) {
    return pEngstate->__nvoc_vtable->__kdispStatePreLoad__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kdispStatePostLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate, NvU32 arg3) {
    return pEngstate->__nvoc_vtable->__kdispStatePostLoad__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kdispStatePreUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate, NvU32 arg3) {
    return pEngstate->__nvoc_vtable->__kdispStatePreUnload__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kdispStatePostUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate, NvU32 arg3) {
    return pEngstate->__nvoc_vtable->__kdispStatePostUnload__(pGpu, pEngstate, arg3);
}

static inline NvBool kdispIsPresent_DISPATCH(struct OBJGPU *pGpu, struct KernelDisplay *pEngstate) {
    return pEngstate->__nvoc_vtable->__kdispIsPresent__(pGpu, pEngstate);
}

static inline NvBool kdispClearInterrupt_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pIntrService, IntrServiceClearInterruptArguments *pParams) {
    return pIntrService->__nvoc_vtable->__kdispClearInterrupt__(pGpu, pIntrService, pParams);
}

static inline NV_STATUS kdispServiceNotificationInterrupt_DISPATCH(OBJGPU *pGpu, struct KernelDisplay *pIntrService, IntrServiceServiceNotificationInterruptArguments *pParams) {
    return pIntrService->__nvoc_vtable->__kdispServiceNotificationInterrupt__(pGpu, pIntrService, pParams);
}

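//
// The non-virtual kdisp* entry points below each follow the same generated
// pattern: a concrete implementation (suffixed _IMPL, _KERNEL, or a
// hash-suffixed inline stub), an inline stub that asserts when the class is
// compiled out (__nvoc_kern_disp_h_disabled), and a *_HAL alias that forwards
// to whichever form is enabled.
//
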
447 void kdispServiceVblank_KERNEL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);
448
449
450 #ifdef __nvoc_kern_disp_h_disabled
kdispServiceVblank(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,NvU32 arg3,NvU32 arg4,struct THREAD_STATE_NODE * arg5)451 static inline void kdispServiceVblank(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5) {
452 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
453 }
454 #else //__nvoc_kern_disp_h_disabled
455 #define kdispServiceVblank(pGpu, pKernelDisplay, arg3, arg4, arg5) kdispServiceVblank_KERNEL(pGpu, pKernelDisplay, arg3, arg4, arg5)
456 #endif //__nvoc_kern_disp_h_disabled
457
458 #define kdispServiceVblank_HAL(pGpu, pKernelDisplay, arg3, arg4, arg5) kdispServiceVblank(pGpu, pKernelDisplay, arg3, arg4, arg5)
459
460 NV_STATUS kdispConstructInstMem_IMPL(struct KernelDisplay *pKernelDisplay);
461
462
463 #ifdef __nvoc_kern_disp_h_disabled
kdispConstructInstMem(struct KernelDisplay * pKernelDisplay)464 static inline NV_STATUS kdispConstructInstMem(struct KernelDisplay *pKernelDisplay) {
465 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
466 return NV_ERR_NOT_SUPPORTED;
467 }
468 #else //__nvoc_kern_disp_h_disabled
469 #define kdispConstructInstMem(pKernelDisplay) kdispConstructInstMem_IMPL(pKernelDisplay)
470 #endif //__nvoc_kern_disp_h_disabled
471
472 #define kdispConstructInstMem_HAL(pKernelDisplay) kdispConstructInstMem(pKernelDisplay)
473
474 void kdispDestructInstMem_IMPL(struct KernelDisplay *pKernelDisplay);
475
476
477 #ifdef __nvoc_kern_disp_h_disabled
kdispDestructInstMem(struct KernelDisplay * pKernelDisplay)478 static inline void kdispDestructInstMem(struct KernelDisplay *pKernelDisplay) {
479 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
480 }
481 #else //__nvoc_kern_disp_h_disabled
482 #define kdispDestructInstMem(pKernelDisplay) kdispDestructInstMem_IMPL(pKernelDisplay)
483 #endif //__nvoc_kern_disp_h_disabled
484
485 #define kdispDestructInstMem_HAL(pKernelDisplay) kdispDestructInstMem(pKernelDisplay)
486
kdispGetBaseOffset_4a4dee(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay)487 static inline NvS32 kdispGetBaseOffset_4a4dee(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
488 return 0;
489 }
490
491
492 #ifdef __nvoc_kern_disp_h_disabled
kdispGetBaseOffset(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay)493 static inline NvS32 kdispGetBaseOffset(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
494 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
495 return 0;
496 }
497 #else //__nvoc_kern_disp_h_disabled
498 #define kdispGetBaseOffset(pGpu, pKernelDisplay) kdispGetBaseOffset_4a4dee(pGpu, pKernelDisplay)
499 #endif //__nvoc_kern_disp_h_disabled
500
501 #define kdispGetBaseOffset_HAL(pGpu, pKernelDisplay) kdispGetBaseOffset(pGpu, pKernelDisplay)
502
kdispImportImpData_56cd7a(struct KernelDisplay * pKernelDisplay)503 static inline NV_STATUS kdispImportImpData_56cd7a(struct KernelDisplay *pKernelDisplay) {
504 return NV_OK;
505 }
506
507
508 #ifdef __nvoc_kern_disp_h_disabled
kdispImportImpData(struct KernelDisplay * pKernelDisplay)509 static inline NV_STATUS kdispImportImpData(struct KernelDisplay *pKernelDisplay) {
510 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
511 return NV_ERR_NOT_SUPPORTED;
512 }
513 #else //__nvoc_kern_disp_h_disabled
514 #define kdispImportImpData(pKernelDisplay) kdispImportImpData_56cd7a(pKernelDisplay)
515 #endif //__nvoc_kern_disp_h_disabled
516
517 #define kdispImportImpData_HAL(pKernelDisplay) kdispImportImpData(pKernelDisplay)
518
kdispArbAndAllocDisplayBandwidth_46f6a7(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,enum DISPLAY_ICC_BW_CLIENT iccBwClient,NvU32 minRequiredIsoBandwidthKBPS,NvU32 minRequiredFloorBandwidthKBPS)519 static inline NV_STATUS kdispArbAndAllocDisplayBandwidth_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, enum DISPLAY_ICC_BW_CLIENT iccBwClient, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
520 return NV_ERR_NOT_SUPPORTED;
521 }
522
523
524 #ifdef __nvoc_kern_disp_h_disabled
kdispArbAndAllocDisplayBandwidth(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,enum DISPLAY_ICC_BW_CLIENT iccBwClient,NvU32 minRequiredIsoBandwidthKBPS,NvU32 minRequiredFloorBandwidthKBPS)525 static inline NV_STATUS kdispArbAndAllocDisplayBandwidth(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, enum DISPLAY_ICC_BW_CLIENT iccBwClient, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
526 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
527 return NV_ERR_NOT_SUPPORTED;
528 }
529 #else //__nvoc_kern_disp_h_disabled
530 #define kdispArbAndAllocDisplayBandwidth(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispArbAndAllocDisplayBandwidth_46f6a7(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
531 #endif //__nvoc_kern_disp_h_disabled
532
533 #define kdispArbAndAllocDisplayBandwidth_HAL(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispArbAndAllocDisplayBandwidth(pGpu, pKernelDisplay, iccBwClient, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
534
535 NV_STATUS kdispSetPushBufferParamsToPhysical_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvHandle hObjectBuffer, struct ContextDma *pBufferContextDma, NvU32 hClass, NvU32 channelInstance, DISPCHNCLASS internalDispChnClass, ChannelPBSize channelPBSize, NvU32 subDeviceId);
536
537
538 #ifdef __nvoc_kern_disp_h_disabled
kdispSetPushBufferParamsToPhysical(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,struct DispChannel * pDispChannel,NvHandle hObjectBuffer,struct ContextDma * pBufferContextDma,NvU32 hClass,NvU32 channelInstance,DISPCHNCLASS internalDispChnClass,ChannelPBSize channelPBSize,NvU32 subDeviceId)539 static inline NV_STATUS kdispSetPushBufferParamsToPhysical(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvHandle hObjectBuffer, struct ContextDma *pBufferContextDma, NvU32 hClass, NvU32 channelInstance, DISPCHNCLASS internalDispChnClass, ChannelPBSize channelPBSize, NvU32 subDeviceId) {
540 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
541 return NV_ERR_NOT_SUPPORTED;
542 }
543 #else //__nvoc_kern_disp_h_disabled
544 #define kdispSetPushBufferParamsToPhysical(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass, channelPBSize, subDeviceId) kdispSetPushBufferParamsToPhysical_IMPL(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass, channelPBSize, subDeviceId)
545 #endif //__nvoc_kern_disp_h_disabled
546
547 #define kdispSetPushBufferParamsToPhysical_HAL(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass, channelPBSize, subDeviceId) kdispSetPushBufferParamsToPhysical(pGpu, pKernelDisplay, pDispChannel, hObjectBuffer, pBufferContextDma, hClass, channelInstance, internalDispChnClass, channelPBSize, subDeviceId)
548
kdispAcquireDispChannelHw_56cd7a(struct KernelDisplay * pKernelDisplay,struct DispChannel * pDispChannel,NvU32 channelInstance,NvHandle hObjectBuffer,NvU32 initialGetPutOffset,NvBool allowGrabWithinSameClient,NvBool connectPbAtGrab)549 static inline NV_STATUS kdispAcquireDispChannelHw_56cd7a(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvU32 channelInstance, NvHandle hObjectBuffer, NvU32 initialGetPutOffset, NvBool allowGrabWithinSameClient, NvBool connectPbAtGrab) {
550 return NV_OK;
551 }
552
553
554 #ifdef __nvoc_kern_disp_h_disabled
kdispAcquireDispChannelHw(struct KernelDisplay * pKernelDisplay,struct DispChannel * pDispChannel,NvU32 channelInstance,NvHandle hObjectBuffer,NvU32 initialGetPutOffset,NvBool allowGrabWithinSameClient,NvBool connectPbAtGrab)555 static inline NV_STATUS kdispAcquireDispChannelHw(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel, NvU32 channelInstance, NvHandle hObjectBuffer, NvU32 initialGetPutOffset, NvBool allowGrabWithinSameClient, NvBool connectPbAtGrab) {
556 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
557 return NV_ERR_NOT_SUPPORTED;
558 }
559 #else //__nvoc_kern_disp_h_disabled
560 #define kdispAcquireDispChannelHw(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab) kdispAcquireDispChannelHw_56cd7a(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab)
561 #endif //__nvoc_kern_disp_h_disabled
562
563 #define kdispAcquireDispChannelHw_HAL(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab) kdispAcquireDispChannelHw(pKernelDisplay, pDispChannel, channelInstance, hObjectBuffer, initialGetPutOffset, allowGrabWithinSameClient, connectPbAtGrab)
564
kdispReleaseDispChannelHw_56cd7a(struct KernelDisplay * pKernelDisplay,struct DispChannel * pDispChannel)565 static inline NV_STATUS kdispReleaseDispChannelHw_56cd7a(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
566 return NV_OK;
567 }
568
569
570 #ifdef __nvoc_kern_disp_h_disabled
kdispReleaseDispChannelHw(struct KernelDisplay * pKernelDisplay,struct DispChannel * pDispChannel)571 static inline NV_STATUS kdispReleaseDispChannelHw(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
572 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
573 return NV_ERR_NOT_SUPPORTED;
574 }
575 #else //__nvoc_kern_disp_h_disabled
576 #define kdispReleaseDispChannelHw(pKernelDisplay, pDispChannel) kdispReleaseDispChannelHw_56cd7a(pKernelDisplay, pDispChannel)
577 #endif //__nvoc_kern_disp_h_disabled
578
579 #define kdispReleaseDispChannelHw_HAL(pKernelDisplay, pDispChannel) kdispReleaseDispChannelHw(pKernelDisplay, pDispChannel)
580
581 NV_STATUS kdispMapDispChannel_IMPL(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel);
582
583
584 #ifdef __nvoc_kern_disp_h_disabled
kdispMapDispChannel(struct KernelDisplay * pKernelDisplay,struct DispChannel * pDispChannel)585 static inline NV_STATUS kdispMapDispChannel(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
586 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
587 return NV_ERR_NOT_SUPPORTED;
588 }
589 #else //__nvoc_kern_disp_h_disabled
590 #define kdispMapDispChannel(pKernelDisplay, pDispChannel) kdispMapDispChannel_IMPL(pKernelDisplay, pDispChannel)
591 #endif //__nvoc_kern_disp_h_disabled
592
593 #define kdispMapDispChannel_HAL(pKernelDisplay, pDispChannel) kdispMapDispChannel(pKernelDisplay, pDispChannel)
594
595 void kdispUnbindUnmapDispChannel_IMPL(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel);
596
597
598 #ifdef __nvoc_kern_disp_h_disabled
kdispUnbindUnmapDispChannel(struct KernelDisplay * pKernelDisplay,struct DispChannel * pDispChannel)599 static inline void kdispUnbindUnmapDispChannel(struct KernelDisplay *pKernelDisplay, struct DispChannel *pDispChannel) {
600 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
601 }
602 #else //__nvoc_kern_disp_h_disabled
603 #define kdispUnbindUnmapDispChannel(pKernelDisplay, pDispChannel) kdispUnbindUnmapDispChannel_IMPL(pKernelDisplay, pDispChannel)
604 #endif //__nvoc_kern_disp_h_disabled
605
606 #define kdispUnbindUnmapDispChannel_HAL(pKernelDisplay, pDispChannel) kdispUnbindUnmapDispChannel(pKernelDisplay, pDispChannel)
607
608 NV_STATUS kdispRegisterRgLineCallback_IMPL(struct KernelDisplay *pKernelDisplay, struct RgLineCallback *pRgLineCallback, NvU32 head, NvU32 rgIntrLine, NvBool bEnable);
609
610
611 #ifdef __nvoc_kern_disp_h_disabled
kdispRegisterRgLineCallback(struct KernelDisplay * pKernelDisplay,struct RgLineCallback * pRgLineCallback,NvU32 head,NvU32 rgIntrLine,NvBool bEnable)612 static inline NV_STATUS kdispRegisterRgLineCallback(struct KernelDisplay *pKernelDisplay, struct RgLineCallback *pRgLineCallback, NvU32 head, NvU32 rgIntrLine, NvBool bEnable) {
613 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
614 return NV_ERR_NOT_SUPPORTED;
615 }
616 #else //__nvoc_kern_disp_h_disabled
617 #define kdispRegisterRgLineCallback(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable) kdispRegisterRgLineCallback_IMPL(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable)
618 #endif //__nvoc_kern_disp_h_disabled
619
620 #define kdispRegisterRgLineCallback_HAL(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable) kdispRegisterRgLineCallback(pKernelDisplay, pRgLineCallback, head, rgIntrLine, bEnable)
621
622 void kdispInvokeRgLineCallback_KERNEL(struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 rgIntrLine, NvBool bIsIrqlIsr);
623
624
625 #ifdef __nvoc_kern_disp_h_disabled
kdispInvokeRgLineCallback(struct KernelDisplay * pKernelDisplay,NvU32 head,NvU32 rgIntrLine,NvBool bIsIrqlIsr)626 static inline void kdispInvokeRgLineCallback(struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 rgIntrLine, NvBool bIsIrqlIsr) {
627 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
628 }
629 #else //__nvoc_kern_disp_h_disabled
630 #define kdispInvokeRgLineCallback(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr) kdispInvokeRgLineCallback_KERNEL(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr)
631 #endif //__nvoc_kern_disp_h_disabled
632
633 #define kdispInvokeRgLineCallback_HAL(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr) kdispInvokeRgLineCallback(pKernelDisplay, head, rgIntrLine, bIsIrqlIsr)
634
635 NvU32 kdispReadPendingVblank_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct THREAD_STATE_NODE *arg3);
636
637
638 #ifdef __nvoc_kern_disp_h_disabled
kdispReadPendingVblank(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,struct THREAD_STATE_NODE * arg3)639 static inline NvU32 kdispReadPendingVblank(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, struct THREAD_STATE_NODE *arg3) {
640 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
641 return 0;
642 }
643 #else //__nvoc_kern_disp_h_disabled
644 #define kdispReadPendingVblank(pGpu, pKernelDisplay, arg3) kdispReadPendingVblank_IMPL(pGpu, pKernelDisplay, arg3)
645 #endif //__nvoc_kern_disp_h_disabled
646
647 #define kdispReadPendingVblank_HAL(pGpu, pKernelDisplay, arg3) kdispReadPendingVblank(pGpu, pKernelDisplay, arg3)
648
kdispInvokeDisplayModesetCallback_b3696a(struct KernelDisplay * pKernelDisplay,NvBool bModesetStart,NvU32 minRequiredIsoBandwidthKBPS,NvU32 minRequiredFloorBandwidthKBPS)649 static inline void kdispInvokeDisplayModesetCallback_b3696a(struct KernelDisplay *pKernelDisplay, NvBool bModesetStart, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
650 return;
651 }
652
653
654 #ifdef __nvoc_kern_disp_h_disabled
kdispInvokeDisplayModesetCallback(struct KernelDisplay * pKernelDisplay,NvBool bModesetStart,NvU32 minRequiredIsoBandwidthKBPS,NvU32 minRequiredFloorBandwidthKBPS)655 static inline void kdispInvokeDisplayModesetCallback(struct KernelDisplay *pKernelDisplay, NvBool bModesetStart, NvU32 minRequiredIsoBandwidthKBPS, NvU32 minRequiredFloorBandwidthKBPS) {
656 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
657 }
658 #else //__nvoc_kern_disp_h_disabled
659 #define kdispInvokeDisplayModesetCallback(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispInvokeDisplayModesetCallback_b3696a(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
660 #endif //__nvoc_kern_disp_h_disabled
661
662 #define kdispInvokeDisplayModesetCallback_HAL(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS) kdispInvokeDisplayModesetCallback(pKernelDisplay, bModesetStart, minRequiredIsoBandwidthKBPS, minRequiredFloorBandwidthKBPS)
663
kdispDsmMxmMxcbExecuteAcpi_92bfc3(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,void * pInOutData,NvU16 * outDataSize)664 static inline NV_STATUS kdispDsmMxmMxcbExecuteAcpi_92bfc3(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, void *pInOutData, NvU16 *outDataSize) {
665 NV_ASSERT_PRECOMP(0);
666 return NV_ERR_NOT_SUPPORTED;
667 }
668
669
670 #ifdef __nvoc_kern_disp_h_disabled
kdispDsmMxmMxcbExecuteAcpi(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,void * pInOutData,NvU16 * outDataSize)671 static inline NV_STATUS kdispDsmMxmMxcbExecuteAcpi(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, void *pInOutData, NvU16 *outDataSize) {
672 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
673 return NV_ERR_NOT_SUPPORTED;
674 }
675 #else //__nvoc_kern_disp_h_disabled
676 #define kdispDsmMxmMxcbExecuteAcpi(pGpu, pKernelDisplay, pInOutData, outDataSize) kdispDsmMxmMxcbExecuteAcpi_92bfc3(pGpu, pKernelDisplay, pInOutData, outDataSize)
677 #endif //__nvoc_kern_disp_h_disabled
678
679 #define kdispDsmMxmMxcbExecuteAcpi_HAL(pGpu, pKernelDisplay, pInOutData, outDataSize) kdispDsmMxmMxcbExecuteAcpi(pGpu, pKernelDisplay, pInOutData, outDataSize)
680
681 NV_STATUS kdispInitBrightcStateLoad_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);
682
683
684 #ifdef __nvoc_kern_disp_h_disabled
kdispInitBrightcStateLoad(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay)685 static inline NV_STATUS kdispInitBrightcStateLoad(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
686 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
687 return NV_ERR_NOT_SUPPORTED;
688 }
689 #else //__nvoc_kern_disp_h_disabled
690 #define kdispInitBrightcStateLoad(pGpu, pKernelDisplay) kdispInitBrightcStateLoad_IMPL(pGpu, pKernelDisplay)
691 #endif //__nvoc_kern_disp_h_disabled
692
693 #define kdispInitBrightcStateLoad_HAL(pGpu, pKernelDisplay) kdispInitBrightcStateLoad(pGpu, pKernelDisplay)
694
695 NV_STATUS kdispSetupAcpiEdid_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);
696
697
698 #ifdef __nvoc_kern_disp_h_disabled
kdispSetupAcpiEdid(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay)699 static inline NV_STATUS kdispSetupAcpiEdid(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
700 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
701 return NV_ERR_NOT_SUPPORTED;
702 }
703 #else //__nvoc_kern_disp_h_disabled
704 #define kdispSetupAcpiEdid(pGpu, pKernelDisplay) kdispSetupAcpiEdid_IMPL(pGpu, pKernelDisplay)
705 #endif //__nvoc_kern_disp_h_disabled
706
707 #define kdispSetupAcpiEdid_HAL(pGpu, pKernelDisplay) kdispSetupAcpiEdid(pGpu, pKernelDisplay)
708
kdispReadPendingAwakenIntr_72a2e1(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,NvU32 * pCachedIntr,struct THREAD_STATE_NODE * arg4)709 static inline NvBool kdispReadPendingAwakenIntr_72a2e1(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg4) {
710 NV_ASSERT_PRECOMP(0);
711 return NV_FALSE;
712 }
713
714 NvBool kdispReadPendingAwakenIntr_v03_00_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg4);
715
kdispReadPendingAwakenIntr_3dd2c9(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,NvU32 * pCachedIntr,struct THREAD_STATE_NODE * arg4)716 static inline NvBool kdispReadPendingAwakenIntr_3dd2c9(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg4) {
717 return NV_FALSE;
718 }
719
720
721 #ifdef __nvoc_kern_disp_h_disabled
kdispReadPendingAwakenIntr(OBJGPU * pGpu,struct KernelDisplay * pKernelDisplay,NvU32 * pCachedIntr,struct THREAD_STATE_NODE * arg4)722 static inline NvBool kdispReadPendingAwakenIntr(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pCachedIntr, struct THREAD_STATE_NODE *arg4) {
723 NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
724 return NV_FALSE;
725 }
726 #else //__nvoc_kern_disp_h_disabled
727 #define kdispReadPendingAwakenIntr(pGpu, pKernelDisplay, pCachedIntr, arg4) kdispReadPendingAwakenIntr_72a2e1(pGpu, pKernelDisplay, pCachedIntr, arg4)
728 #endif //__nvoc_kern_disp_h_disabled
729
730 #define kdispReadPendingAwakenIntr_HAL(pGpu, pKernelDisplay, pCachedIntr, arg4) kdispReadPendingAwakenIntr(pGpu, pKernelDisplay, pCachedIntr, arg4)

static inline NV_STATUS kdispReadAwakenChannelNumMask_92bfc3(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg3, DISPCHNCLASS arg4, struct THREAD_STATE_NODE *arg5) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispReadAwakenChannelNumMask_v03_00_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg3, DISPCHNCLASS arg4, struct THREAD_STATE_NODE *arg5);

static inline NV_STATUS kdispReadAwakenChannelNumMask_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg3, DISPCHNCLASS arg4, struct THREAD_STATE_NODE *arg5) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispReadAwakenChannelNumMask(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *arg3, DISPCHNCLASS arg4, struct THREAD_STATE_NODE *arg5) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispReadAwakenChannelNumMask(pGpu, pKernelDisplay, arg3, arg4, arg5) kdispReadAwakenChannelNumMask_92bfc3(pGpu, pKernelDisplay, arg3, arg4, arg5)
#endif //__nvoc_kern_disp_h_disabled

#define kdispReadAwakenChannelNumMask_HAL(pGpu, pKernelDisplay, arg3, arg4, arg5) kdispReadAwakenChannelNumMask(pGpu, pKernelDisplay, arg3, arg4, arg5)

NV_STATUS kdispAllocateCommonHandle_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispAllocateCommonHandle(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispAllocateCommonHandle(pGpu, pKernelDisplay) kdispAllocateCommonHandle_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispAllocateCommonHandle_HAL(pGpu, pKernelDisplay) kdispAllocateCommonHandle(pGpu, pKernelDisplay)

void kdispDestroyCommonHandle_IMPL(struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispDestroyCommonHandle(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDestroyCommonHandle(pKernelDisplay) kdispDestroyCommonHandle_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispDestroyCommonHandle_HAL(pKernelDisplay) kdispDestroyCommonHandle(pKernelDisplay)

NV_STATUS kdispAllocateSharedMem_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispAllocateSharedMem(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispAllocateSharedMem(pGpu, pKernelDisplay) kdispAllocateSharedMem_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispAllocateSharedMem_HAL(pGpu, pKernelDisplay) kdispAllocateSharedMem(pGpu, pKernelDisplay)

void kdispFreeSharedMem_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispFreeSharedMem(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispFreeSharedMem(pGpu, pKernelDisplay) kdispFreeSharedMem_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispFreeSharedMem_HAL(pGpu, pKernelDisplay) kdispFreeSharedMem(pGpu, pKernelDisplay)

NvBool kdispIsDisplayConnected_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NvBool kdispIsDisplayConnected(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispIsDisplayConnected(pGpu, pKernelDisplay) kdispIsDisplayConnected_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispIsDisplayConnected_HAL(pGpu, pKernelDisplay) kdispIsDisplayConnected(pGpu, pKernelDisplay)

NvU32 kdispGetSupportedDisplayMask_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);


#ifdef __nvoc_kern_disp_h_disabled
static inline NvU32 kdispGetSupportedDisplayMask(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return 0;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetSupportedDisplayMask(pGpu, pKernelDisplay) kdispGetSupportedDisplayMask_IMPL(pGpu, pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispGetSupportedDisplayMask_HAL(pGpu, pKernelDisplay) kdispGetSupportedDisplayMask(pGpu, pKernelDisplay)

static inline void kdispUpdatePdbAfterIpHalInit_b3696a(struct KernelDisplay *pKernelDisplay) {
    return;
}


#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispUpdatePdbAfterIpHalInit(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispUpdatePdbAfterIpHalInit(pKernelDisplay) kdispUpdatePdbAfterIpHalInit_b3696a(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

#define kdispUpdatePdbAfterIpHalInit_HAL(pKernelDisplay) kdispUpdatePdbAfterIpHalInit(pKernelDisplay)

NV_STATUS kdispConstructEngine_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, ENGDESCRIPTOR engDesc);

NV_STATUS kdispStatePreInitLocked_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

NV_STATUS kdispStateInitLocked_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

void kdispStateDestroy_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay);

NV_STATUS kdispStateLoad_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags);

NV_STATUS kdispStateUnload_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 flags);

void kdispRegisterIntrService_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceRecord pRecords[175]);

static inline NvU32 kdispServiceInterrupt_acff5e(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, IntrServiceServiceInterruptArguments *pParams) {
    kdispServiceVblank(pGpu, pKernelDisplay, 0, (4), ((void *)0));
    return NV_OK;
}

static inline NV_STATUS kdispSelectClass_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispSelectClass_v03_00_KERNEL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 swClass);

static inline NV_STATUS kdispGetChannelNum_46f6a7(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispGetChannelNum_v03_00(struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pChannelNum);

static inline void kdispGetDisplayCapsBaseAndSize_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    return;
}

void kdispGetDisplayCapsBaseAndSize_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize);

static inline void kdispGetDisplaySfUserBaseAndSize_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize) {
    return;
}

void kdispGetDisplaySfUserBaseAndSize_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 *pOffset, NvU32 *pSize);

static inline NV_STATUS kdispGetDisplayChannelUserBaseAndSize_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispGetDisplayChannelUserBaseAndSize_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, DISPCHNCLASS channelClass, NvU32 channelInstance, NvU32 *pOffset, NvU32 *pSize);

NvBool kdispGetVgaWorkspaceBase_v04_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset);

static inline NvBool kdispGetVgaWorkspaceBase_3dd2c9(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU64 *pOffset) {
    return NV_FALSE;
}

NV_STATUS kdispReadRgLineCountAndFrameCount_v03_00_PHYSICAL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount);

static inline NV_STATUS kdispReadRgLineCountAndFrameCount_46f6a7(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispReadRgLineCountAndFrameCount_v03_00_KERNEL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pLineCount, NvU32 *pFrameCount);

void kdispRestoreOriginalLsrMinTime_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 origLsrMinTime);

static inline void kdispRestoreOriginalLsrMinTime_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 origLsrMinTime) {
    return;
}

NV_STATUS kdispComputeLsrMinTimeValue_v02_07(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 swapRdyHiLsrMinTime, NvU32 *pComputedLsrMinTime);

static inline NV_STATUS kdispComputeLsrMinTimeValue_56cd7a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 swapRdyHiLsrMinTime, NvU32 *pComputedLsrMinTime) {
    return NV_OK;
}

void kdispSetSwapBarrierLsrMinTime_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pOrigLsrMinTime, NvU32 newLsrMinTime);

static inline void kdispSetSwapBarrierLsrMinTime_b3696a(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head, NvU32 *pOrigLsrMinTime, NvU32 newLsrMinTime) {
    return;
}

NV_STATUS kdispGetRgScanLock_v02_01(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head0, OBJGPU *pPeerGpu, NvU32 head1, NvBool *pMasterScanLock, NvU32 *pMasterScanLockPin, NvBool *pSlaveScanLock, NvU32 *pSlaveScanLockPin);

static inline NV_STATUS kdispGetRgScanLock_92bfc3(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 head0, OBJGPU *pPeerGpu, NvU32 head1, NvBool *pMasterScanLock, NvU32 *pMasterScanLockPin, NvBool *pSlaveScanLock, NvU32 *pSlaveScanLockPin) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kdispDetectSliLink_v04_00(struct KernelDisplay *pKernelDisplay, OBJGPU *pParentGpu, OBJGPU *pChildGpu, NvU32 ParentDrPort, NvU32 ChildDrPort);

static inline NV_STATUS kdispDetectSliLink_92bfc3(struct KernelDisplay *pKernelDisplay, OBJGPU *pParentGpu, OBJGPU *pChildGpu, NvU32 ParentDrPort, NvU32 ChildDrPort) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}

NvU32 kdispGetPBTargetAperture_v03_00(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 memAddrSpace, NvU32 cacheSnoop);

static inline NvU32 kdispGetPBTargetAperture_15a734(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 memAddrSpace, NvU32 cacheSnoop) {
    return 0U;
}

void kdispDestruct_IMPL(struct KernelDisplay *pKernelDisplay);

#define __nvoc_kdispDestruct(pKernelDisplay) kdispDestruct_IMPL(pKernelDisplay)
NV_STATUS kdispConstructKhead_IMPL(struct KernelDisplay *pKernelDisplay);

#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispConstructKhead(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispConstructKhead(pKernelDisplay) kdispConstructKhead_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

void kdispDestructKhead_IMPL(struct KernelDisplay *pKernelDisplay);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispDestructKhead(struct KernelDisplay *pKernelDisplay) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispDestructKhead(pKernelDisplay) kdispDestructKhead_IMPL(pKernelDisplay)
#endif //__nvoc_kern_disp_h_disabled

NV_STATUS kdispGetIntChnClsForHwCls_IMPL(struct KernelDisplay *pKernelDisplay, NvU32 hwClass, DISPCHNCLASS *pDispChnClass);

#ifdef __nvoc_kern_disp_h_disabled
static inline NV_STATUS kdispGetIntChnClsForHwCls(struct KernelDisplay *pKernelDisplay, NvU32 hwClass, DISPCHNCLASS *pDispChnClass) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_disp_h_disabled
#define kdispGetIntChnClsForHwCls(pKernelDisplay, hwClass, pDispChnClass) kdispGetIntChnClsForHwCls_IMPL(pKernelDisplay, hwClass, pDispChnClass)
#endif //__nvoc_kern_disp_h_disabled

void kdispNotifyCommonEvent_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispNotifyCommonEvent(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispNotifyCommonEvent(pGpu, pKernelDisplay, notifyIndex, pNotifyParams) kdispNotifyCommonEvent_IMPL(pGpu, pKernelDisplay, notifyIndex, pNotifyParams)
#endif //__nvoc_kern_disp_h_disabled

void kdispNotifyEvent_IMPL(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams, NvU32 notifyParamsSize, NvV32 info32, NvV16 info16);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispNotifyEvent(OBJGPU *pGpu, struct KernelDisplay *pKernelDisplay, NvU32 notifyIndex, void *pNotifyParams, NvU32 notifyParamsSize, NvV32 info32, NvV16 info16) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispNotifyEvent(pGpu, pKernelDisplay, notifyIndex, pNotifyParams, notifyParamsSize, info32, info16) kdispNotifyEvent_IMPL(pGpu, pKernelDisplay, notifyIndex, pNotifyParams, notifyParamsSize, info32, info16)
#endif //__nvoc_kern_disp_h_disabled

void kdispSetWarPurgeSatellitesOnCoreFree_IMPL(struct KernelDisplay *pKernelDisplay, NvBool value);

#ifdef __nvoc_kern_disp_h_disabled
static inline void kdispSetWarPurgeSatellitesOnCoreFree(struct KernelDisplay *pKernelDisplay, NvBool value) {
    NV_ASSERT_FAILED_PRECOMP("KernelDisplay was disabled!");
}
#else //__nvoc_kern_disp_h_disabled
#define kdispSetWarPurgeSatellitesOnCoreFree(pKernelDisplay, value) kdispSetWarPurgeSatellitesOnCoreFree_IMPL(pKernelDisplay, value)
#endif //__nvoc_kern_disp_h_disabled

#undef PRIVATE_FIELD


void
dispdeviceFillVgaSavedDisplayState( OBJGPU *pGpu,
    NvU64 vgaAddr,
    NvU8 vgaMemType,
    NvBool vgaValid,
    NvU64 workspaceAddr,
    NvU8 workspaceMemType,
    NvBool workspaceValid,
    NvBool baseValid,
    NvBool workspaceBaseValid
);

/*! PushBuffer Target Aperture Types */
typedef enum
{
    IOVA,
    PHYS_NVM,
    PHYS_PCI,
    PHYS_PCI_COHERENT
} PBTARGETAPERTURE;
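
/*
 * Illustrative sketch only (not part of the generated header): one way a
 * caller might choose a PBTARGETAPERTURE for a display pushbuffer surface.
 * The helper and its parameters are hypothetical; only the enumerants above
 * come from this header. IOVA is presumably used when the surface is reached
 * through an IOMMU mapping rather than a raw physical address.
 */
#if 0
static PBTARGETAPERTURE
examplePickPushbufferAperture(NvBool bInVidmem, NvBool bCacheCoherent)
{
    if (bInVidmem)
    {
        return PHYS_NVM;    /* pushbuffer lives in local video memory */
    }

    /* System-memory pushbuffers are reached over PCI, coherent or not. */
    return bCacheCoherent ? PHYS_PCI_COHERENT : PHYS_PCI;
}
#endif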

static NV_INLINE struct KernelHead*
kdispGetHead
(
    struct KernelDisplay *pKernelDisplay,
    NvU32 head
)
{
    if (head >= OBJ_MAX_HEADS)
    {
        return NULL;
    }

    return pKernelDisplay->pKernelHead[head];
}
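
/*
 * Illustrative sketch only: kdispGetHead() bounds-checks against OBJ_MAX_HEADS
 * and can return NULL, so per-head loops should tolerate missing heads. The
 * helper name below is hypothetical and the block is kept out of compilation.
 */
#if 0
static void
exampleVisitAllHeads(struct KernelDisplay *pKernelDisplay)
{
    NvU32 head;

    for (head = 0; head < OBJ_MAX_HEADS; head++)
    {
        struct KernelHead *pKernelHead = kdispGetHead(pKernelDisplay, head);

        if (pKernelHead == NULL)
        {
            continue;   /* head not populated in this configuration */
        }

        /* ... per-head work would go here ... */
    }
}
#endif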

static NV_INLINE NvU32
kdispGetNumHeads(struct KernelDisplay *pKernelDisplay)
{
    NV_ASSERT(pKernelDisplay != NULL);
    return pKernelDisplay->numHeads;
}

static NV_INLINE NvU32
kdispGetDeferredVblankHeadMask(struct KernelDisplay *pKernelDisplay)
{
    return pKernelDisplay->deferredVblankHeadMask;
}

static NV_INLINE void
kdispSetDeferredVblankHeadMask(struct KernelDisplay *pKernelDisplay, NvU32 vblankHeadMask)
{
    pKernelDisplay->deferredVblankHeadMask = vblankHeadMask;
}

static NV_INLINE NvHandle
kdispGetInternalClientHandle(struct KernelDisplay *pKernelDisplay)
{
    return pKernelDisplay->hInternalClient;
}

static NV_INLINE NvHandle
kdispGetDispCommonHandle(struct KernelDisplay *pKernelDisplay)
{
    return pKernelDisplay->hDispCommonHandle;
}
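
/*
 * Illustrative sketch only: the deferred-vblank mask accessors above are a
 * plain get/modify/set pair, so deferring service for one head amounts to
 * OR-ing its bit into the mask. The helper name is hypothetical; NVBIT is
 * assumed to be the usual single-bit macro from nvmisc.h. Kept out of
 * compilation.
 */
#if 0
static void
exampleDeferVblankServiceForHead(struct KernelDisplay *pKernelDisplay, NvU32 head)
{
    NvU32 mask;

    if (head >= kdispGetNumHeads(pKernelDisplay))
    {
        return;
    }

    mask = kdispGetDeferredVblankHeadMask(pKernelDisplay);
    kdispSetDeferredVblankHeadMask(pKernelDisplay, mask | NVBIT(head));
}
#endif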

#endif // KERN_DISP_H

#ifdef __cplusplus
} // extern "C"
#endif

#endif // _G_KERN_DISP_NVOC_H_