1 #ifndef _G_KERNEL_BIF_NVOC_H_
2 #define _G_KERNEL_BIF_NVOC_H_
3 #include "nvoc/runtime.h"
4
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8
9 /*
10 * SPDX-FileCopyrightText: Copyright (c) 2013-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11 * SPDX-License-Identifier: MIT
12 *
13 * Permission is hereby granted, free of charge, to any person obtaining a
14 * copy of this software and associated documentation files (the "Software"),
15 * to deal in the Software without restriction, including without limitation
16 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17 * and/or sell copies of the Software, and to permit persons to whom the
18 * Software is furnished to do so, subject to the following conditions:
19 *
20 * The above copyright notice and this permission notice shall be included in
21 * all copies or substantial portions of the Software.
22 *
23 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29 * DEALINGS IN THE SOFTWARE.
30 */
31
32
33 /* ------------------------ Includes ---------------------------------------- */
34 #include "g_kernel_bif_nvoc.h"
35
36 #ifndef KERNEL_BIF_H
37 #define KERNEL_BIF_H
38
39 #include "core/core.h"
40 #include "gpu/eng_state.h"
41 #include "gpu/gpu_halspec.h"
42 #include "gpu/intr/intr_service.h"
43 #include "gpu/mem_mgr/mem_desc.h"
44 #include "rmpbicmdif.h"
45 #include "nvoc/utility.h"
46 #include "ctrl/ctrl2080/ctrl2080bus.h"
47
48
49 /* ------------------------ Types definitions ------------------------------ */
50
51 // PCIe config space size
// PCIe config space size
#define PCIE_CONFIG_SPACE_SIZE 0x1000

// The default value of registry key ForceP2P override, ~0 means no registry key.
#define BIF_P2P_NOT_OVERRIDEN ((NvU32)~0)

// DMA capabilities: snoop / no-snoop fields packed into one NvU32
// (bit-range form, extracted with REF_VAL as in kbifIsSnoopDmaCapable below)
#define BIF_DMA_CAPS_SNOOP 15:0
#define BIF_DMA_CAPS_SNOOP_CTXDMA 0x1
#define BIF_DMA_CAPS_NOSNOOP 31:16
#define BIF_DMA_CAPS_NOSNOOP_CTXDMA 0x1

// Mask selecting every XVE AER bit (passed to kbifClearXveAer)
#define KBIF_CLEAR_XVE_AER_ALL_MASK (0xFFFFFFFF)

// Nonzero when the DMA caps reported by kbifGetDmaCaps() advertise snoop support
#define kbifIsSnoopDmaCapable(pGpu, pKernelBif) ((REF_VAL(BIF_DMA_CAPS_SNOOP, \
                                                 kbifGetDmaCaps(pGpu, pKernelBif))))
// Number of BAR entries: BAR0_LO, BAR0_HI, BAR1_LO, BAR1_HI, BAR2_LO, BAR2_HI and BAR3
#define KBIF_NUM_BAR_OFFSET_ENTRIES 0x7
// Indicates a non existent BAR offset
#define KBIF_INVALID_BAR_REG_OFFSET 0xFFFF
71
72 // XVE bus options
// XVE bus options: selects which PCIe config register group is being
// requested (consumed by kbifGetBusOptionsAddr to return the register address)
typedef enum BUS_OPTIONS
{
    BUS_OPTIONS_DEV_CONTROL_STATUS = 0,    // Device Control/Status
    BUS_OPTIONS_LINK_CONTROL_STATUS,       // Link Control/Status
    BUS_OPTIONS_LINK_CAPABILITIES,         // Link Capabilities
    BUS_OPTIONS_DEV_CONTROL_STATUS_2,      // Device Control/Status 2
    BUS_OPTIONS_L1_PM_SUBSTATES_CTRL_1     // L1 PM Substates Control 1
} BUS_OPTIONS;
81
//
// References to save/restore PCIe Config space using registry_map.
// One instance describes a single PCIe function (see nFunc); an array of
// these is embedded in KernelBif::xveRegmapRef.
//
typedef struct KBIF_XVE_REGMAP_REF
{
    // XVE Valid register map array
    const NvU32 *xveRegMapValid;

    // XVE Writable register map array
    const NvU32 *xveRegMapWrite;

    // XVE Valid register map array count
    NvU16 numXveRegMapValid;

    // XVE Writable register map array count
    NvU16 numXveRegMapWrite;

    // PCIe function number
    NvU8 nFunc;

    // Buffer to store boot PCIe config space
    NvU32 *bufBootConfigSpace;

    // Buffer to store boot PCIe MSIX table (GH100+)
    NvU32 *bufMsixTable;

} KBIF_XVE_REGMAP_REF, *PKBIF_XVE_REGMAP_REF;
109
// Cached copies of PCIe config-space state (see KernelBif::cacheData)
typedef struct
{
    // Used to save/restore config space after a hot reset
    NvU32 gpuBootConfigSpace[PCIE_CONFIG_SPACE_SIZE/sizeof(NvU32)];
    NvU32 azaliaBootConfigSpace[PCIE_CONFIG_SPACE_SIZE/sizeof(NvU32)];

    // Saved device control register value (Kepler+)
    NvU32 xveDevCtrl;
} KBIF_CACHE_DATA;
119
// Forward declaration only; the full definition is provided elsewhere.
typedef struct KERNEL_HOST_VGPU_DEVICE KERNEL_HOST_VGPU_DEVICE;


// Private field names are wrapped in PRIVATE_FIELD, which does nothing for
// the matching C source file, but causes diagnostics to be issued if another
// source file references the field.
#ifdef NVOC_KERNEL_BIF_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
131
// Kernel BIF (Bus InterFace) engine-state object, generated by NVOC.
// Derives from OBJENGSTATE; per-chip behavior is selected through the
// function pointers below (filled in by the NVOC constructor).
struct KernelBif {
    // NVOC metadata: RTTI plus base-class instance and ancestor pointers
    const struct NVOC_RTTI *__nvoc_rtti;
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct Object *__nvoc_pbase_Object;
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
    struct KernelBif *__nvoc_pbase_KernelBif;

    // Virtual method table — invoked via the kbif*_DISPATCH macros below
    NV_STATUS (*__kbifConstructEngine__)(struct OBJGPU *, struct KernelBif *, ENGDESCRIPTOR);
    NV_STATUS (*__kbifStateInitLocked__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifStateLoad__)(struct OBJGPU *, struct KernelBif *, NvU32);
    NV_STATUS (*__kbifStatePostLoad__)(struct OBJGPU *, struct KernelBif *, NvU32);
    NV_STATUS (*__kbifStateUnload__)(struct OBJGPU *, struct KernelBif *, NvU32);
    NvU32 (*__kbifGetBusIntfType__)(struct KernelBif *);
    void (*__kbifInitDmaCaps__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifSavePcieConfigRegisters__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifRestorePcieConfigRegisters__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifGetXveStatusBits__)(struct OBJGPU *, struct KernelBif *, NvU32 *, NvU32 *);
    NV_STATUS (*__kbifClearXveStatus__)(struct OBJGPU *, struct KernelBif *, NvU32 *);
    NV_STATUS (*__kbifGetXveAerBits__)(struct OBJGPU *, struct KernelBif *, NvU32 *);
    NV_STATUS (*__kbifClearXveAer__)(struct OBJGPU *, struct KernelBif *, NvU32);
    void (*__kbifGetPcieConfigAccessTestRegisters__)(struct OBJGPU *, struct KernelBif *, NvU32 *, NvU32 *);
    NV_STATUS (*__kbifVerifyPcieConfigAccessTestRegisters__)(struct OBJGPU *, struct KernelBif *, NvU32, NvU32);
    void (*__kbifRearmMSI__)(struct OBJGPU *, struct KernelBif *);
    NvBool (*__kbifIsMSIEnabledInHW__)(struct OBJGPU *, struct KernelBif *);
    NvBool (*__kbifIsMSIXEnabledInHW__)(struct OBJGPU *, struct KernelBif *);
    NvBool (*__kbifIsPciIoAccessEnabled__)(struct OBJGPU *, struct KernelBif *);
    NvBool (*__kbifIs3dController__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifExecC73War__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifEnableExtendedTagSupport__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifPcieConfigEnableRelaxedOrdering__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifPcieConfigDisableRelaxedOrdering__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifInitRelaxedOrderingFromEmulatedConfigSpace__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifEnableNoSnoop__)(struct OBJGPU *, struct KernelBif *, NvBool);
    void (*__kbifApplyWARBug3208922__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifProbePcieReqAtomicCaps__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifEnablePcieAtomics__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifDoFunctionLevelReset__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifInitXveRegMap__)(struct OBJGPU *, struct KernelBif *, NvU8);
    NvU32 (*__kbifGetMSIXTableVectorControlSize__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifSaveMsixTable__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifRestoreMsixTable__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifConfigAccessWait__)(struct OBJGPU *, struct KernelBif *, RMTIMEOUT *);
    NV_STATUS (*__kbifGetPciConfigSpacePriMirror__)(struct OBJGPU *, struct KernelBif *, NvU32 *, NvU32 *);
    NV_STATUS (*__kbifGetBusOptionsAddr__)(struct OBJGPU *, struct KernelBif *, BUS_OPTIONS, NvU32 *);
    NV_STATUS (*__kbifPreOsGlobalErotGrantRequest__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifStopSysMemRequests__)(struct OBJGPU *, struct KernelBif *, NvBool);
    NV_STATUS (*__kbifWaitForTransactionsComplete__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifTriggerFlr__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifCacheFlrSupport__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifCache64bBar0Support__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifCacheVFInfo__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifRestoreBar0__)(struct OBJGPU *, struct KernelBif *, void *, NvU32 *);
    NvBool (*__kbifAnyBarsAreValid__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifRestoreBarsAndCommand__)(struct OBJGPU *, struct KernelBif *);
    void (*__kbifStoreBarRegOffsets__)(struct OBJGPU *, struct KernelBif *, NvU32);
    NV_STATUS (*__kbifInit__)(struct OBJGPU *, struct KernelBif *);
    NvU32 (*__kbifGetValidEnginesToReset__)(struct OBJGPU *, struct KernelBif *);
    NvU32 (*__kbifGetValidDeviceEnginesToReset__)(struct OBJGPU *, struct KernelBif *);
    NV_STATUS (*__kbifGetMigrationBandwidth__)(struct OBJGPU *, struct KernelBif *, NvU32 *);
    NvU32 (*__kbifGetEccCounts__)(struct OBJGPU *, struct KernelBif *);

    // Methods inherited from the OBJENGSTATE base class
    NV_STATUS (*__kbifStatePreLoad__)(POBJGPU, struct KernelBif *, NvU32);
    NV_STATUS (*__kbifStatePostUnload__)(POBJGPU, struct KernelBif *, NvU32);
    void (*__kbifStateDestroy__)(POBJGPU, struct KernelBif *);
    NV_STATUS (*__kbifStatePreUnload__)(POBJGPU, struct KernelBif *, NvU32);
    NV_STATUS (*__kbifStateInitUnlocked__)(POBJGPU, struct KernelBif *);
    void (*__kbifInitMissing__)(POBJGPU, struct KernelBif *);
    NV_STATUS (*__kbifStatePreInitLocked__)(POBJGPU, struct KernelBif *);
    NV_STATUS (*__kbifStatePreInitUnlocked__)(POBJGPU, struct KernelBif *);
    NvBool (*__kbifIsPresent__)(POBJGPU, struct KernelBif *);

    // PDB properties: boolean feature/state flags (aliased via the
    // PDB_PROP_KBIF_*_BASE_NAME macros below)
    NvBool PDB_PROP_KBIF_CHECK_IF_GPU_EXISTS_DEF;
    NvBool PDB_PROP_KBIF_IS_MSI_ENABLED;
    NvBool PDB_PROP_KBIF_IS_MSI_CACHED;
    NvBool PDB_PROP_KBIF_IS_MSIX_ENABLED;
    NvBool PDB_PROP_KBIF_IS_MSIX_CACHED;
    NvBool PDB_PROP_KBIF_IS_FMODEL_MSI_BROKEN;
    NvBool PDB_PROP_KBIF_USE_CONFIG_SPACE_TO_REARM_MSI;
    NvBool PDB_PROP_KBIF_ALLOW_REARM_MSI_FOR_VF;
    NvBool PDB_PROP_KBIF_IS_C2C_LINK_UP;
    NvBool PDB_PROP_KBIF_P2P_READS_DISABLED;
    NvBool PDB_PROP_KBIF_P2P_WRITES_DISABLED;
    NvBool PDB_PROP_KBIF_UPSTREAM_LTR_SUPPORT_WAR_BUG_200634944;
    NvBool PDB_PROP_KBIF_SUPPORT_NONCOHERENT;
    NvBool PDB_PROP_KBIF_PCIE_GEN4_CAPABLE;
    NvBool PDB_PROP_KBIF_PCIE_RELAXED_ORDERING_SET_IN_EMULATED_CONFIG_SPACE;
    NvBool PDB_PROP_KBIF_SYSTEM_ACCESS_DISABLED;
    NvBool PDB_PROP_KBIF_FLR_SUPPORTED;
    NvBool PDB_PROP_KBIF_64BIT_BAR0_SUPPORTED;
    NvBool PDB_PROP_KBIF_DEVICE_IS_MULTIFUNCTION;
    NvBool PDB_PROP_KBIF_GCX_PMU_CFG_SPACE_RESTORE;
    NvBool PDB_PROP_KBIF_SECONDARY_BUS_RESET_SUPPORTED;
    NvBool PDB_PROP_KBIF_SECONDARY_BUS_RESET_ENABLED;
    NvBool PDB_PROP_KBIF_FORCE_PCIE_CONFIG_SAVE;
    NvBool PDB_PROP_KBIF_FLR_PRE_CONDITIONING_REQUIRED;

    // Data members
    NvU32 dmaCaps;                                // packed BIF_DMA_CAPS_* fields
    RmPhysAddr dmaWindowStartAddress;
    NvU32 p2pOverride;                            // BIF_P2P_NOT_OVERRIDEN when no registry key
    NvU32 forceP2PType;
    NvBool peerMappingOverride;
    NvBool EnteredRecoverySinceErrorsLastChecked;
    KBIF_CACHE_DATA cacheData;                    // saved PCIe config space (hot reset)
    NvBool bPreparingFunctionLevelReset;
    NvBool bInFunctionLevelReset;
    NvU32 osPcieAtomicsOpMask;
    NvBool bForceDisableFLR;
    NvU32 flrDevInitTimeoutScale;
    KBIF_XVE_REGMAP_REF xveRegmapRef[2];          // per-PCIe-function regmap refs
    NvBool bMnocAvailable;
    NvU32 barRegOffsets[7];                       // sized per KBIF_NUM_BAR_OFFSET_ENTRIES
};
240
#ifndef __NVOC_CLASS_KernelBif_TYPEDEF__
#define __NVOC_CLASS_KernelBif_TYPEDEF__
typedef struct KernelBif KernelBif;
#endif /* __NVOC_CLASS_KernelBif_TYPEDEF__ */

#ifndef __nvoc_class_id_KernelBif
#define __nvoc_class_id_KernelBif 0xdbe523
#endif /* __nvoc_class_id_KernelBif */

// NVOC class definition used by the run-time cast machinery
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelBif;

// Upcast: unconditional, uses the cached ancestor pointer
#define __staticCast_KernelBif(pThis) \
    ((pThis)->__nvoc_pbase_KernelBif)

#ifdef __nvoc_kernel_bif_h_disabled
#define __dynamicCast_KernelBif(pThis) ((KernelBif*)NULL)
#else //__nvoc_kernel_bif_h_disabled
// Downcast: RTTI-checked at run time; yields NULL on type mismatch
#define __dynamicCast_KernelBif(pThis) \
    ((KernelBif*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelBif)))
#endif //__nvoc_kernel_bif_h_disabled
261
// Property-database aliases: for each KBIF property, _BASE_CAST gives the
// member path prefix and _BASE_NAME the field that actually stores it.
// Properties owned by KernelBif itself have an empty cast; properties
// inherited from OBJENGSTATE (e.g. IS_MISSING) point into
// __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KBIF_CHECK_IF_GPU_EXISTS_DEF_BASE_CAST
#define PDB_PROP_KBIF_CHECK_IF_GPU_EXISTS_DEF_BASE_NAME PDB_PROP_KBIF_CHECK_IF_GPU_EXISTS_DEF
#define PDB_PROP_KBIF_IS_C2C_LINK_UP_BASE_CAST
#define PDB_PROP_KBIF_IS_C2C_LINK_UP_BASE_NAME PDB_PROP_KBIF_IS_C2C_LINK_UP
#define PDB_PROP_KBIF_USE_CONFIG_SPACE_TO_REARM_MSI_BASE_CAST
#define PDB_PROP_KBIF_USE_CONFIG_SPACE_TO_REARM_MSI_BASE_NAME PDB_PROP_KBIF_USE_CONFIG_SPACE_TO_REARM_MSI
#define PDB_PROP_KBIF_IS_MSI_ENABLED_BASE_CAST
#define PDB_PROP_KBIF_IS_MSI_ENABLED_BASE_NAME PDB_PROP_KBIF_IS_MSI_ENABLED
#define PDB_PROP_KBIF_FORCE_PCIE_CONFIG_SAVE_BASE_CAST
#define PDB_PROP_KBIF_FORCE_PCIE_CONFIG_SAVE_BASE_NAME PDB_PROP_KBIF_FORCE_PCIE_CONFIG_SAVE
#define PDB_PROP_KBIF_PCIE_GEN4_CAPABLE_BASE_CAST
#define PDB_PROP_KBIF_PCIE_GEN4_CAPABLE_BASE_NAME PDB_PROP_KBIF_PCIE_GEN4_CAPABLE
#define PDB_PROP_KBIF_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KBIF_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
#define PDB_PROP_KBIF_IS_MSI_CACHED_BASE_CAST
#define PDB_PROP_KBIF_IS_MSI_CACHED_BASE_NAME PDB_PROP_KBIF_IS_MSI_CACHED
#define PDB_PROP_KBIF_DEVICE_IS_MULTIFUNCTION_BASE_CAST
#define PDB_PROP_KBIF_DEVICE_IS_MULTIFUNCTION_BASE_NAME PDB_PROP_KBIF_DEVICE_IS_MULTIFUNCTION
#define PDB_PROP_KBIF_ALLOW_REARM_MSI_FOR_VF_BASE_CAST
#define PDB_PROP_KBIF_ALLOW_REARM_MSI_FOR_VF_BASE_NAME PDB_PROP_KBIF_ALLOW_REARM_MSI_FOR_VF
#define PDB_PROP_KBIF_SUPPORT_NONCOHERENT_BASE_CAST
#define PDB_PROP_KBIF_SUPPORT_NONCOHERENT_BASE_NAME PDB_PROP_KBIF_SUPPORT_NONCOHERENT
#define PDB_PROP_KBIF_IS_MSIX_ENABLED_BASE_CAST
#define PDB_PROP_KBIF_IS_MSIX_ENABLED_BASE_NAME PDB_PROP_KBIF_IS_MSIX_ENABLED
#define PDB_PROP_KBIF_P2P_WRITES_DISABLED_BASE_CAST
#define PDB_PROP_KBIF_P2P_WRITES_DISABLED_BASE_NAME PDB_PROP_KBIF_P2P_WRITES_DISABLED
#define PDB_PROP_KBIF_FLR_SUPPORTED_BASE_CAST
#define PDB_PROP_KBIF_FLR_SUPPORTED_BASE_NAME PDB_PROP_KBIF_FLR_SUPPORTED
#define PDB_PROP_KBIF_SYSTEM_ACCESS_DISABLED_BASE_CAST
#define PDB_PROP_KBIF_SYSTEM_ACCESS_DISABLED_BASE_NAME PDB_PROP_KBIF_SYSTEM_ACCESS_DISABLED
#define PDB_PROP_KBIF_FLR_PRE_CONDITIONING_REQUIRED_BASE_CAST
#define PDB_PROP_KBIF_FLR_PRE_CONDITIONING_REQUIRED_BASE_NAME PDB_PROP_KBIF_FLR_PRE_CONDITIONING_REQUIRED
#define PDB_PROP_KBIF_PCIE_RELAXED_ORDERING_SET_IN_EMULATED_CONFIG_SPACE_BASE_CAST
#define PDB_PROP_KBIF_PCIE_RELAXED_ORDERING_SET_IN_EMULATED_CONFIG_SPACE_BASE_NAME PDB_PROP_KBIF_PCIE_RELAXED_ORDERING_SET_IN_EMULATED_CONFIG_SPACE
#define PDB_PROP_KBIF_GCX_PMU_CFG_SPACE_RESTORE_BASE_CAST
#define PDB_PROP_KBIF_GCX_PMU_CFG_SPACE_RESTORE_BASE_NAME PDB_PROP_KBIF_GCX_PMU_CFG_SPACE_RESTORE
#define PDB_PROP_KBIF_64BIT_BAR0_SUPPORTED_BASE_CAST
#define PDB_PROP_KBIF_64BIT_BAR0_SUPPORTED_BASE_NAME PDB_PROP_KBIF_64BIT_BAR0_SUPPORTED
#define PDB_PROP_KBIF_UPSTREAM_LTR_SUPPORT_WAR_BUG_200634944_BASE_CAST
#define PDB_PROP_KBIF_UPSTREAM_LTR_SUPPORT_WAR_BUG_200634944_BASE_NAME PDB_PROP_KBIF_UPSTREAM_LTR_SUPPORT_WAR_BUG_200634944
#define PDB_PROP_KBIF_IS_MSIX_CACHED_BASE_CAST
#define PDB_PROP_KBIF_IS_MSIX_CACHED_BASE_NAME PDB_PROP_KBIF_IS_MSIX_CACHED
#define PDB_PROP_KBIF_P2P_READS_DISABLED_BASE_CAST
#define PDB_PROP_KBIF_P2P_READS_DISABLED_BASE_NAME PDB_PROP_KBIF_P2P_READS_DISABLED
#define PDB_PROP_KBIF_SECONDARY_BUS_RESET_SUPPORTED_BASE_CAST
#define PDB_PROP_KBIF_SECONDARY_BUS_RESET_SUPPORTED_BASE_NAME PDB_PROP_KBIF_SECONDARY_BUS_RESET_SUPPORTED
#define PDB_PROP_KBIF_IS_FMODEL_MSI_BROKEN_BASE_CAST
#define PDB_PROP_KBIF_IS_FMODEL_MSI_BROKEN_BASE_NAME PDB_PROP_KBIF_IS_FMODEL_MSI_BROKEN
#define PDB_PROP_KBIF_SECONDARY_BUS_RESET_ENABLED_BASE_CAST
#define PDB_PROP_KBIF_SECONDARY_BUS_RESET_ENABLED_BASE_NAME PDB_PROP_KBIF_SECONDARY_BUS_RESET_ENABLED
312
// NVOC object-construction entry points for KernelBif
NV_STATUS __nvoc_objCreateDynamic_KernelBif(KernelBif**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelBif(KernelBif**, Dynamic*, NvU32);
#define __objCreate_KernelBif(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelBif((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
318
// Dispatch wrappers: each kbif* call is routed through the object's vtable
// via the corresponding *_DISPATCH macro. The _HAL variants are aliases kept
// for HAL-style call sites; they dispatch identically.
#define kbifConstructEngine(pGpu, pKernelBif, arg0) kbifConstructEngine_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifStateInitLocked(pGpu, pKernelBif) kbifStateInitLocked_DISPATCH(pGpu, pKernelBif)
#define kbifStateLoad(pGpu, pKernelBif, arg0) kbifStateLoad_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifStateLoad_HAL(pGpu, pKernelBif, arg0) kbifStateLoad_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifStatePostLoad(pGpu, pKernelBif, arg0) kbifStatePostLoad_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifStatePostLoad_HAL(pGpu, pKernelBif, arg0) kbifStatePostLoad_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifStateUnload(pGpu, pKernelBif, arg0) kbifStateUnload_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifStateUnload_HAL(pGpu, pKernelBif, arg0) kbifStateUnload_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifGetBusIntfType(pKernelBif) kbifGetBusIntfType_DISPATCH(pKernelBif)
#define kbifGetBusIntfType_HAL(pKernelBif) kbifGetBusIntfType_DISPATCH(pKernelBif)
#define kbifInitDmaCaps(pGpu, pKernelBif) kbifInitDmaCaps_DISPATCH(pGpu, pKernelBif)
#define kbifInitDmaCaps_HAL(pGpu, pKernelBif) kbifInitDmaCaps_DISPATCH(pGpu, pKernelBif)
#define kbifSavePcieConfigRegisters(pGpu, pKernelBif) kbifSavePcieConfigRegisters_DISPATCH(pGpu, pKernelBif)
#define kbifSavePcieConfigRegisters_HAL(pGpu, pKernelBif) kbifSavePcieConfigRegisters_DISPATCH(pGpu, pKernelBif)
#define kbifRestorePcieConfigRegisters(pGpu, pKernelBif) kbifRestorePcieConfigRegisters_DISPATCH(pGpu, pKernelBif)
#define kbifRestorePcieConfigRegisters_HAL(pGpu, pKernelBif) kbifRestorePcieConfigRegisters_DISPATCH(pGpu, pKernelBif)
#define kbifGetXveStatusBits(pGpu, pKernelBif, pBits, pStatus) kbifGetXveStatusBits_DISPATCH(pGpu, pKernelBif, pBits, pStatus)
#define kbifGetXveStatusBits_HAL(pGpu, pKernelBif, pBits, pStatus) kbifGetXveStatusBits_DISPATCH(pGpu, pKernelBif, pBits, pStatus)
#define kbifClearXveStatus(pGpu, pKernelBif, pStatus) kbifClearXveStatus_DISPATCH(pGpu, pKernelBif, pStatus)
#define kbifClearXveStatus_HAL(pGpu, pKernelBif, pStatus) kbifClearXveStatus_DISPATCH(pGpu, pKernelBif, pStatus)
#define kbifGetXveAerBits(pGpu, pKernelBif, pBits) kbifGetXveAerBits_DISPATCH(pGpu, pKernelBif, pBits)
#define kbifGetXveAerBits_HAL(pGpu, pKernelBif, pBits) kbifGetXveAerBits_DISPATCH(pGpu, pKernelBif, pBits)
#define kbifClearXveAer(pGpu, pKernelBif, bits) kbifClearXveAer_DISPATCH(pGpu, pKernelBif, bits)
#define kbifClearXveAer_HAL(pGpu, pKernelBif, bits) kbifClearXveAer_DISPATCH(pGpu, pKernelBif, bits)
#define kbifGetPcieConfigAccessTestRegisters(pGpu, pKernelBif, pciStart, pcieStart) kbifGetPcieConfigAccessTestRegisters_DISPATCH(pGpu, pKernelBif, pciStart, pcieStart)
#define kbifGetPcieConfigAccessTestRegisters_HAL(pGpu, pKernelBif, pciStart, pcieStart) kbifGetPcieConfigAccessTestRegisters_DISPATCH(pGpu, pKernelBif, pciStart, pcieStart)
#define kbifVerifyPcieConfigAccessTestRegisters(pGpu, pKernelBif, nvXveId, nvXveVccapHdr) kbifVerifyPcieConfigAccessTestRegisters_DISPATCH(pGpu, pKernelBif, nvXveId, nvXveVccapHdr)
#define kbifVerifyPcieConfigAccessTestRegisters_HAL(pGpu, pKernelBif, nvXveId, nvXveVccapHdr) kbifVerifyPcieConfigAccessTestRegisters_DISPATCH(pGpu, pKernelBif, nvXveId, nvXveVccapHdr)
#define kbifRearmMSI(pGpu, pKernelBif) kbifRearmMSI_DISPATCH(pGpu, pKernelBif)
#define kbifRearmMSI_HAL(pGpu, pKernelBif) kbifRearmMSI_DISPATCH(pGpu, pKernelBif)
#define kbifIsMSIEnabledInHW(pGpu, pKernelBif) kbifIsMSIEnabledInHW_DISPATCH(pGpu, pKernelBif)
#define kbifIsMSIEnabledInHW_HAL(pGpu, pKernelBif) kbifIsMSIEnabledInHW_DISPATCH(pGpu, pKernelBif)
#define kbifIsMSIXEnabledInHW(pGpu, pKernelBif) kbifIsMSIXEnabledInHW_DISPATCH(pGpu, pKernelBif)
#define kbifIsMSIXEnabledInHW_HAL(pGpu, pKernelBif) kbifIsMSIXEnabledInHW_DISPATCH(pGpu, pKernelBif)
#define kbifIsPciIoAccessEnabled(pGpu, pKernelBif) kbifIsPciIoAccessEnabled_DISPATCH(pGpu, pKernelBif)
#define kbifIsPciIoAccessEnabled_HAL(pGpu, pKernelBif) kbifIsPciIoAccessEnabled_DISPATCH(pGpu, pKernelBif)
#define kbifIs3dController(pGpu, pKernelBif) kbifIs3dController_DISPATCH(pGpu, pKernelBif)
#define kbifIs3dController_HAL(pGpu, pKernelBif) kbifIs3dController_DISPATCH(pGpu, pKernelBif)
#define kbifExecC73War(pGpu, pKernelBif) kbifExecC73War_DISPATCH(pGpu, pKernelBif)
#define kbifExecC73War_HAL(pGpu, pKernelBif) kbifExecC73War_DISPATCH(pGpu, pKernelBif)
#define kbifEnableExtendedTagSupport(pGpu, pKernelBif) kbifEnableExtendedTagSupport_DISPATCH(pGpu, pKernelBif)
#define kbifEnableExtendedTagSupport_HAL(pGpu, pKernelBif) kbifEnableExtendedTagSupport_DISPATCH(pGpu, pKernelBif)
#define kbifPcieConfigEnableRelaxedOrdering(pGpu, pKernelBif) kbifPcieConfigEnableRelaxedOrdering_DISPATCH(pGpu, pKernelBif)
#define kbifPcieConfigEnableRelaxedOrdering_HAL(pGpu, pKernelBif) kbifPcieConfigEnableRelaxedOrdering_DISPATCH(pGpu, pKernelBif)
#define kbifPcieConfigDisableRelaxedOrdering(pGpu, pKernelBif) kbifPcieConfigDisableRelaxedOrdering_DISPATCH(pGpu, pKernelBif)
#define kbifPcieConfigDisableRelaxedOrdering_HAL(pGpu, pKernelBif) kbifPcieConfigDisableRelaxedOrdering_DISPATCH(pGpu, pKernelBif)
#define kbifInitRelaxedOrderingFromEmulatedConfigSpace(pGpu, pBif) kbifInitRelaxedOrderingFromEmulatedConfigSpace_DISPATCH(pGpu, pBif)
#define kbifInitRelaxedOrderingFromEmulatedConfigSpace_HAL(pGpu, pBif) kbifInitRelaxedOrderingFromEmulatedConfigSpace_DISPATCH(pGpu, pBif)
#define kbifEnableNoSnoop(pGpu, pKernelBif, bEnable) kbifEnableNoSnoop_DISPATCH(pGpu, pKernelBif, bEnable)
#define kbifEnableNoSnoop_HAL(pGpu, pKernelBif, bEnable) kbifEnableNoSnoop_DISPATCH(pGpu, pKernelBif, bEnable)
#define kbifApplyWARBug3208922(pGpu, pKernelBif) kbifApplyWARBug3208922_DISPATCH(pGpu, pKernelBif)
#define kbifApplyWARBug3208922_HAL(pGpu, pKernelBif) kbifApplyWARBug3208922_DISPATCH(pGpu, pKernelBif)
#define kbifProbePcieReqAtomicCaps(pGpu, pKernelBif) kbifProbePcieReqAtomicCaps_DISPATCH(pGpu, pKernelBif)
#define kbifProbePcieReqAtomicCaps_HAL(pGpu, pKernelBif) kbifProbePcieReqAtomicCaps_DISPATCH(pGpu, pKernelBif)
#define kbifEnablePcieAtomics(pGpu, pKernelBif) kbifEnablePcieAtomics_DISPATCH(pGpu, pKernelBif)
#define kbifEnablePcieAtomics_HAL(pGpu, pKernelBif) kbifEnablePcieAtomics_DISPATCH(pGpu, pKernelBif)
#define kbifDoFunctionLevelReset(pGpu, pKernelBif) kbifDoFunctionLevelReset_DISPATCH(pGpu, pKernelBif)
#define kbifDoFunctionLevelReset_HAL(pGpu, pKernelBif) kbifDoFunctionLevelReset_DISPATCH(pGpu, pKernelBif)
#define kbifInitXveRegMap(pGpu, pKernelBif, arg0) kbifInitXveRegMap_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifInitXveRegMap_HAL(pGpu, pKernelBif, arg0) kbifInitXveRegMap_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifGetMSIXTableVectorControlSize(pGpu, pKernelBif) kbifGetMSIXTableVectorControlSize_DISPATCH(pGpu, pKernelBif)
#define kbifGetMSIXTableVectorControlSize_HAL(pGpu, pKernelBif) kbifGetMSIXTableVectorControlSize_DISPATCH(pGpu, pKernelBif)
#define kbifSaveMsixTable(pGpu, pKernelBif) kbifSaveMsixTable_DISPATCH(pGpu, pKernelBif)
#define kbifSaveMsixTable_HAL(pGpu, pKernelBif) kbifSaveMsixTable_DISPATCH(pGpu, pKernelBif)
#define kbifRestoreMsixTable(pGpu, pKernelBif) kbifRestoreMsixTable_DISPATCH(pGpu, pKernelBif)
#define kbifRestoreMsixTable_HAL(pGpu, pKernelBif) kbifRestoreMsixTable_DISPATCH(pGpu, pKernelBif)
#define kbifConfigAccessWait(pGpu, pKernelBif, pTimeout) kbifConfigAccessWait_DISPATCH(pGpu, pKernelBif, pTimeout)
#define kbifConfigAccessWait_HAL(pGpu, pKernelBif, pTimeout) kbifConfigAccessWait_DISPATCH(pGpu, pKernelBif, pTimeout)
#define kbifGetPciConfigSpacePriMirror(pGpu, pKernelBif, pMirrorBase, pMirrorSize) kbifGetPciConfigSpacePriMirror_DISPATCH(pGpu, pKernelBif, pMirrorBase, pMirrorSize)
#define kbifGetPciConfigSpacePriMirror_HAL(pGpu, pKernelBif, pMirrorBase, pMirrorSize) kbifGetPciConfigSpacePriMirror_DISPATCH(pGpu, pKernelBif, pMirrorBase, pMirrorSize)
#define kbifGetBusOptionsAddr(pGpu, pKernelBif, options, addrReg) kbifGetBusOptionsAddr_DISPATCH(pGpu, pKernelBif, options, addrReg)
#define kbifGetBusOptionsAddr_HAL(pGpu, pKernelBif, options, addrReg) kbifGetBusOptionsAddr_DISPATCH(pGpu, pKernelBif, options, addrReg)
#define kbifPreOsGlobalErotGrantRequest(pGpu, pKernelBif) kbifPreOsGlobalErotGrantRequest_DISPATCH(pGpu, pKernelBif)
#define kbifPreOsGlobalErotGrantRequest_HAL(pGpu, pKernelBif) kbifPreOsGlobalErotGrantRequest_DISPATCH(pGpu, pKernelBif)
#define kbifStopSysMemRequests(pGpu, pKernelBif, bStop) kbifStopSysMemRequests_DISPATCH(pGpu, pKernelBif, bStop)
#define kbifStopSysMemRequests_HAL(pGpu, pKernelBif, bStop) kbifStopSysMemRequests_DISPATCH(pGpu, pKernelBif, bStop)
#define kbifWaitForTransactionsComplete(pGpu, pKernelBif) kbifWaitForTransactionsComplete_DISPATCH(pGpu, pKernelBif)
#define kbifWaitForTransactionsComplete_HAL(pGpu, pKernelBif) kbifWaitForTransactionsComplete_DISPATCH(pGpu, pKernelBif)
#define kbifTriggerFlr(pGpu, pKernelBif) kbifTriggerFlr_DISPATCH(pGpu, pKernelBif)
#define kbifTriggerFlr_HAL(pGpu, pKernelBif) kbifTriggerFlr_DISPATCH(pGpu, pKernelBif)
#define kbifCacheFlrSupport(pGpu, pKernelBif) kbifCacheFlrSupport_DISPATCH(pGpu, pKernelBif)
#define kbifCacheFlrSupport_HAL(pGpu, pKernelBif) kbifCacheFlrSupport_DISPATCH(pGpu, pKernelBif)
#define kbifCache64bBar0Support(pGpu, pKernelBif) kbifCache64bBar0Support_DISPATCH(pGpu, pKernelBif)
#define kbifCache64bBar0Support_HAL(pGpu, pKernelBif) kbifCache64bBar0Support_DISPATCH(pGpu, pKernelBif)
#define kbifCacheVFInfo(pGpu, pKernelBif) kbifCacheVFInfo_DISPATCH(pGpu, pKernelBif)
#define kbifCacheVFInfo_HAL(pGpu, pKernelBif) kbifCacheVFInfo_DISPATCH(pGpu, pKernelBif)
#define kbifRestoreBar0(pGpu, pKernelBif, arg0, arg1) kbifRestoreBar0_DISPATCH(pGpu, pKernelBif, arg0, arg1)
#define kbifRestoreBar0_HAL(pGpu, pKernelBif, arg0, arg1) kbifRestoreBar0_DISPATCH(pGpu, pKernelBif, arg0, arg1)
#define kbifAnyBarsAreValid(pGpu, pKernelBif) kbifAnyBarsAreValid_DISPATCH(pGpu, pKernelBif)
#define kbifAnyBarsAreValid_HAL(pGpu, pKernelBif) kbifAnyBarsAreValid_DISPATCH(pGpu, pKernelBif)
#define kbifRestoreBarsAndCommand(pGpu, pKernelBif) kbifRestoreBarsAndCommand_DISPATCH(pGpu, pKernelBif)
#define kbifRestoreBarsAndCommand_HAL(pGpu, pKernelBif) kbifRestoreBarsAndCommand_DISPATCH(pGpu, pKernelBif)
#define kbifStoreBarRegOffsets(pGpu, pKernelBif, arg0) kbifStoreBarRegOffsets_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifStoreBarRegOffsets_HAL(pGpu, pKernelBif, arg0) kbifStoreBarRegOffsets_DISPATCH(pGpu, pKernelBif, arg0)
#define kbifInit(pGpu, pKernelBif) kbifInit_DISPATCH(pGpu, pKernelBif)
#define kbifInit_HAL(pGpu, pKernelBif) kbifInit_DISPATCH(pGpu, pKernelBif)
#define kbifGetValidEnginesToReset(pGpu, pKernelBif) kbifGetValidEnginesToReset_DISPATCH(pGpu, pKernelBif)
#define kbifGetValidEnginesToReset_HAL(pGpu, pKernelBif) kbifGetValidEnginesToReset_DISPATCH(pGpu, pKernelBif)
#define kbifGetValidDeviceEnginesToReset(pGpu, pKernelBif) kbifGetValidDeviceEnginesToReset_DISPATCH(pGpu, pKernelBif)
#define kbifGetValidDeviceEnginesToReset_HAL(pGpu, pKernelBif) kbifGetValidDeviceEnginesToReset_DISPATCH(pGpu, pKernelBif)
#define kbifGetMigrationBandwidth(pGpu, pKernelBif, pBandwidth) kbifGetMigrationBandwidth_DISPATCH(pGpu, pKernelBif, pBandwidth)
#define kbifGetMigrationBandwidth_HAL(pGpu, pKernelBif, pBandwidth) kbifGetMigrationBandwidth_DISPATCH(pGpu, pKernelBif, pBandwidth)
#define kbifGetEccCounts(pGpu, pKernelBif) kbifGetEccCounts_DISPATCH(pGpu, pKernelBif)
#define kbifGetEccCounts_HAL(pGpu, pKernelBif) kbifGetEccCounts_DISPATCH(pGpu, pKernelBif)
// Engine-state overrides inherited from OBJENGSTATE (no _HAL aliases)
#define kbifStatePreLoad(pGpu, pEngstate, arg0) kbifStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define kbifStatePostUnload(pGpu, pEngstate, arg0) kbifStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define kbifStateDestroy(pGpu, pEngstate) kbifStateDestroy_DISPATCH(pGpu, pEngstate)
#define kbifStatePreUnload(pGpu, pEngstate, arg0) kbifStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define kbifStateInitUnlocked(pGpu, pEngstate) kbifStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kbifInitMissing(pGpu, pEngstate) kbifInitMissing_DISPATCH(pGpu, pEngstate)
#define kbifStatePreInitLocked(pGpu, pEngstate) kbifStatePreInitLocked_DISPATCH(pGpu, pEngstate)
#define kbifStatePreInitUnlocked(pGpu, pEngstate) kbifStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kbifIsPresent(pGpu, pEngstate) kbifIsPresent_DISPATCH(pGpu, pEngstate)
// Destructor (GM107 HAL implementation); wired to NVOC teardown below
void kbifDestruct_GM107(struct KernelBif *pKernelBif);


#define __nvoc_kbifDestruct(pKernelBif) kbifDestruct_GM107(pKernelBif)
436 void kbifClearConfigErrors_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool arg0, NvU32 arg1);
437
438
439 #ifdef __nvoc_kernel_bif_h_disabled
kbifClearConfigErrors(struct OBJGPU * pGpu,struct KernelBif * pKernelBif,NvBool arg0,NvU32 arg1)440 static inline void kbifClearConfigErrors(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool arg0, NvU32 arg1) {
441 NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
442 }
443 #else //__nvoc_kernel_bif_h_disabled
444 #define kbifClearConfigErrors(pGpu, pKernelBif, arg0, arg1) kbifClearConfigErrors_IMPL(pGpu, pKernelBif, arg0, arg1)
445 #endif //__nvoc_kernel_bif_h_disabled
446
447 #define kbifClearConfigErrors_HAL(pGpu, pKernelBif, arg0, arg1) kbifClearConfigErrors(pGpu, pKernelBif, arg0, arg1)
448
void kbifDisableP2PTransactions_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);


#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and does nothing.
static inline void kbifDisableP2PTransactions(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifDisableP2PTransactions(pGpu, pKernelBif) kbifDisableP2PTransactions_TU102(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled

#define kbifDisableP2PTransactions_HAL(pGpu, pKernelBif) kbifDisableP2PTransactions(pGpu, pKernelBif)
461
462 NV_STATUS kbifGetVFSparseMmapRegions_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, KERNEL_HOST_VGPU_DEVICE *pKernelHostVgpuDevice, NvU64 osPageSize, NvU32 *pNumAreas, NvU64 *pOffsets, NvU64 *pSizes);
463
464
465 #ifdef __nvoc_kernel_bif_h_disabled
kbifGetVFSparseMmapRegions(struct OBJGPU * pGpu,struct KernelBif * pKernelBif,KERNEL_HOST_VGPU_DEVICE * pKernelHostVgpuDevice,NvU64 osPageSize,NvU32 * pNumAreas,NvU64 * pOffsets,NvU64 * pSizes)466 static inline NV_STATUS kbifGetVFSparseMmapRegions(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, KERNEL_HOST_VGPU_DEVICE *pKernelHostVgpuDevice, NvU64 osPageSize, NvU32 *pNumAreas, NvU64 *pOffsets, NvU64 *pSizes) {
467 NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
468 return NV_ERR_NOT_SUPPORTED;
469 }
470 #else //__nvoc_kernel_bif_h_disabled
471 #define kbifGetVFSparseMmapRegions(pGpu, pKernelBif, pKernelHostVgpuDevice, osPageSize, pNumAreas, pOffsets, pSizes) kbifGetVFSparseMmapRegions_TU102(pGpu, pKernelBif, pKernelHostVgpuDevice, osPageSize, pNumAreas, pOffsets, pSizes)
472 #endif //__nvoc_kernel_bif_h_disabled
473
474 #define kbifGetVFSparseMmapRegions_HAL(pGpu, pKernelBif, pKernelHostVgpuDevice, osPageSize, pNumAreas, pOffsets, pSizes) kbifGetVFSparseMmapRegions(pGpu, pKernelBif, pKernelHostVgpuDevice, osPageSize, pNumAreas, pOffsets, pSizes)
475
476 NV_STATUS kbifSaveMSIXVectorControlMasks_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *arg0);
477
478
479 #ifdef __nvoc_kernel_bif_h_disabled
kbifSaveMSIXVectorControlMasks(struct OBJGPU * pGpu,struct KernelBif * pKernelBif,NvU32 * arg0)480 static inline NV_STATUS kbifSaveMSIXVectorControlMasks(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *arg0) {
481 NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
482 return NV_ERR_NOT_SUPPORTED;
483 }
484 #else //__nvoc_kernel_bif_h_disabled
485 #define kbifSaveMSIXVectorControlMasks(pGpu, pKernelBif, arg0) kbifSaveMSIXVectorControlMasks_TU102(pGpu, pKernelBif, arg0)
486 #endif //__nvoc_kernel_bif_h_disabled
487
488 #define kbifSaveMSIXVectorControlMasks_HAL(pGpu, pKernelBif, arg0) kbifSaveMSIXVectorControlMasks(pGpu, pKernelBif, arg0)
489
490 NV_STATUS kbifRestoreMSIXVectorControlMasks_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0);
491
492
493 #ifdef __nvoc_kernel_bif_h_disabled
kbifRestoreMSIXVectorControlMasks(struct OBJGPU * pGpu,struct KernelBif * pKernelBif,NvU32 arg0)494 static inline NV_STATUS kbifRestoreMSIXVectorControlMasks(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0) {
495 NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
496 return NV_ERR_NOT_SUPPORTED;
497 }
498 #else //__nvoc_kernel_bif_h_disabled
499 #define kbifRestoreMSIXVectorControlMasks(pGpu, pKernelBif, arg0) kbifRestoreMSIXVectorControlMasks_TU102(pGpu, pKernelBif, arg0)
500 #endif //__nvoc_kernel_bif_h_disabled
501
502 #define kbifRestoreMSIXVectorControlMasks_HAL(pGpu, pKernelBif, arg0) kbifRestoreMSIXVectorControlMasks(pGpu, pKernelBif, arg0)
503
504 NV_STATUS kbifDisableSysmemAccess_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool bDisable);
505
506
507 #ifdef __nvoc_kernel_bif_h_disabled
kbifDisableSysmemAccess(struct OBJGPU * pGpu,struct KernelBif * pKernelBif,NvBool bDisable)508 static inline NV_STATUS kbifDisableSysmemAccess(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool bDisable) {
509 NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
510 return NV_ERR_NOT_SUPPORTED;
511 }
512 #else //__nvoc_kernel_bif_h_disabled
513 #define kbifDisableSysmemAccess(pGpu, pKernelBif, bDisable) kbifDisableSysmemAccess_GM107(pGpu, pKernelBif, bDisable)
514 #endif //__nvoc_kernel_bif_h_disabled
515
516 #define kbifDisableSysmemAccess_HAL(pGpu, pKernelBif, bDisable) kbifDisableSysmemAccess(pGpu, pKernelBif, bDisable)
517
kbifCacheMnocSupport_b3696a(struct OBJGPU * pGpu,struct KernelBif * pKernelBif)518 static inline void kbifCacheMnocSupport_b3696a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
519 return;
520 }
521
522
523 #ifdef __nvoc_kernel_bif_h_disabled
kbifCacheMnocSupport(struct OBJGPU * pGpu,struct KernelBif * pKernelBif)524 static inline void kbifCacheMnocSupport(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
525 NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
526 }
527 #else //__nvoc_kernel_bif_h_disabled
528 #define kbifCacheMnocSupport(pGpu, pKernelBif) kbifCacheMnocSupport_b3696a(pGpu, pKernelBif)
529 #endif //__nvoc_kernel_bif_h_disabled
530
531 #define kbifCacheMnocSupport_HAL(pGpu, pKernelBif) kbifCacheMnocSupport(pGpu, pKernelBif)
532
NV_STATUS kbifConstructEngine_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, ENGDESCRIPTOR arg0);

//
// NVOC virtual-dispatch thunks: each *_DISPATCH inline forwards through the
// per-object function pointer (__kbifXxx__) installed in the KernelBif
// vtable at object construction. The *_IMPL and per-chip prototypes above
// each thunk are the possible targets of that pointer.
//
static inline NV_STATUS kbifConstructEngine_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, ENGDESCRIPTOR arg0) {
    return pKernelBif->__kbifConstructEngine__(pGpu, pKernelBif, arg0);
}

NV_STATUS kbifStateInitLocked_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifStateInitLocked_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifStateInitLocked__(pGpu, pKernelBif);
}

NV_STATUS kbifStateLoad_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0);

static inline NV_STATUS kbifStateLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0) {
    return pKernelBif->__kbifStateLoad__(pGpu, pKernelBif, arg0);
}

// Generated constant variant (_56cd7a = "return NV_OK"): used where the
// post-load step is a no-op that must still report success.
static inline NV_STATUS kbifStatePostLoad_56cd7a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0) {
    return NV_OK;
}

NV_STATUS kbifStatePostLoad_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0);

static inline NV_STATUS kbifStatePostLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0) {
    return pKernelBif->__kbifStatePostLoad__(pGpu, pKernelBif, arg0);
}

NV_STATUS kbifStateUnload_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0);

static inline NV_STATUS kbifStateUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0) {
    return pKernelBif->__kbifStateUnload__(pGpu, pKernelBif, arg0);
}

// Fixed bus-interface-type codes; the values 1 and 3 presumably map to
// NV2080 bus-type constants (e.g. PCI vs PCIe) — confirm against
// ctrl2080bus.h before relying on the meaning.
static inline NvU32 kbifGetBusIntfType_28ceda(struct KernelBif *pKernelBif) {
    return (1);
}

static inline NvU32 kbifGetBusIntfType_2f2c74(struct KernelBif *pKernelBif) {
    return (3);
}

static inline NvU32 kbifGetBusIntfType_DISPATCH(struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifGetBusIntfType__(pKernelBif);
}

// DMA capability initialization; VGPUSTUB variant targets the vGPU path.
void kbifInitDmaCaps_VGPUSTUB(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

void kbifInitDmaCaps_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifInitDmaCaps_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifInitDmaCaps__(pGpu, pKernelBif);
}

// Save/restore of PCIe config space registers around reset/suspend paths.
NV_STATUS kbifSavePcieConfigRegisters_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NV_STATUS kbifSavePcieConfigRegisters_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifSavePcieConfigRegisters_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifSavePcieConfigRegisters__(pGpu, pKernelBif);
}

NV_STATUS kbifRestorePcieConfigRegisters_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NV_STATUS kbifRestorePcieConfigRegisters_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifRestorePcieConfigRegisters_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifRestorePcieConfigRegisters__(pGpu, pKernelBif);
}
602
// XVE (PCIe endpoint) status query/clear: pBits receives the decoded status,
// pStatus the raw register value (roles per the GM107 implementation —
// confirm at the call site).
NV_STATUS kbifGetXveStatusBits_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pBits, NvU32 *pStatus);

NV_STATUS kbifGetXveStatusBits_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pBits, NvU32 *pStatus);

static inline NV_STATUS kbifGetXveStatusBits_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pBits, NvU32 *pStatus) {
    return pKernelBif->__kbifGetXveStatusBits__(pGpu, pKernelBif, pBits, pStatus);
}

NV_STATUS kbifClearXveStatus_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pStatus);

NV_STATUS kbifClearXveStatus_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pStatus);

static inline NV_STATUS kbifClearXveStatus_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pStatus) {
    return pKernelBif->__kbifClearXveStatus__(pGpu, pKernelBif, pStatus);
}

// AER (Advanced Error Reporting) bits: read into *pBits / clear by mask.
NV_STATUS kbifGetXveAerBits_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pBits);

NV_STATUS kbifGetXveAerBits_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pBits);

static inline NV_STATUS kbifGetXveAerBits_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pBits) {
    return pKernelBif->__kbifGetXveAerBits__(pGpu, pKernelBif, pBits);
}

NV_STATUS kbifClearXveAer_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 bits);

NV_STATUS kbifClearXveAer_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 bits);

static inline NV_STATUS kbifClearXveAer_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 bits) {
    return pKernelBif->__kbifClearXveAer__(pGpu, pKernelBif, bits);
}

// Registers used to sanity-test PCI/PCIe config-space accessibility.
void kbifGetPcieConfigAccessTestRegisters_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pciStart, NvU32 *pcieStart);

// Generated no-op variant: chips with no test registers leave the outputs
// untouched.
static inline void kbifGetPcieConfigAccessTestRegisters_b3696a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pciStart, NvU32 *pcieStart) {
    return;
}

static inline void kbifGetPcieConfigAccessTestRegisters_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pciStart, NvU32 *pcieStart) {
    pKernelBif->__kbifGetPcieConfigAccessTestRegisters__(pGpu, pKernelBif, pciStart, pcieStart);
}

NV_STATUS kbifVerifyPcieConfigAccessTestRegisters_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 nvXveId, NvU32 nvXveVccapHdr);

// Generated success variant: verification trivially passes where unsupported.
static inline NV_STATUS kbifVerifyPcieConfigAccessTestRegisters_56cd7a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 nvXveId, NvU32 nvXveVccapHdr) {
    return NV_OK;
}

static inline NV_STATUS kbifVerifyPcieConfigAccessTestRegisters_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 nvXveId, NvU32 nvXveVccapHdr) {
    return pKernelBif->__kbifVerifyPcieConfigAccessTestRegisters__(pGpu, pKernelBif, nvXveId, nvXveVccapHdr);
}

void kbifRearmMSI_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

// Generated assert variant: calling this on a chip that selected it is a
// programming error (MSI rearm not applicable).
static inline void kbifRearmMSI_f2d351(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_PRECOMP(0);
}

static inline void kbifRearmMSI_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifRearmMSI__(pGpu, pKernelBif);
}
664
// Hardware-state queries: whether MSI / MSI-X is currently enabled in the
// device's config space.
NvBool kbifIsMSIEnabledInHW_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NvBool kbifIsMSIEnabledInHW_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NvBool kbifIsMSIEnabledInHW_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifIsMSIEnabledInHW__(pGpu, pKernelBif);
}

NvBool kbifIsMSIXEnabledInHW_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NvBool kbifIsMSIXEnabledInHW_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NvBool kbifIsMSIXEnabledInHW_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifIsMSIXEnabledInHW__(pGpu, pKernelBif);
}

NvBool kbifIsPciIoAccessEnabled_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

// Generated constant variant: always reports NV_FALSE (the (0 != 0) spelling
// is NVOC's canonical false literal).
static inline NvBool kbifIsPciIoAccessEnabled_491d52(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return ((NvBool)(0 != 0));
}

static inline NvBool kbifIsPciIoAccessEnabled_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifIsPciIoAccessEnabled__(pGpu, pKernelBif);
}

// Whether the device enumerates as a 3D controller class device.
NvBool kbifIs3dController_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NvBool kbifIs3dController_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NvBool kbifIs3dController_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifIs3dController__(pGpu, pKernelBif);
}

// C73-chipset workaround; no-op on chips that selected the _b3696a variant.
void kbifExecC73War_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifExecC73War_b3696a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return;
}

static inline void kbifExecC73War_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifExecC73War__(pGpu, pKernelBif);
}

// PCIe extended-tag enablement (GH100+); no-op elsewhere.
void kbifEnableExtendedTagSupport_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifEnableExtendedTagSupport_b3696a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return;
}

static inline void kbifEnableExtendedTagSupport_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifEnableExtendedTagSupport__(pGpu, pKernelBif);
}

// PCIe relaxed-ordering enable/disable in device config space.
void kbifPcieConfigEnableRelaxedOrdering_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

void kbifPcieConfigEnableRelaxedOrdering_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifPcieConfigEnableRelaxedOrdering_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifPcieConfigEnableRelaxedOrdering__(pGpu, pKernelBif);
}

void kbifPcieConfigDisableRelaxedOrdering_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

void kbifPcieConfigDisableRelaxedOrdering_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifPcieConfigDisableRelaxedOrdering_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifPcieConfigDisableRelaxedOrdering__(pGpu, pKernelBif);
}
734
// Relaxed-ordering state seeded from the emulated (SR-IOV) config space;
// note this family takes pBif rather than pKernelBif — same object, different
// generated parameter name.
static inline void kbifInitRelaxedOrderingFromEmulatedConfigSpace_b3696a(struct OBJGPU *pGpu, struct KernelBif *pBif) {
    return;
}

void kbifInitRelaxedOrderingFromEmulatedConfigSpace_GA100(struct OBJGPU *pGpu, struct KernelBif *pBif);

static inline void kbifInitRelaxedOrderingFromEmulatedConfigSpace_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pBif) {
    pBif->__kbifInitRelaxedOrderingFromEmulatedConfigSpace__(pGpu, pBif);
}

// PCIe No-Snoop attribute control.
NV_STATUS kbifEnableNoSnoop_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool bEnable);

NV_STATUS kbifEnableNoSnoop_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool bEnable);

static inline NV_STATUS kbifEnableNoSnoop_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool bEnable) {
    return pKernelBif->__kbifEnableNoSnoop__(pGpu, pKernelBif, bEnable);
}

// Workaround for NVIDIA bug 3208922 (GA100 only; no-op elsewhere).
void kbifApplyWARBug3208922_GA100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifApplyWARBug3208922_b3696a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return;
}

static inline void kbifApplyWARBug3208922_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifApplyWARBug3208922__(pGpu, pKernelBif);
}

// PCIe requester-atomics capability probe / enable (GH100+; no-op before).
static inline void kbifProbePcieReqAtomicCaps_b3696a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return;
}

void kbifProbePcieReqAtomicCaps_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifProbePcieReqAtomicCaps_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifProbePcieReqAtomicCaps__(pGpu, pKernelBif);
}

void kbifEnablePcieAtomics_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifEnablePcieAtomics_b3696a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return;
}

static inline void kbifEnablePcieAtomics_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifEnablePcieAtomics__(pGpu, pKernelBif);
}

// Function Level Reset (FLR) execution.
NV_STATUS kbifDoFunctionLevelReset_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NV_STATUS kbifDoFunctionLevelReset_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifDoFunctionLevelReset_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifDoFunctionLevelReset__(pGpu, pKernelBif);
}

// Initializes the XVE register map; arg0 selects the map (semantics defined
// by the per-chip implementation — TODO confirm at call site).
NV_STATUS kbifInitXveRegMap_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU8 arg0);

NV_STATUS kbifInitXveRegMap_GA102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU8 arg0);

static inline NV_STATUS kbifInitXveRegMap_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU8 arg0) {
    return pKernelBif->__kbifInitXveRegMap__(pGpu, pKernelBif, arg0);
}

// Size of the MSI-X table's Vector Control portion, per chip.
NvU32 kbifGetMSIXTableVectorControlSize_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NvU32 kbifGetMSIXTableVectorControlSize_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NvU32 kbifGetMSIXTableVectorControlSize_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifGetMSIXTableVectorControlSize__(pGpu, pKernelBif);
}
806
// MSI-X table save/restore across reset (GH100+; earlier chips selected the
// _46f6a7 "return NV_ERR_NOT_SUPPORTED" generated variant).
NV_STATUS kbifSaveMsixTable_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifSaveMsixTable_46f6a7(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbifSaveMsixTable_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifSaveMsixTable__(pGpu, pKernelBif);
}

NV_STATUS kbifRestoreMsixTable_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifRestoreMsixTable_46f6a7(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbifRestoreMsixTable_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifRestoreMsixTable__(pGpu, pKernelBif);
}

// Waits (bounded by pTimeout) until config-space access is possible again,
// e.g. after a reset (GH100+ only).
NV_STATUS kbifConfigAccessWait_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, RMTIMEOUT *pTimeout);

static inline NV_STATUS kbifConfigAccessWait_46f6a7(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, RMTIMEOUT *pTimeout) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbifConfigAccessWait_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, RMTIMEOUT *pTimeout) {
    return pKernelBif->__kbifConfigAccessWait__(pGpu, pKernelBif, pTimeout);
}

// Location (base/size) of the PCI config space PRI mirror in BAR0.
NV_STATUS kbifGetPciConfigSpacePriMirror_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pMirrorBase, NvU32 *pMirrorSize);

NV_STATUS kbifGetPciConfigSpacePriMirror_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pMirrorBase, NvU32 *pMirrorSize);

static inline NV_STATUS kbifGetPciConfigSpacePriMirror_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pMirrorBase, NvU32 *pMirrorSize) {
    return pKernelBif->__kbifGetPciConfigSpacePriMirror__(pGpu, pKernelBif, pMirrorBase, pMirrorSize);
}

// Register address lookup for a given BUS_OPTIONS selector.
NV_STATUS kbifGetBusOptionsAddr_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, BUS_OPTIONS options, NvU32 *addrReg);

NV_STATUS kbifGetBusOptionsAddr_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, BUS_OPTIONS options, NvU32 *addrReg);

static inline NV_STATUS kbifGetBusOptionsAddr_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, BUS_OPTIONS options, NvU32 *addrReg) {
    return pKernelBif->__kbifGetBusOptionsAddr__(pGpu, pKernelBif, options, addrReg);
}

// Pre-OS global ERoT grant request (AD102+; trivial success elsewhere).
NV_STATUS kbifPreOsGlobalErotGrantRequest_AD102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifPreOsGlobalErotGrantRequest_56cd7a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return NV_OK;
}

static inline NV_STATUS kbifPreOsGlobalErotGrantRequest_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifPreOsGlobalErotGrantRequest__(pGpu, pKernelBif);
}

// Stops (bStop == NV_TRUE) or resumes GPU-initiated sysmem requests; the
// _56cd7a variant is a trivial-success no-op.
static inline NV_STATUS kbifStopSysMemRequests_56cd7a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool bStop) {
    return NV_OK;
}

NV_STATUS kbifStopSysMemRequests_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool bStop);

NV_STATUS kbifStopSysMemRequests_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool bStop);

static inline NV_STATUS kbifStopSysMemRequests_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvBool bStop) {
    return pKernelBif->__kbifStopSysMemRequests__(pGpu, pKernelBif, bStop);
}
874
// Waits for outstanding bus transactions to drain (typically before
// reset/FLR).
NV_STATUS kbifWaitForTransactionsComplete_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NV_STATUS kbifWaitForTransactionsComplete_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifWaitForTransactionsComplete_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifWaitForTransactionsComplete__(pGpu, pKernelBif);
}

// Initiates a Function Level Reset.
NV_STATUS kbifTriggerFlr_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NV_STATUS kbifTriggerFlr_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifTriggerFlr_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifTriggerFlr__(pGpu, pKernelBif);
}

// Caches whether FLR is supported by the device.
void kbifCacheFlrSupport_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

void kbifCacheFlrSupport_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifCacheFlrSupport_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifCacheFlrSupport__(pGpu, pKernelBif);
}

// Caches 64-bit BAR0 support (GA100+; no-op on earlier chips).
static inline void kbifCache64bBar0Support_b3696a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return;
}

void kbifCache64bBar0Support_GA100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

void kbifCache64bBar0Support_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifCache64bBar0Support_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifCache64bBar0Support__(pGpu, pKernelBif);
}

// Caches SR-IOV virtual function info.
void kbifCacheVFInfo_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

void kbifCacheVFInfo_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline void kbifCacheVFInfo_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    pKernelBif->__kbifCacheVFInfo__(pGpu, pKernelBif);
}

// BAR0 restore after reset; arg0/arg1 semantics are defined by the per-chip
// implementations (opaque handle + register list — TODO confirm at call site).
void kbifRestoreBar0_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, void *arg0, NvU32 *arg1);

void kbifRestoreBar0_GA100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, void *arg0, NvU32 *arg1);

static inline void kbifRestoreBar0_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, void *arg0, NvU32 *arg1) {
    pKernelBif->__kbifRestoreBar0__(pGpu, pKernelBif, arg0, arg1);
}

// Checks whether any of the device BARs hold valid programmed values.
NvBool kbifAnyBarsAreValid_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NvBool kbifAnyBarsAreValid_GA100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NvBool kbifAnyBarsAreValid_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifAnyBarsAreValid__(pGpu, pKernelBif);
}

// Restores BAR registers and the PCI command register.
NV_STATUS kbifRestoreBarsAndCommand_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NV_STATUS kbifRestoreBarsAndCommand_GA100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NV_STATUS kbifRestoreBarsAndCommand_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifRestoreBarsAndCommand_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifRestoreBarsAndCommand__(pGpu, pKernelBif);
}

// Records BAR register offsets (GA100+; no-op variant elsewhere).
void kbifStoreBarRegOffsets_GA100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0);

static inline void kbifStoreBarRegOffsets_b3696a(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0) {
    return;
}

static inline void kbifStoreBarRegOffsets_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 arg0) {
    pKernelBif->__kbifStoreBarRegOffsets__(pGpu, pKernelBif, arg0);
}
954
// Chip-specific BIF initialization.
NV_STATUS kbifInit_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NV_STATUS kbifInit_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NV_STATUS kbifInit_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifInit__(pGpu, pKernelBif);
}

// Engine masks eligible for reset; returned as a bitmask NvU32.
NvU32 kbifGetValidEnginesToReset_TU102(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

NvU32 kbifGetValidEnginesToReset_GA100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NvU32 kbifGetValidEnginesToReset_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifGetValidEnginesToReset__(pGpu, pKernelBif);
}

NvU32 kbifGetValidDeviceEnginesToReset_GA100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

// Generated constant variant: no device engines to reset on chips that
// selected it.
static inline NvU32 kbifGetValidDeviceEnginesToReset_15a734(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return 0U;
}

static inline NvU32 kbifGetValidDeviceEnginesToReset_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifGetValidDeviceEnginesToReset__(pGpu, pKernelBif);
}

// Migration bandwidth query (units defined by the implementation — TODO
// confirm against the NV2080 control that consumes it).
NV_STATUS kbifGetMigrationBandwidth_GA100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pBandwidth);

NV_STATUS kbifGetMigrationBandwidth_GM107(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pBandwidth);

static inline NV_STATUS kbifGetMigrationBandwidth_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 *pBandwidth) {
    return pKernelBif->__kbifGetMigrationBandwidth__(pGpu, pKernelBif, pBandwidth);
}

// BIF ECC error counts (GH100+; constant-zero variant elsewhere).
NvU32 kbifGetEccCounts_GH100(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

static inline NvU32 kbifGetEccCounts_4a4dee(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return 0;
}

static inline NvU32 kbifGetEccCounts_DISPATCH(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    return pKernelBif->__kbifGetEccCounts__(pGpu, pKernelBif);
}
998
// NVOC virtual dispatch for the engine-state StatePreLoad hook (arg0: flags word).
static inline NV_STATUS kbifStatePreLoad_DISPATCH(POBJGPU pGpu, struct KernelBif *pEngstate, NvU32 arg0) {
    return pEngstate->__kbifStatePreLoad__(pGpu, pEngstate, arg0);
}
1002
// NVOC virtual dispatch for the engine-state StatePostUnload hook (arg0: flags word).
static inline NV_STATUS kbifStatePostUnload_DISPATCH(POBJGPU pGpu, struct KernelBif *pEngstate, NvU32 arg0) {
    return pEngstate->__kbifStatePostUnload__(pGpu, pEngstate, arg0);
}
1006
// NVOC virtual dispatch for the engine-state StateDestroy hook (no return value).
static inline void kbifStateDestroy_DISPATCH(POBJGPU pGpu, struct KernelBif *pEngstate) {
    pEngstate->__kbifStateDestroy__(pGpu, pEngstate);
}
1010
// NVOC virtual dispatch for the engine-state StatePreUnload hook (arg0: flags word).
static inline NV_STATUS kbifStatePreUnload_DISPATCH(POBJGPU pGpu, struct KernelBif *pEngstate, NvU32 arg0) {
    return pEngstate->__kbifStatePreUnload__(pGpu, pEngstate, arg0);
}
1014
// NVOC virtual dispatch for the engine-state StateInitUnlocked hook.
static inline NV_STATUS kbifStateInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelBif *pEngstate) {
    return pEngstate->__kbifStateInitUnlocked__(pGpu, pEngstate);
}
1018
// NVOC virtual dispatch for the engine-state InitMissing hook (no return value).
static inline void kbifInitMissing_DISPATCH(POBJGPU pGpu, struct KernelBif *pEngstate) {
    pEngstate->__kbifInitMissing__(pGpu, pEngstate);
}
1022
// NVOC virtual dispatch for the engine-state StatePreInitLocked hook.
static inline NV_STATUS kbifStatePreInitLocked_DISPATCH(POBJGPU pGpu, struct KernelBif *pEngstate) {
    return pEngstate->__kbifStatePreInitLocked__(pGpu, pEngstate);
}
1026
// NVOC virtual dispatch for the engine-state StatePreInitUnlocked hook.
static inline NV_STATUS kbifStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelBif *pEngstate) {
    return pEngstate->__kbifStatePreInitUnlocked__(pGpu, pEngstate);
}
1030
// NVOC virtual dispatch for the engine-state IsPresent query.
static inline NvBool kbifIsPresent_DISPATCH(POBJGPU pGpu, struct KernelBif *pEngstate) {
    return pEngstate->__kbifIsPresent__(pGpu, pEngstate);
}
1034
// Non-virtual method: real implementation lives in the KernelBif .c file.
NV_STATUS kbifStaticInfoInit_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and reports unsupported.
static inline NV_STATUS kbifStaticInfoInit(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifStaticInfoInit(pGpu, pKernelBif) kbifStaticInfoInit_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1045
// Non-virtual method: real implementation lives in the KernelBif .c file.
void kbifInitPcieDeviceControlStatus_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts only (void return).
static inline void kbifInitPcieDeviceControlStatus(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifInitPcieDeviceControlStatus(pGpu, pKernelBif) kbifInitPcieDeviceControlStatus_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1055
// Non-virtual method: real implementation lives in the KernelBif .c file.
void kbifCheckAndRearmMSI_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts only (void return).
static inline void kbifCheckAndRearmMSI(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifCheckAndRearmMSI(pGpu, pKernelBif) kbifCheckAndRearmMSI_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1065
// Non-virtual method: real implementation lives in the KernelBif .c file.
NvBool kbifIsMSIEnabled_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and answers NV_FALSE.
static inline NvBool kbifIsMSIEnabled(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifIsMSIEnabled(pGpu, pKernelBif) kbifIsMSIEnabled_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1076
// Non-virtual method: real implementation lives in the KernelBif .c file.
NvBool kbifIsMSIXEnabled_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and answers NV_FALSE.
static inline NvBool kbifIsMSIXEnabled(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifIsMSIXEnabled(pGpu, pKernelBif) kbifIsMSIXEnabled_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1087
// Non-virtual method: real implementation lives in the KernelBif .c file.
NV_STATUS kbifPollDeviceOnBus_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and reports unsupported.
static inline NV_STATUS kbifPollDeviceOnBus(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifPollDeviceOnBus(pGpu, pKernelBif) kbifPollDeviceOnBus_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1098
// Non-virtual method: real implementation lives in the KernelBif .c file.
NvU32 kbifGetGpuLinkCapabilities_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and returns 0.
static inline NvU32 kbifGetGpuLinkCapabilities(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return 0;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifGetGpuLinkCapabilities(pGpu, pKernelBif) kbifGetGpuLinkCapabilities_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1109
// Non-virtual method: real implementation lives in the KernelBif .c file.
NvU32 kbifGetGpuLinkControlStatus_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and returns 0.
static inline NvU32 kbifGetGpuLinkControlStatus(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return 0;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifGetGpuLinkControlStatus(pGpu, pKernelBif) kbifGetGpuLinkControlStatus_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1120
// Non-virtual method: real implementation lives in the KernelBif .c file.
NvU32 kbifGetGpuDevControlStatus_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and returns 0.
static inline NvU32 kbifGetGpuDevControlStatus(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return 0;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifGetGpuDevControlStatus(pGpu, pKernelBif) kbifGetGpuDevControlStatus_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1131
// Non-virtual method: real implementation lives in the KernelBif .c file.
NvU32 kbifGetGpuDevControlStatus2_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and returns 0.
static inline NvU32 kbifGetGpuDevControlStatus2(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return 0;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifGetGpuDevControlStatus2(pGpu, pKernelBif) kbifGetGpuDevControlStatus2_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1142
// Non-virtual method: real implementation lives in the KernelBif .c file.
NvU32 kbifGetGpuL1PmSubstatesCtrl1_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and returns 0.
static inline NvU32 kbifGetGpuL1PmSubstatesCtrl1(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return 0;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifGetGpuL1PmSubstatesCtrl1(pGpu, pKernelBif) kbifGetGpuL1PmSubstatesCtrl1_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1153
// Non-virtual method: real implementation lives in the KernelBif .c file.
// Result is written through pciLinkMaxSpeed.
NV_STATUS kbifGetPciLinkMaxSpeedByPciGenInfo_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 pciLinkGenInfo, NvU32 *pciLinkMaxSpeed);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and reports unsupported.
static inline NV_STATUS kbifGetPciLinkMaxSpeedByPciGenInfo(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NvU32 pciLinkGenInfo, NvU32 *pciLinkMaxSpeed) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifGetPciLinkMaxSpeedByPciGenInfo(pGpu, pKernelBif, pciLinkGenInfo, pciLinkMaxSpeed) kbifGetPciLinkMaxSpeedByPciGenInfo_IMPL(pGpu, pKernelBif, pciLinkGenInfo, pciLinkMaxSpeed)
#endif //__nvoc_kernel_bif_h_disabled
1164
// Non-virtual method (note: takes no pGpu): real implementation lives in the KernelBif .c file.
NvBool kbifIsPciBusFamily_IMPL(struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and answers NV_FALSE.
static inline NvBool kbifIsPciBusFamily(struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifIsPciBusFamily(pKernelBif) kbifIsPciBusFamily_IMPL(pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1175
// Non-virtual method: real implementation lives in the KernelBif .c file.
// Fills the NV2080_CTRL_BUS_INFO record pointed to by pBusInfo.
NV_STATUS kbifControlGetPCIEInfo_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NV2080_CTRL_BUS_INFO *pBusInfo);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and reports unsupported.
static inline NV_STATUS kbifControlGetPCIEInfo(struct OBJGPU *pGpu, struct KernelBif *pKernelBif, NV2080_CTRL_BUS_INFO *pBusInfo) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifControlGetPCIEInfo(pGpu, pKernelBif, pBusInfo) kbifControlGetPCIEInfo_IMPL(pGpu, pKernelBif, pBusInfo)
#endif //__nvoc_kernel_bif_h_disabled
1186
// Non-virtual method: real implementation lives in the KernelBif .c file.
NvU32 kbifGetDmaCaps_IMPL(struct OBJGPU *pGpu, struct KernelBif *pKernelBif);

#ifdef __nvoc_kernel_bif_h_disabled
// Stub used when the KernelBif engine is compiled out: asserts and returns 0.
static inline NvU32 kbifGetDmaCaps(struct OBJGPU *pGpu, struct KernelBif *pKernelBif) {
    NV_ASSERT_FAILED_PRECOMP("KernelBif was disabled!");
    return 0;
}
#else //__nvoc_kernel_bif_h_disabled
#define kbifGetDmaCaps(pGpu, pKernelBif) kbifGetDmaCaps_IMPL(pGpu, pKernelBif)
#endif //__nvoc_kernel_bif_h_disabled
1197
1198 #undef PRIVATE_FIELD
1199
1200
1201 #endif // KERNEL_BIF_H
1202
1203 #ifdef __cplusplus
1204 } // extern "C"
1205 #endif
1206
1207 #endif // _G_KERNEL_BIF_NVOC_H_
1208