#ifndef _G_KERN_BUS_NVOC_H_
#define _G_KERN_BUS_NVOC_H_
#include "nvoc/runtime.h"

#ifdef __cplusplus
extern "C" {
#endif

/*
 * SPDX-FileCopyrightText: Copyright (c) 2021-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "g_kern_bus_nvoc.h"

#ifndef KERN_BUS_H
#define KERN_BUS_H

#include "core/core.h"
#include "gpu/eng_state.h"
#include "gpu/gpu_halspec.h"
#include "gpu/mem_mgr/mem_desc.h"
#include "containers/list.h"
#include "nvoc/utility.h"
#include "gpu/mmu/kern_gmmu.h" // VMMU_MAX_GFID
#include "mmu/mmu_walk.h"    // MMU_WALK
#include "mmu/gmmu_fmt.h"    // GMMU_FMT
#include "mem_mgr/vaspace.h"
#include "kernel/gpu/mem_mgr/mem_mgr.h" // TRANSFER_FLAGS_*
#include "ctrl/ctrl0000/ctrl0000system.h" // NV0000_CTRL_SYSTEM_MAX_ATTACHED_GPUS
#include "ctrl/ctrl2080/ctrl2080bus.h"

#define MAX_PCI_BARS                        8

//
// Virtual BAR2 mapping info is shared by Tesla and Fermi code
//
#if defined(NV_UNIX) && (defined(NVCPU_X86_64) || defined(NVCPU_AARCH64))
// 64-bit Unix can support many more mappings than some other operating systems:
#define BUS_BAR2_MAX_MAPPINGS       200
#else
#define BUS_BAR2_MAX_MAPPINGS       50
#endif

typedef enum
{
    BAR2_MODE_PHYSICAL = 0,
    BAR2_MODE_VIRTUAL,
} BAR2_MODE;

/*!
 * @brief Helper macro to return NV_TRUE if the input BAR offset (i) is a 64-bit
 *        offset. Used by several functions in the bus HAL.
 */
#define IS_BAR_64(i) (((i) & 0x00000006) == 0x00000004)
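/*
 * Illustrative note (not part of the original header): assuming the argument
 * is the raw value of a PCI memory BAR register, the mask 0x6 selects the
 * BAR "type" field in bits [2:1], and 0x4 corresponds to type 10b, i.e. a
 * 64-bit BAR. For example:
 *
 *     IS_BAR_64(0x0000000C)   // type bits 10b -> NV_TRUE  (64-bit BAR)
 *     IS_BAR_64(0x00000008)   // type bits 00b -> NV_FALSE (32-bit BAR)
 */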

// Test buffer size used in the coherent link test
#define BUS_COHERENT_LINK_TEST_BUFFER_SIZE 0x100

// FLA flags
#define NV_BUS_INVALID_FLA_ADDR      NV_U64_MAX
#define NV_BUS_FLA_VASPACE_ADDR_HI   47          // FLA is a 47-bit VA space


//
// kbusUpdateRmAperture flags
//
// TLB invalidate
#define UPDATE_RM_APERTURE_FLAGS_INVALIDATE           NVBIT(0)
// Indicates we're done with the specified mapping
#define UPDATE_RM_APERTURE_FLAGS_DISCARD              NVBIT(1)
// Indicates we're done with the mapping and it is being marked sparse in the PTEs
#define UPDATE_RM_APERTURE_FLAGS_SPARSIFY             NVBIT(2)
// Indicates we're updating page tables for the CPU-invisible VA range
#define UPDATE_RM_APERTURE_FLAGS_CPU_INVISIBLE_RANGE  NVBIT(3)
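/*
 * Illustrative sketch (assumption, not from the original header): the flags
 * above are single bits and can be combined with bitwise OR when updating the
 * RM aperture, e.g. tearing down a mapping and invalidating the TLB together:
 *
 *     NvU32 flags = UPDATE_RM_APERTURE_FLAGS_DISCARD |
 *                   UPDATE_RM_APERTURE_FLAGS_INVALIDATE;
 */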

//
// kbusMapFbAperture flags
//
#define BUS_MAP_FB_FLAGS_NONE                  (0)
#define BUS_MAP_FB_FLAGS_MAP_RSVD_BAR1         NVBIT(0)
#define BUS_MAP_FB_FLAGS_DISABLE_ENCRYPTION    NVBIT(1)
#define BUS_MAP_FB_FLAGS_MAP_DOWNWARDS         NVBIT(2)   // bug 624482 puts the USERD mapping at the top of BAR1 on Fermi
// NOTE: the following two flags are mutually exclusive (but not easy to change to DRF style now)
#define BUS_MAP_FB_FLAGS_READ_ONLY             NVBIT(3)
#define BUS_MAP_FB_FLAGS_WRITE_ONLY            NVBIT(4)
#define BUS_MAP_FB_FLAGS_MAP_UNICAST           NVBIT(5)
#define BUS_MAP_FB_FLAGS_MAP_OFFSET_FIXED      NVBIT(6)
#define BUS_MAP_FB_FLAGS_PRE_INIT              NVBIT(7)

#define BUS_MAP_FB_FLAGS_FERMI_INVALID         ~(BUS_MAP_FB_FLAGS_MAP_DOWNWARDS      | \
                                                 BUS_MAP_FB_FLAGS_DISABLE_ENCRYPTION | \
                                                 BUS_MAP_FB_FLAGS_READ_ONLY          | \
                                                 BUS_MAP_FB_FLAGS_WRITE_ONLY         | \
                                                 BUS_MAP_FB_FLAGS_MAP_UNICAST        | \
                                                 BUS_MAP_FB_FLAGS_MAP_OFFSET_FIXED   | \
                                                 BUS_MAP_FB_FLAGS_PRE_INIT)

#define BUS_MAP_FB_FLAGS_NV5X_INVALID          ~(BUS_MAP_FB_FLAGS_MAP_RSVD_BAR1 | BUS_MAP_FB_FLAGS_DISABLE_ENCRYPTION)
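/*
 * Illustrative sketch (assumption, not from the original header): the
 * *_INVALID masks above collect every flag that is NOT accepted on a given
 * chip family, so a caller-supplied flags word can be screened like this:
 *
 *     if ((flags & BUS_MAP_FB_FLAGS_FERMI_INVALID) != 0)
 *         return NV_ERR_INVALID_ARGUMENT;   // flag not supported on this chip
 *
 * READ_ONLY and WRITE_ONLY must also not be combined (see the note above).
 */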

// Inst Block
#define FERMI_PHYSADDR_WIDTH                  40   // PA max address is 40 bits
#define GF100_BUS_INSTANCEBLOCK_SHIFT         (FERMI_PHYSADDR_WIDTH - DRF_SIZE(NV_PBUS_BAR2_BLOCK_PTR))

//
// kbusFlush flags
//
#define BUS_FLUSH_VIDEO_MEMORY  NVBIT(0)
#define BUS_FLUSH_SYSTEM_MEMORY NVBIT(1)

//
// Peer to peer (P2P) defines
//
#define P2P_MAX_NUM_PEERS                      8

#define BUS_INVALID_PEER                       0xffffffff

#define PCIE_P2P_WRITE_MAILBOX_SIZE            ((NvU64)64*1024)    // since Fermi+
#define PCIE_P2P_INVALID_WRITE_MAILBOX_ADDR    ~((NvU64)0)

//
// BARs defines
//
#define BUS_BAR_0                           0
#define BUS_BAR_1                           1
#define BUS_BAR_2                           2
#define BUS_BAR_3                           3
#define BUS_NUM_BARS                        4

#define BUS_BAR2_APERTURE_MB                32
#define BUS_BAR2_RM_APERTURE_MB             16

// Inst Block
#define GF100_BUS_INSTANCEBLOCK_SIZE        4096

#define COHERENT_CPU_MAPPING_REGION_0       0x0
#define COHERENT_CPU_MAPPING_REGION_1       0x1
#define COHERENT_CPU_MAPPING_REGION_2       0x2
#define COHERENT_CPU_MAPPING_TOTAL_REGIONS  0x3   // Update this when the number of regions changes

typedef struct
{
    NvU64              vAddr;      // Bar2 addr returned by eheap
    NvU8              *pRtnPtr;    // Bar2 addr + lin addr of bar2 base

    MEMORY_DESCRIPTOR *pMemDesc;   // memory descriptor for this mapping
    MEM_DESC_DESTROY_CALLBACK memDescCallback;

    ListNode node;
} VirtualBar2MapEntry;

typedef struct
{
    NvU32               refCount;
    NvU32               remotePeerId;
    NvBool              bReserved;
    PMEMORY_DESCRIPTOR  pRemoteP2PDomMemDesc;
    PMEMORY_DESCRIPTOR  pRemoteWMBoxMemDesc;
} KBUS_PCIE_PEER;

MAKE_INTRUSIVE_LIST(VirtualBar2MapList, VirtualBar2MapEntry, node);
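/*
 * Illustrative sketch (assumption, not from the original header):
 * VirtualBar2MapList is an intrusive list, so it links VirtualBar2MapEntry
 * elements through their embedded 'node' member rather than allocating
 * separate link nodes. Assuming the listHead()/listNext() iteration helpers
 * from containers/list.h, a walk over the used-mapping list declared later in
 * this header might look like:
 *
 *     VirtualBar2MapEntry *pEntry;
 *     for (pEntry = listHead(&pKernelBus->virtualBar2[gfid].usedMapList);
 *          pEntry != NULL;
 *          pEntry = listNext(&pKernelBus->virtualBar2[gfid].usedMapList, pEntry))
 *     {
 *         // inspect pEntry->pMemDesc, pEntry->vAddr, ...
 *     }
 */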

struct Device;

#ifndef __NVOC_CLASS_Device_TYPEDEF__
#define __NVOC_CLASS_Device_TYPEDEF__
typedef struct Device Device;
#endif /* __NVOC_CLASS_Device_TYPEDEF__ */

#ifndef __nvoc_class_id_Device
#define __nvoc_class_id_Device 0xe0ac20
#endif /* __nvoc_class_id_Device */




// Private field names are wrapped in PRIVATE_FIELD, which does nothing for
// the matching C source file, but causes diagnostics to be issued if another
// source file references the field.
#ifdef NVOC_KERN_BUS_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
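/*
 * Illustrative note (not part of the original header): with the definitions
 * above, a field declared as, e.g.,
 *
 *     NvU32 PRIVATE_FIELD(mapCount);
 *
 * expands to a plainly named 'mapCount' only in the translation unit that
 * defines NVOC_KERN_BUS_H_PRIVATE_ACCESS_ALLOWED (the matching kern_bus C
 * source). Elsewhere NVOC_PRIVATE_FIELD() wraps the name so that, per the
 * comment above, references from other source files produce diagnostics.
 */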

struct __nvoc_inner_struc_KernelBus_1__ {
    RmPhysAddr physAddr;
    NvU64 apertureLength;
    struct OBJVASPACE *pVAS;
    NvU64 instBlockBase;
    MEMORY_DESCRIPTOR *pInstBlkMemDesc;
    NvBool bStaticBar1Enabled;
    struct {
        MEMORY_DESCRIPTOR *pVidMemDesc;
        MEMORY_DESCRIPTOR *pDmaMemDesc;
        NvU64 size;
    } staticBar1;
};

struct __nvoc_inner_struc_KernelBus_2__ {
    RmPhysAddr physAddr;
    NvU64 bar2OffsetInBar0Window;
    NvU64 cpuVisibleBase;
    NvU64 cpuVisibleLimit;
    NvU64 cpuInvisibleBase;
    NvU64 cpuInvisibleLimit;
    NvU64 rmApertureBase;
    NvU64 rmApertureLimit;
    NvU64 vaLimit;
    NvU64 pdeBase;
    MEMORY_DESCRIPTOR *pPDEMemDesc;
    NvU64 pteBase;
    NvU64 instBlockBase;
    MEMORY_DESCRIPTOR *pInstBlkMemDesc;
    MEMORY_DESCRIPTOR *pInstBlkMemDescForBootstrap;
    MMU_WALK *pWalkForBootstrap;
    NvU64 pdeBaseForBootstrap;
    MEMORY_DESCRIPTOR *pPDEMemDescForBootstrap;
    NvU64 pteBaseForBootstrap;
    NvBool bBootstrap;
    NvBool bMigrating;
    MMU_WALK *pWalk;
    MEMORY_DESCRIPTOR *pWalkStagingBuffer;
    const struct GMMU_FMT *pFmt;
    NvU32 numPageDirs;
    NvU32 pageDirSize;
    NvU32 numPageTbls;
    NvU32 pageTblSize;
    NvBool bIsBar2SizeReduced;
    NvU32 pageDirInit;
    NvU32 pageTblInit;
    NvU32 cpuVisiblePgTblSize;
};

struct __nvoc_inner_struc_KernelBus_3__ {
    struct OBJEHEAP *pVASpaceHeap;
    struct OBJEHEAP *pVASpaceHiddenHeap;
    VirtualBar2MapEntry *pMapListMemory;
    VirtualBar2MapList freeMapList;
    VirtualBar2MapList cachedMapList;
    VirtualBar2MapList usedMapList;
    MEMORY_DESCRIPTOR *pPageLevelsMemDesc;
    NvU8 *pPageLevels;
    MEMORY_DESCRIPTOR *pPageLevelsMemDescForBootstrap;
    NvU8 *pPageLevelsForBootstrap;
    MEMORY_DESCRIPTOR *pPTEMemDesc;
    NvU8 *pCpuMapping;
    NvU32 vAlignment;
    NvU32 flags;
    MEMORY_DESCRIPTOR *pPDB;
    NvU32 mapCount;
    NvU32 cacheHit;
    NvU32 evictions;
};

struct __nvoc_inner_struc_KernelBus_4__ {
    NvHandle hClient;
    NvHandle hDevice;
    NvHandle hSubDevice;
    NvHandle hFlaVASpace;
    struct OBJVASPACE *pFlaVAS;
    PMEMORY_DESCRIPTOR pInstblkMemDesc;
    NvBool bFlaAllocated;
    NvBool bFlaBind;
    NvBool bFlaRangeRegistered;
    NvU64 base;
    NvU64 size;
    NvBool bToggleBindPoint;
};

struct __nvoc_inner_struc_KernelBus_5__ {
    NvBool bCoherentCpuMapping;
    NvU32 nrMapping;
    NvP64 pCpuMapping[3];
    NvU64 size[3];
    NvU64 refcnt[3];
    RmPhysAddr physAddr[3];
};

struct __nvoc_inner_struc_KernelBus_6__ {
    NvBool bNvlinkPeerIdsReserved;
    NvU32 busNvlinkPeerNumberMask[32];
    NvU32 busNvlinkMappingRefcountPerGpu[32];
    NvU32 busNvlinkMappingRefcountPerPeerId[8];
    NvU32 busNvlinkMappingRefcountPerPeerIdSpa[8];
    NvBool bEgmPeer[8];
};

struct __nvoc_inner_struc_KernelBus_7__ {
    NvU32 peerNumberMask[32];
    KBUS_PCIE_PEER busPeer[8];
    NvU64 writeMailboxBar1Addr;
    NvU64 writeMailboxTotalSize;
};

struct __nvoc_inner_struc_KernelBus_8__ {
    NvU32 busBar1PeerRefcount[32];
};

struct __nvoc_inner_struc_KernelBus_9__ {
    NvU32 busC2CPeerNumberMask[32];
    NvU32 busC2CMappingRefcountPerPeerId[8];
};


struct KernelBus {
    const struct NVOC_RTTI *__nvoc_rtti;
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct Object *__nvoc_pbase_Object;
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
    struct KernelBus *__nvoc_pbase_KernelBus;
    NV_STATUS (*__kbusInitBarsSize__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusConstructEngine__)(struct OBJGPU *, struct KernelBus *, ENGDESCRIPTOR);
    NV_STATUS (*__kbusStatePreInitLocked__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusStateInitLocked__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusStatePreLoad__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusStateLoad__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusStatePostLoad__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusStatePreUnload__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusStateUnload__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusStatePostUnload__)(struct OBJGPU *, struct KernelBus *, NvU32);
    void (*__kbusStateDestroy__)(struct OBJGPU *, struct KernelBus *);
    NvU8 *(*__kbusMapBar2Aperture__)(struct OBJGPU *, struct KernelBus *, MEMORY_DESCRIPTOR *, NvU32);
    NvU8 *(*__kbusValidateBar2ApertureMapping__)(struct OBJGPU *, struct KernelBus *, MEMORY_DESCRIPTOR *, NvU8 *);
    void (*__kbusUnmapBar2ApertureWithFlags__)(struct OBJGPU *, struct KernelBus *, MEMORY_DESCRIPTOR *, NvU8 **, NvU32);
    NvU64 (*__kbusGetVaLimitForBar2__)(struct OBJGPU *, struct KernelBus *);
    void (*__kbusCalcCpuInvisibleBar2Range__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NvU32 (*__kbusCalcCpuInvisibleBar2ApertureSize__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusCommitBar2__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusRewritePTEsForExistingMapping__)(struct OBJGPU *, struct KernelBus *, MEMORY_DESCRIPTOR *);
    NV_STATUS (*__kbusPatchBar1Pdb__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusPatchBar2Pdb__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusConstructVirtualBar2CpuInvisibleHeap__)(struct KernelBus *, NvU32);
    NV_STATUS (*__kbusMapCpuInvisibleBar2Aperture__)(struct OBJGPU *, struct KernelBus *, MEMORY_DESCRIPTOR *, NvU64 *, NvU64, NvU32, NvU32);
    void (*__kbusUnmapCpuInvisibleBar2Aperture__)(struct OBJGPU *, struct KernelBus *, MEMORY_DESCRIPTOR *, NvU64, NvU32);
    NV_STATUS (*__kbusTeardownBar2CpuAperture__)(struct OBJGPU *, struct KernelBus *, NvU32);
    void (*__kbusGetP2PMailboxAttributes__)(struct OBJGPU *, struct KernelBus *, NvU32 *, NvU32 *, NvU32 *);
    NV_STATUS (*__kbusCreateP2PMapping__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *, struct KernelBus *, NvU32 *, NvU32 *, NvU32);
    NV_STATUS (*__kbusRemoveP2PMapping__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *, struct KernelBus *, NvU32, NvU32, NvU32);
    NvU32 (*__kbusGetEgmPeerId__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *);
    NvU32 (*__kbusGetPeerId__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *);
    NvU32 (*__kbusGetNvlinkPeerId__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *);
    NvU32 (*__kbusGetNvSwitchPeerId__)(struct OBJGPU *, struct KernelBus *);
    NvU32 (*__kbusGetUnusedPciePeerId__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusIsPeerIdValid__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusGetNvlinkP2PPeerId__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *, struct KernelBus *, NvU32 *, NvU32);
    void (*__kbusWriteP2PWmbTag__)(struct OBJGPU *, struct KernelBus *, NvU32, NvU64);
    RmPhysAddr (*__kbusSetupP2PDomainAccess__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *, PMEMORY_DESCRIPTOR *);
    NvBool (*__kbusNeedWarForBug999673__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *);
    NV_STATUS (*__kbusCreateP2PMappingForC2C__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *, struct KernelBus *, NvU32 *, NvU32 *, NvU32);
    NV_STATUS (*__kbusRemoveP2PMappingForC2C__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *, struct KernelBus *, NvU32, NvU32, NvU32);
    NV_STATUS (*__kbusUnreserveP2PPeerIds__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NvBool (*__kbusIsBar1P2PCapable__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusEnableStaticBar1Mapping__)(struct OBJGPU *, struct KernelBus *, NvU32);
    void (*__kbusDisableStaticBar1Mapping__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusGetBar1P2PDmaInfo__)(struct OBJGPU *, struct OBJGPU *, struct KernelBus *, NvU64 *, NvU64 *);
    NV_STATUS (*__kbusUpdateStaticBar1VAMapping__)(struct OBJGPU *, struct KernelBus *, MEMORY_DESCRIPTOR *, NvU64, NvU64, NvBool);
    NV_STATUS (*__kbusGetStaticFbAperture__)(struct OBJGPU *, struct KernelBus *, MEMORY_DESCRIPTOR *, NvU64, NvU64 *, NvU64 *, NvU32);
    NV_STATUS (*__kbusCreateP2PMappingForBar1P2P__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusRemoveP2PMappingForBar1P2P__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *, struct KernelBus *, NvU32);
    NvBool (*__kbusHasPcieBar1P2PMapping__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *, struct KernelBus *);
    NvBool (*__kbusIsPcieBar1P2PMappingSupported__)(struct OBJGPU *, struct KernelBus *, struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusCheckFlaSupportedAndInit__)(struct OBJGPU *, struct KernelBus *, NvU64, NvU64);
    NV_STATUS (*__kbusDetermineFlaRangeAndAllocate__)(struct OBJGPU *, struct KernelBus *, NvU64, NvU64);
    NV_STATUS (*__kbusAllocateFlaVaspace__)(struct OBJGPU *, struct KernelBus *, NvU64, NvU64);
    NV_STATUS (*__kbusGetFlaRange__)(struct OBJGPU *, struct KernelBus *, NvU64 *, NvU64 *, NvBool);
    NV_STATUS (*__kbusAllocateLegacyFlaVaspace__)(struct OBJGPU *, struct KernelBus *, NvU64, NvU64);
    NV_STATUS (*__kbusAllocateHostManagedFlaVaspace__)(struct OBJGPU *, struct KernelBus *, NvHandle, NvHandle, NvHandle, NvHandle, NvU64, NvU64, NvU32);
    void (*__kbusDestroyFla__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusGetFlaVaspace__)(struct OBJGPU *, struct KernelBus *, struct OBJVASPACE **);
    void (*__kbusDestroyHostManagedFlaVaspace__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NvBool (*__kbusVerifyFlaRange__)(struct OBJGPU *, struct KernelBus *, NvU64, NvU64);
    NV_STATUS (*__kbusConstructFlaInstBlk__)(struct OBJGPU *, struct KernelBus *, NvU32);
    void (*__kbusDestructFlaInstBlk__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusValidateFlaBaseAddress__)(struct OBJGPU *, struct KernelBus *, NvU64);
    NV_STATUS (*__kbusSetupUnbindFla__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusSetupBindFla__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusSendSysmembarSingle__)(struct OBJGPU *, struct KernelBus *);
    void (*__kbusCacheBAR1ResizeSize_WAR_BUG_3249028__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusRestoreBAR1ResizeSize_WAR_BUG_3249028__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusIsDirectMappingAllowed__)(struct OBJGPU *, struct KernelBus *, PMEMORY_DESCRIPTOR, NvU32, NvBool *);
    NV_STATUS (*__kbusUseDirectSysmemMap__)(struct OBJGPU *, struct KernelBus *, MEMORY_DESCRIPTOR *, NvBool *);
    NV_STATUS (*__kbusMemCopyBar0Window__)(struct OBJGPU *, struct KernelBus *, RmPhysAddr, void *, NvLength, NvBool);
    NV_STATUS (*__kbusWriteBAR0WindowBase__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NvU32 (*__kbusReadBAR0WindowBase__)(struct OBJGPU *, struct KernelBus *);
    NvBool (*__kbusValidateBAR0WindowBase__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusSetBAR0WindowVidOffset__)(struct OBJGPU *, struct KernelBus *, NvU64);
    NvU64 (*__kbusGetBAR0WindowVidOffset__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusSetupBar0WindowBeforeBar2Bootstrap__)(struct OBJGPU *, struct KernelBus *, NvU64 *);
    void (*__kbusRestoreBar0WindowAfterBar2Bootstrap__)(struct OBJGPU *, struct KernelBus *, NvU64);
    NV_STATUS (*__kbusVerifyBar2__)(struct OBJGPU *, struct KernelBus *, PMEMORY_DESCRIPTOR, NvU8 *, NvU64, NvU64);
    NV_STATUS (*__kbusBar2BootStrapInPhysicalMode__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusBindBar2__)(struct OBJGPU *, struct KernelBus *, BAR2_MODE);
    void (*__kbusInstBlkWriteAddrLimit__)(struct OBJGPU *, struct KernelBus *, NvBool, NvU64, NvU8 *, NvU64);
    NV_STATUS (*__kbusInitInstBlk__)(struct OBJGPU *, struct KernelBus *, PMEMORY_DESCRIPTOR, PMEMORY_DESCRIPTOR, NvU64, NvU64, struct OBJVASPACE *);
    void (*__kbusBar2InstBlkWrite__)(struct OBJGPU *, struct KernelBus *, NvU8 *, PMEMORY_DESCRIPTOR, NvU64, NvU64);
    NV_STATUS (*__kbusSetupBar2PageTablesAtBottomOfFb__)(struct OBJGPU *, struct KernelBus *, NvU32);
    void (*__kbusTeardownBar2PageTablesAtBottomOfFb__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusSetupBar2InstBlkAtBottomOfFb__)(struct OBJGPU *, struct KernelBus *, PMEMORY_DESCRIPTOR, NvU64, NvU64, NvU32);
    void (*__kbusTeardownBar2InstBlkAtBottomOfFb__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusSetupBar2PageTablesAtTopOfFb__)(struct OBJGPU *, struct KernelBus *, NvU32);
    NV_STATUS (*__kbusCommitBar2PDEs__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusVerifyCoherentLink__)(struct OBJGPU *, struct KernelBus *);
    void (*__kbusTeardownMailbox__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusBar1InstBlkVasUpdate__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusFlushPcieForBar0Doorbell__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusCreateCoherentCpuMapping__)(struct OBJGPU *, struct KernelBus *, NvU64, NvBool);
    NvU8 *(*__kbusMapCoherentCpuMapping__)(struct OBJGPU *, struct KernelBus *, PMEMORY_DESCRIPTOR);
    void (*__kbusUnmapCoherentCpuMapping__)(struct OBJGPU *, struct KernelBus *, PMEMORY_DESCRIPTOR);
    void (*__kbusTeardownCoherentCpuMapping__)(struct OBJGPU *, struct KernelBus *, NvBool);
    NV_STATUS (*__kbusBar1InstBlkBind__)(struct OBJGPU *, struct KernelBus *);
    NvU32 (*__kbusGetEccCounts__)(struct OBJGPU *, struct KernelBus *);
    NV_STATUS (*__kbusStateInitUnlocked__)(POBJGPU, struct KernelBus *);
    void (*__kbusInitMissing__)(POBJGPU, struct KernelBus *);
    NV_STATUS (*__kbusStatePreInitUnlocked__)(POBJGPU, struct KernelBus *);
    NvBool (*__kbusIsPresent__)(POBJGPU, struct KernelBus *);
    NvU32 totalPciBars;
    RmPhysAddr pciBars[8];
    NvU64 pciBarSizes[8];
    NvBool bPciBarSizesValid;
    NvU64 cachedBar0WindowVidOffset;
    NvU8 *pWriteCombinedBar0Window;
    NvU8 *pUncachedBar0Window;
    NvU8 *pDefaultBar0Pointer;
    NvU64 physicalBar0WindowSize;
    struct __nvoc_inner_struc_KernelBus_1__ bar1[64];
    struct __nvoc_inner_struc_KernelBus_2__ bar2[64];
    struct __nvoc_inner_struc_KernelBus_3__ virtualBar2[64];
    struct __nvoc_inner_struc_KernelBus_4__ flaInfo;
    NvBool bFlaSupported;
    NvBool bFlaEnabled;
    NvBool bFlaDummyPageEnabled;
    NvBool bForceFlaTraffic;
    NvU8 bar1ResizeSizeIndex;
    struct __nvoc_inner_struc_KernelBus_5__ coherentCpuMapping;
    NvU64 coherentLinkTestBufferBase;
    struct __nvoc_inner_struc_KernelBus_6__ p2p;
    struct __nvoc_inner_struc_KernelBus_7__ p2pPcie;
    struct __nvoc_inner_struc_KernelBus_8__ p2pPcieBar1;
    struct __nvoc_inner_struc_KernelBus_9__ c2cPeerInfo;
    NvU32 numPeers;
    NvBool p2pMapSpecifyId;
    NvU32 p2pMapPeerId;
    NvU32 totalP2pObjectsAliveRefCount;
    NvBool bP2pInitialized;
    NvBool bP2pMailboxClientAllocated;
    NvBool bP2pMailboxClientAllocatedBug3466714VoltaAndUp;
    NvBool bBar1Force64KBMapping;
    NvBool bBar1PhysicalModeEnabled;
    NvBool bIsBar2Initialized;
    NvBool bBar2SysmemAccessEnabled;
    NvBool bBar2TestSkipped;
    NvBool bUsePhysicalBar2InitPagetable;
    NvBool bIsBar2SetupInPhysicalMode;
    NvBool bPreserveBar1ConsoleEnabled;
    NvBool bBar1ConsolePreserved;
    NvBool bBarAccessBlocked;
    NvBool bBug2751296LimitBar2PtSize;
    NvBool bAllowReflectedMappingAccess;
    NvBool bIsEntireBar2RegionVirtuallyAddressible;
    NvBool bSkipBar2TestOnGc6Exit;
    NvBool bFbFlushDisabled;
    PMEMORY_DESCRIPTOR pFlushMemDesc;
    NvU8 *pReadToFlush;
    NvBool bReadCpuPointerToFlush;
    NvBool PDB_PROP_KBUS_NVLINK_DECONFIG_HSHUB_ON_NO_MAPPING;
    NvBool PDB_PROP_KBUS_RESTORE_BAR1_SIZE_BUG_3249028_WAR;
    NvU32 PTEBAR2Aperture;
    NvU32 PTEBAR2Attr;
    NvU32 PDEBAR2Aperture;
    NvU32 PDEBAR2Attr;
    NvU32 InstBlkAperture;
    NvU32 InstBlkAttr;
    NvBool bInstProtectedMem;
    NvBool bForceBarAccessOnHcc;
};

#ifndef __NVOC_CLASS_KernelBus_TYPEDEF__
#define __NVOC_CLASS_KernelBus_TYPEDEF__
typedef struct KernelBus KernelBus;
#endif /* __NVOC_CLASS_KernelBus_TYPEDEF__ */

#ifndef __nvoc_class_id_KernelBus
#define __nvoc_class_id_KernelBus 0xd2ac57
#endif /* __nvoc_class_id_KernelBus */

extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelBus;

#define __staticCast_KernelBus(pThis) \
    ((pThis)->__nvoc_pbase_KernelBus)

#ifdef __nvoc_kern_bus_h_disabled
#define __dynamicCast_KernelBus(pThis) ((KernelBus*)NULL)
#else //__nvoc_kern_bus_h_disabled
#define __dynamicCast_KernelBus(pThis) \
    ((KernelBus*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelBus)))
#endif //__nvoc_kern_bus_h_disabled
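/*
 * Illustrative sketch (assumption, not from the original header): these casts
 * follow the usual NVOC pattern. __staticCast_KernelBus() simply follows the
 * precomputed base pointer, while the dynamic cast goes through the NVOC RTTI
 * tables and yields NULL on a type mismatch. With a hypothetical object
 * pointer pSomeObject:
 *
 *     KernelBus *pKernelBus = __dynamicCast_KernelBus(pSomeObject);
 *     if (pKernelBus == NULL)
 *         return NV_ERR_INVALID_OBJECT;    // pSomeObject is not a KernelBus
 */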

#define PDB_PROP_KBUS_RESTORE_BAR1_SIZE_BUG_3249028_WAR_BASE_CAST
#define PDB_PROP_KBUS_RESTORE_BAR1_SIZE_BUG_3249028_WAR_BASE_NAME PDB_PROP_KBUS_RESTORE_BAR1_SIZE_BUG_3249028_WAR
#define PDB_PROP_KBUS_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KBUS_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
#define PDB_PROP_KBUS_NVLINK_DECONFIG_HSHUB_ON_NO_MAPPING_BASE_CAST
#define PDB_PROP_KBUS_NVLINK_DECONFIG_HSHUB_ON_NO_MAPPING_BASE_NAME PDB_PROP_KBUS_NVLINK_DECONFIG_HSHUB_ON_NO_MAPPING

NV_STATUS __nvoc_objCreateDynamic_KernelBus(KernelBus**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelBus(KernelBus**, Dynamic*, NvU32);
#define __objCreate_KernelBus(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelBus((ppNewObj), staticCast((pParent), Dynamic), (createFlags))

#define kbusInitBarsSize(pGpu, pKernelBus) kbusInitBarsSize_DISPATCH(pGpu, pKernelBus)
#define kbusInitBarsSize_HAL(pGpu, pKernelBus) kbusInitBarsSize_DISPATCH(pGpu, pKernelBus)
#define kbusConstructEngine(pGpu, pKernelBus, arg0) kbusConstructEngine_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusStatePreInitLocked(pGpu, pKernelBus) kbusStatePreInitLocked_DISPATCH(pGpu, pKernelBus)
#define kbusStatePreInitLocked_HAL(pGpu, pKernelBus) kbusStatePreInitLocked_DISPATCH(pGpu, pKernelBus)
#define kbusStateInitLocked(pGpu, pKernelBus) kbusStateInitLocked_DISPATCH(pGpu, pKernelBus)
#define kbusStatePreLoad(pGpu, pKernelBus, arg0) kbusStatePreLoad_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusStatePreLoad_HAL(pGpu, pKernelBus, arg0) kbusStatePreLoad_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusStateLoad(pGpu, pKernelBus, arg0) kbusStateLoad_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusStateLoad_HAL(pGpu, pKernelBus, arg0) kbusStateLoad_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusStatePostLoad(pGpu, pKernelBus, arg0) kbusStatePostLoad_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusStatePostLoad_HAL(pGpu, pKernelBus, arg0) kbusStatePostLoad_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusStatePreUnload(pGpu, pKernelBus, arg0) kbusStatePreUnload_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusStatePreUnload_HAL(pGpu, pKernelBus, arg0) kbusStatePreUnload_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusStateUnload(pGpu, pKernelBus, flags) kbusStateUnload_DISPATCH(pGpu, pKernelBus, flags)
#define kbusStateUnload_HAL(pGpu, pKernelBus, flags) kbusStateUnload_DISPATCH(pGpu, pKernelBus, flags)
#define kbusStatePostUnload(pGpu, pKernelBus, flags) kbusStatePostUnload_DISPATCH(pGpu, pKernelBus, flags)
#define kbusStatePostUnload_HAL(pGpu, pKernelBus, flags) kbusStatePostUnload_DISPATCH(pGpu, pKernelBus, flags)
#define kbusStateDestroy(pGpu, pKernelBus) kbusStateDestroy_DISPATCH(pGpu, pKernelBus)
#define kbusStateDestroy_HAL(pGpu, pKernelBus) kbusStateDestroy_DISPATCH(pGpu, pKernelBus)
#define kbusMapBar2Aperture(pGpu, pKernelBus, pMemDesc, transfer_flags) kbusMapBar2Aperture_DISPATCH(pGpu, pKernelBus, pMemDesc, transfer_flags)
#define kbusMapBar2Aperture_HAL(pGpu, pKernelBus, pMemDesc, transfer_flags) kbusMapBar2Aperture_DISPATCH(pGpu, pKernelBus, pMemDesc, transfer_flags)
#define kbusValidateBar2ApertureMapping(pGpu, pKernelBus, pMemDesc, p) kbusValidateBar2ApertureMapping_DISPATCH(pGpu, pKernelBus, pMemDesc, p)
#define kbusValidateBar2ApertureMapping_HAL(pGpu, pKernelBus, pMemDesc, p) kbusValidateBar2ApertureMapping_DISPATCH(pGpu, pKernelBus, pMemDesc, p)
#define kbusUnmapBar2ApertureWithFlags(pGpu, pKernelBus, pMemDesc, pCpuPtr, flags) kbusUnmapBar2ApertureWithFlags_DISPATCH(pGpu, pKernelBus, pMemDesc, pCpuPtr, flags)
#define kbusUnmapBar2ApertureWithFlags_HAL(pGpu, pKernelBus, pMemDesc, pCpuPtr, flags) kbusUnmapBar2ApertureWithFlags_DISPATCH(pGpu, pKernelBus, pMemDesc, pCpuPtr, flags)
#define kbusGetVaLimitForBar2(pGpu, pKernelBus) kbusGetVaLimitForBar2_DISPATCH(pGpu, pKernelBus)
#define kbusGetVaLimitForBar2_HAL(pGpu, pKernelBus) kbusGetVaLimitForBar2_DISPATCH(pGpu, pKernelBus)
#define kbusCalcCpuInvisibleBar2Range(pGpu, pKernelBus, gfid) kbusCalcCpuInvisibleBar2Range_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusCalcCpuInvisibleBar2Range_HAL(pGpu, pKernelBus, gfid) kbusCalcCpuInvisibleBar2Range_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusCalcCpuInvisibleBar2ApertureSize(pGpu, pKernelBus) kbusCalcCpuInvisibleBar2ApertureSize_DISPATCH(pGpu, pKernelBus)
#define kbusCalcCpuInvisibleBar2ApertureSize_HAL(pGpu, pKernelBus) kbusCalcCpuInvisibleBar2ApertureSize_DISPATCH(pGpu, pKernelBus)
#define kbusCommitBar2(pGpu, pKernelBus, flags) kbusCommitBar2_DISPATCH(pGpu, pKernelBus, flags)
#define kbusCommitBar2_HAL(pGpu, pKernelBus, flags) kbusCommitBar2_DISPATCH(pGpu, pKernelBus, flags)
#define kbusRewritePTEsForExistingMapping(pGpu, pKernelBus, pMemDesc) kbusRewritePTEsForExistingMapping_DISPATCH(pGpu, pKernelBus, pMemDesc)
#define kbusRewritePTEsForExistingMapping_HAL(pGpu, pKernelBus, pMemDesc) kbusRewritePTEsForExistingMapping_DISPATCH(pGpu, pKernelBus, pMemDesc)
#define kbusPatchBar1Pdb(pGpu, pKernelBus) kbusPatchBar1Pdb_DISPATCH(pGpu, pKernelBus)
#define kbusPatchBar1Pdb_HAL(pGpu, pKernelBus) kbusPatchBar1Pdb_DISPATCH(pGpu, pKernelBus)
#define kbusPatchBar2Pdb(pGpu, pKernelBus) kbusPatchBar2Pdb_DISPATCH(pGpu, pKernelBus)
#define kbusPatchBar2Pdb_HAL(pGpu, pKernelBus) kbusPatchBar2Pdb_DISPATCH(pGpu, pKernelBus)
#define kbusConstructVirtualBar2CpuInvisibleHeap(pKernelBus, gfid) kbusConstructVirtualBar2CpuInvisibleHeap_DISPATCH(pKernelBus, gfid)
#define kbusConstructVirtualBar2CpuInvisibleHeap_HAL(pKernelBus, gfid) kbusConstructVirtualBar2CpuInvisibleHeap_DISPATCH(pKernelBus, gfid)
#define kbusMapCpuInvisibleBar2Aperture(pGpu, pKernelBus, pMemDesc, pVaddr, allocSize, allocFlags, gfid) kbusMapCpuInvisibleBar2Aperture_DISPATCH(pGpu, pKernelBus, pMemDesc, pVaddr, allocSize, allocFlags, gfid)
#define kbusMapCpuInvisibleBar2Aperture_HAL(pGpu, pKernelBus, pMemDesc, pVaddr, allocSize, allocFlags, gfid) kbusMapCpuInvisibleBar2Aperture_DISPATCH(pGpu, pKernelBus, pMemDesc, pVaddr, allocSize, allocFlags, gfid)
#define kbusUnmapCpuInvisibleBar2Aperture(pGpu, pKernelBus, pMemDesc, vAddr, gfid) kbusUnmapCpuInvisibleBar2Aperture_DISPATCH(pGpu, pKernelBus, pMemDesc, vAddr, gfid)
#define kbusUnmapCpuInvisibleBar2Aperture_HAL(pGpu, pKernelBus, pMemDesc, vAddr, gfid) kbusUnmapCpuInvisibleBar2Aperture_DISPATCH(pGpu, pKernelBus, pMemDesc, vAddr, gfid)
#define kbusTeardownBar2CpuAperture(pGpu, pKernelBus, gfid) kbusTeardownBar2CpuAperture_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusTeardownBar2CpuAperture_HAL(pGpu, pKernelBus, gfid) kbusTeardownBar2CpuAperture_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusGetP2PWriteMailboxAddressSize(pGpu) kbusGetP2PWriteMailboxAddressSize_STATIC_DISPATCH(pGpu)
#define kbusGetP2PWriteMailboxAddressSize_HAL(pGpu) kbusGetP2PWriteMailboxAddressSize_STATIC_DISPATCH(pGpu)
#define kbusGetP2PMailboxAttributes(pGpu, pKernelBus, pMailboxAreaSize, pMailboxAlignmentSize, pMailboxMaxOffset64KB) kbusGetP2PMailboxAttributes_DISPATCH(pGpu, pKernelBus, pMailboxAreaSize, pMailboxAlignmentSize, pMailboxMaxOffset64KB)
#define kbusGetP2PMailboxAttributes_HAL(pGpu, pKernelBus, pMailboxAreaSize, pMailboxAlignmentSize, pMailboxMaxOffset64KB) kbusGetP2PMailboxAttributes_DISPATCH(pGpu, pKernelBus, pMailboxAreaSize, pMailboxAlignmentSize, pMailboxMaxOffset64KB)
#define kbusCreateP2PMapping(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusCreateP2PMapping_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
#define kbusCreateP2PMapping_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusCreateP2PMapping_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
#define kbusRemoveP2PMapping(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusRemoveP2PMapping_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
#define kbusRemoveP2PMapping_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusRemoveP2PMapping_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
#define kbusGetEgmPeerId(pLocalGpu, pLocalKernelBus, pRemoteGpu) kbusGetEgmPeerId_DISPATCH(pLocalGpu, pLocalKernelBus, pRemoteGpu)
#define kbusGetEgmPeerId_HAL(pLocalGpu, pLocalKernelBus, pRemoteGpu) kbusGetEgmPeerId_DISPATCH(pLocalGpu, pLocalKernelBus, pRemoteGpu)
#define kbusGetPeerId(pGpu, pKernelBus, pPeerGpu) kbusGetPeerId_DISPATCH(pGpu, pKernelBus, pPeerGpu)
#define kbusGetPeerId_HAL(pGpu, pKernelBus, pPeerGpu) kbusGetPeerId_DISPATCH(pGpu, pKernelBus, pPeerGpu)
#define kbusGetNvlinkPeerId(pGpu, pKernelBus, pPeerGpu) kbusGetNvlinkPeerId_DISPATCH(pGpu, pKernelBus, pPeerGpu)
#define kbusGetNvlinkPeerId_HAL(pGpu, pKernelBus, pPeerGpu) kbusGetNvlinkPeerId_DISPATCH(pGpu, pKernelBus, pPeerGpu)
#define kbusGetNvSwitchPeerId(pGpu, pKernelBus) kbusGetNvSwitchPeerId_DISPATCH(pGpu, pKernelBus)
#define kbusGetNvSwitchPeerId_HAL(pGpu, pKernelBus) kbusGetNvSwitchPeerId_DISPATCH(pGpu, pKernelBus)
#define kbusGetUnusedPciePeerId(pGpu, pKernelBus) kbusGetUnusedPciePeerId_DISPATCH(pGpu, pKernelBus)
#define kbusGetUnusedPciePeerId_HAL(pGpu, pKernelBus) kbusGetUnusedPciePeerId_DISPATCH(pGpu, pKernelBus)
#define kbusIsPeerIdValid(pGpu, pKernelBus, peerId) kbusIsPeerIdValid_DISPATCH(pGpu, pKernelBus, peerId)
#define kbusIsPeerIdValid_HAL(pGpu, pKernelBus, peerId) kbusIsPeerIdValid_DISPATCH(pGpu, pKernelBus, peerId)
#define kbusGetNvlinkP2PPeerId(pGpu0, pKernelBus0, pGpu1, pKernelBus1, nvlinkPeer, attributes) kbusGetNvlinkP2PPeerId_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, nvlinkPeer, attributes)
#define kbusGetNvlinkP2PPeerId_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, nvlinkPeer, attributes) kbusGetNvlinkP2PPeerId_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, nvlinkPeer, attributes)
#define kbusWriteP2PWmbTag(pGpu, pKernelBus, remote2Local, p2pWmbTag) kbusWriteP2PWmbTag_DISPATCH(pGpu, pKernelBus, remote2Local, p2pWmbTag)
#define kbusWriteP2PWmbTag_HAL(pGpu, pKernelBus, remote2Local, p2pWmbTag) kbusWriteP2PWmbTag_DISPATCH(pGpu, pKernelBus, remote2Local, p2pWmbTag)
#define kbusSetupP2PDomainAccess(pGpu0, pKernelBus0, pGpu1, ppP2PDomMemDesc) kbusSetupP2PDomainAccess_DISPATCH(pGpu0, pKernelBus0, pGpu1, ppP2PDomMemDesc)
#define kbusSetupP2PDomainAccess_HAL(pGpu0, pKernelBus0, pGpu1, ppP2PDomMemDesc) kbusSetupP2PDomainAccess_DISPATCH(pGpu0, pKernelBus0, pGpu1, ppP2PDomMemDesc)
#define kbusNeedWarForBug999673(pGpu, pKernelBus, pRemoteGpu) kbusNeedWarForBug999673_DISPATCH(pGpu, pKernelBus, pRemoteGpu)
#define kbusNeedWarForBug999673_HAL(pGpu, pKernelBus, pRemoteGpu) kbusNeedWarForBug999673_DISPATCH(pGpu, pKernelBus, pRemoteGpu)
#define kbusCreateP2PMappingForC2C(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusCreateP2PMappingForC2C_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
#define kbusCreateP2PMappingForC2C_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusCreateP2PMappingForC2C_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
#define kbusRemoveP2PMappingForC2C(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusRemoveP2PMappingForC2C_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
#define kbusRemoveP2PMappingForC2C_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusRemoveP2PMappingForC2C_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
#define kbusUnreserveP2PPeerIds(pGpu, pKernelBus, peerMask) kbusUnreserveP2PPeerIds_DISPATCH(pGpu, pKernelBus, peerMask)
#define kbusUnreserveP2PPeerIds_HAL(pGpu, pKernelBus, peerMask) kbusUnreserveP2PPeerIds_DISPATCH(pGpu, pKernelBus, peerMask)
#define kbusIsBar1P2PCapable(pGpu, pKernelBus, gfid) kbusIsBar1P2PCapable_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusIsBar1P2PCapable_HAL(pGpu, pKernelBus, gfid) kbusIsBar1P2PCapable_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusEnableStaticBar1Mapping(pGpu, pKernelBus, gfid) kbusEnableStaticBar1Mapping_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusEnableStaticBar1Mapping_HAL(pGpu, pKernelBus, gfid) kbusEnableStaticBar1Mapping_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusDisableStaticBar1Mapping(pGpu, pKernelBus, gfid) kbusDisableStaticBar1Mapping_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusDisableStaticBar1Mapping_HAL(pGpu, pKernelBus, gfid) kbusDisableStaticBar1Mapping_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusGetBar1P2PDmaInfo(pSrcGpu, pPeerGpu, pPeerKernelBus, dma_addr, dma_size) kbusGetBar1P2PDmaInfo_DISPATCH(pSrcGpu, pPeerGpu, pPeerKernelBus, dma_addr, dma_size)
#define kbusGetBar1P2PDmaInfo_HAL(pSrcGpu, pPeerGpu, pPeerKernelBus, dma_addr, dma_size) kbusGetBar1P2PDmaInfo_DISPATCH(pSrcGpu, pPeerGpu, pPeerKernelBus, dma_addr, dma_size)
#define kbusUpdateStaticBar1VAMapping(pGpu, pKernelBus, pMemDesc, offset, length, bRelease) kbusUpdateStaticBar1VAMapping_DISPATCH(pGpu, pKernelBus, pMemDesc, offset, length, bRelease)
#define kbusUpdateStaticBar1VAMapping_HAL(pGpu, pKernelBus, pMemDesc, offset, length, bRelease) kbusUpdateStaticBar1VAMapping_DISPATCH(pGpu, pKernelBus, pMemDesc, offset, length, bRelease)
#define kbusGetStaticFbAperture(pGpu, pKernelBus, pMemDesc, offset, pAperOffset, pLength, gfid) kbusGetStaticFbAperture_DISPATCH(pGpu, pKernelBus, pMemDesc, offset, pAperOffset, pLength, gfid)
#define kbusGetStaticFbAperture_HAL(pGpu, pKernelBus, pMemDesc, offset, pAperOffset, pLength, gfid) kbusGetStaticFbAperture_DISPATCH(pGpu, pKernelBus, pMemDesc, offset, pAperOffset, pLength, gfid)
#define kbusCreateP2PMappingForBar1P2P(pGpu0, pKernelBus0, pGpu1, pKernelBus1, attributes) kbusCreateP2PMappingForBar1P2P_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, attributes)
#define kbusCreateP2PMappingForBar1P2P_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, attributes) kbusCreateP2PMappingForBar1P2P_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, attributes)
#define kbusRemoveP2PMappingForBar1P2P(pGpu0, pKernelBus0, pGpu1, pKernelBus1, attributes) kbusRemoveP2PMappingForBar1P2P_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, attributes)
#define kbusRemoveP2PMappingForBar1P2P_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, attributes) kbusRemoveP2PMappingForBar1P2P_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1, attributes)
#define kbusHasPcieBar1P2PMapping(pGpu0, pKernelBus0, pGpu1, pKernelBus1) kbusHasPcieBar1P2PMapping_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1)
#define kbusHasPcieBar1P2PMapping_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1) kbusHasPcieBar1P2PMapping_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1)
#define kbusIsPcieBar1P2PMappingSupported(pGpu0, pKernelBus0, pGpu1, pKernelBus1) kbusIsPcieBar1P2PMappingSupported_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1)
#define kbusIsPcieBar1P2PMappingSupported_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1) kbusIsPcieBar1P2PMappingSupported_DISPATCH(pGpu0, pKernelBus0, pGpu1, pKernelBus1)
#define kbusCheckFlaSupportedAndInit(pGpu, pKernelBus, base, size) kbusCheckFlaSupportedAndInit_DISPATCH(pGpu, pKernelBus, base, size)
#define kbusCheckFlaSupportedAndInit_HAL(pGpu, pKernelBus, base, size) kbusCheckFlaSupportedAndInit_DISPATCH(pGpu, pKernelBus, base, size)
#define kbusDetermineFlaRangeAndAllocate(pGpu, pKernelBus, base, size) kbusDetermineFlaRangeAndAllocate_DISPATCH(pGpu, pKernelBus, base, size)
#define kbusDetermineFlaRangeAndAllocate_HAL(pGpu, pKernelBus, base, size) kbusDetermineFlaRangeAndAllocate_DISPATCH(pGpu, pKernelBus, base, size)
#define kbusAllocateFlaVaspace(pGpu, pKernelBus, arg0, arg1) kbusAllocateFlaVaspace_DISPATCH(pGpu, pKernelBus, arg0, arg1)
#define kbusAllocateFlaVaspace_HAL(pGpu, pKernelBus, arg0, arg1) kbusAllocateFlaVaspace_DISPATCH(pGpu, pKernelBus, arg0, arg1)
#define kbusGetFlaRange(pGpu, pKernelBus, arg0, arg1, arg2) kbusGetFlaRange_DISPATCH(pGpu, pKernelBus, arg0, arg1, arg2)
#define kbusGetFlaRange_HAL(pGpu, pKernelBus, arg0, arg1, arg2) kbusGetFlaRange_DISPATCH(pGpu, pKernelBus, arg0, arg1, arg2)
#define kbusAllocateLegacyFlaVaspace(pGpu, pKernelBus, arg0, arg1) kbusAllocateLegacyFlaVaspace_DISPATCH(pGpu, pKernelBus, arg0, arg1)
#define kbusAllocateLegacyFlaVaspace_HAL(pGpu, pKernelBus, arg0, arg1) kbusAllocateLegacyFlaVaspace_DISPATCH(pGpu, pKernelBus, arg0, arg1)
#define kbusAllocateHostManagedFlaVaspace(pGpu, pKernelBus, arg0, arg1, arg2, arg3, arg4, arg5, arg6) kbusAllocateHostManagedFlaVaspace_DISPATCH(pGpu, pKernelBus, arg0, arg1, arg2, arg3, arg4, arg5, arg6)
#define kbusAllocateHostManagedFlaVaspace_HAL(pGpu, pKernelBus, arg0, arg1, arg2, arg3, arg4, arg5, arg6) kbusAllocateHostManagedFlaVaspace_DISPATCH(pGpu, pKernelBus, arg0, arg1, arg2, arg3, arg4, arg5, arg6)
#define kbusDestroyFla(pGpu, pKernelBus) kbusDestroyFla_DISPATCH(pGpu, pKernelBus)
#define kbusDestroyFla_HAL(pGpu, pKernelBus) kbusDestroyFla_DISPATCH(pGpu, pKernelBus)
#define kbusGetFlaVaspace(pGpu, pKernelBus, arg0) kbusGetFlaVaspace_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusGetFlaVaspace_HAL(pGpu, pKernelBus, arg0) kbusGetFlaVaspace_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusDestroyHostManagedFlaVaspace(pGpu, pKernelBus, arg0) kbusDestroyHostManagedFlaVaspace_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusDestroyHostManagedFlaVaspace_HAL(pGpu, pKernelBus, arg0) kbusDestroyHostManagedFlaVaspace_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusVerifyFlaRange(pGpu, pKernelBus, arg0, arg1) kbusVerifyFlaRange_DISPATCH(pGpu, pKernelBus, arg0, arg1)
#define kbusVerifyFlaRange_HAL(pGpu, pKernelBus, arg0, arg1) kbusVerifyFlaRange_DISPATCH(pGpu, pKernelBus, arg0, arg1)
#define kbusConstructFlaInstBlk(pGpu, pKernelBus, arg0) kbusConstructFlaInstBlk_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusConstructFlaInstBlk_HAL(pGpu, pKernelBus, arg0) kbusConstructFlaInstBlk_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusDestructFlaInstBlk(pGpu, pKernelBus) kbusDestructFlaInstBlk_DISPATCH(pGpu, pKernelBus)
#define kbusDestructFlaInstBlk_HAL(pGpu, pKernelBus) kbusDestructFlaInstBlk_DISPATCH(pGpu, pKernelBus)
#define kbusValidateFlaBaseAddress(pGpu, pKernelBus, flaBaseAddr) kbusValidateFlaBaseAddress_DISPATCH(pGpu, pKernelBus, flaBaseAddr)
#define kbusValidateFlaBaseAddress_HAL(pGpu, pKernelBus, flaBaseAddr) kbusValidateFlaBaseAddress_DISPATCH(pGpu, pKernelBus, flaBaseAddr)
#define kbusSetupUnbindFla(pGpu, pKernelBus) kbusSetupUnbindFla_DISPATCH(pGpu, pKernelBus)
#define kbusSetupUnbindFla_HAL(pGpu, pKernelBus) kbusSetupUnbindFla_DISPATCH(pGpu, pKernelBus)
#define kbusSetupBindFla(pGpu, pKernelBus, gfid) kbusSetupBindFla_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusSetupBindFla_HAL(pGpu, pKernelBus, gfid) kbusSetupBindFla_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusSendSysmembarSingle(pGpu, pKernelBus) kbusSendSysmembarSingle_DISPATCH(pGpu, pKernelBus)
#define kbusSendSysmembarSingle_HAL(pGpu, pKernelBus) kbusSendSysmembarSingle_DISPATCH(pGpu, pKernelBus)
#define kbusCacheBAR1ResizeSize_WAR_BUG_3249028(pGpu, pKernelBus) kbusCacheBAR1ResizeSize_WAR_BUG_3249028_DISPATCH(pGpu, pKernelBus)
#define kbusCacheBAR1ResizeSize_WAR_BUG_3249028_HAL(pGpu, pKernelBus) kbusCacheBAR1ResizeSize_WAR_BUG_3249028_DISPATCH(pGpu, pKernelBus)
#define kbusRestoreBAR1ResizeSize_WAR_BUG_3249028(pGpu, pKernelBus) kbusRestoreBAR1ResizeSize_WAR_BUG_3249028_DISPATCH(pGpu, pKernelBus)
#define kbusRestoreBAR1ResizeSize_WAR_BUG_3249028_HAL(pGpu, pKernelBus) kbusRestoreBAR1ResizeSize_WAR_BUG_3249028_DISPATCH(pGpu, pKernelBus)
#define kbusIsDirectMappingAllowed(pGpu, pKernelBus, arg0, arg1, arg2) kbusIsDirectMappingAllowed_DISPATCH(pGpu, pKernelBus, arg0, arg1, arg2)
#define kbusIsDirectMappingAllowed_HAL(pGpu, pKernelBus, arg0, arg1, arg2) kbusIsDirectMappingAllowed_DISPATCH(pGpu, pKernelBus, arg0, arg1, arg2)
#define kbusUseDirectSysmemMap(pGpu, pKernelBus, arg0, arg1) kbusUseDirectSysmemMap_DISPATCH(pGpu, pKernelBus, arg0, arg1)
#define kbusUseDirectSysmemMap_HAL(pGpu, pKernelBus, arg0, arg1) kbusUseDirectSysmemMap_DISPATCH(pGpu, pKernelBus, arg0, arg1)
#define kbusMemCopyBar0Window(pGpu, pKernelBus, physAddr, pData, copySize, bRead) kbusMemCopyBar0Window_DISPATCH(pGpu, pKernelBus, physAddr, pData, copySize, bRead)
#define kbusMemCopyBar0Window_HAL(pGpu, pKernelBus, physAddr, pData, copySize, bRead) kbusMemCopyBar0Window_DISPATCH(pGpu, pKernelBus, physAddr, pData, copySize, bRead)
#define kbusWriteBAR0WindowBase(pGpu, pKernelBus, base) kbusWriteBAR0WindowBase_DISPATCH(pGpu, pKernelBus, base)
#define kbusWriteBAR0WindowBase_HAL(pGpu, pKernelBus, base) kbusWriteBAR0WindowBase_DISPATCH(pGpu, pKernelBus, base)
#define kbusReadBAR0WindowBase(pGpu, pKernelBus) kbusReadBAR0WindowBase_DISPATCH(pGpu, pKernelBus)
#define kbusReadBAR0WindowBase_HAL(pGpu, pKernelBus) kbusReadBAR0WindowBase_DISPATCH(pGpu, pKernelBus)
#define kbusValidateBAR0WindowBase(pGpu, pKernelBus, base) kbusValidateBAR0WindowBase_DISPATCH(pGpu, pKernelBus, base)
#define kbusValidateBAR0WindowBase_HAL(pGpu, pKernelBus, base) kbusValidateBAR0WindowBase_DISPATCH(pGpu, pKernelBus, base)
#define kbusSetBAR0WindowVidOffset(pGpu, pKernelBus, vidOffset) kbusSetBAR0WindowVidOffset_DISPATCH(pGpu, pKernelBus, vidOffset)
#define kbusSetBAR0WindowVidOffset_HAL(pGpu, pKernelBus, vidOffset) kbusSetBAR0WindowVidOffset_DISPATCH(pGpu, pKernelBus, vidOffset)
#define kbusGetBAR0WindowVidOffset(pGpu, pKernelBus) kbusGetBAR0WindowVidOffset_DISPATCH(pGpu, pKernelBus)
#define kbusGetBAR0WindowVidOffset_HAL(pGpu, pKernelBus) kbusGetBAR0WindowVidOffset_DISPATCH(pGpu, pKernelBus)
#define kbusSetupBar0WindowBeforeBar2Bootstrap(pGpu, pKernelBus, arg0) kbusSetupBar0WindowBeforeBar2Bootstrap_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusSetupBar0WindowBeforeBar2Bootstrap_HAL(pGpu, pKernelBus, arg0) kbusSetupBar0WindowBeforeBar2Bootstrap_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusRestoreBar0WindowAfterBar2Bootstrap(pGpu, pKernelBus, arg0) kbusRestoreBar0WindowAfterBar2Bootstrap_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusRestoreBar0WindowAfterBar2Bootstrap_HAL(pGpu, pKernelBus, arg0) kbusRestoreBar0WindowAfterBar2Bootstrap_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusVerifyBar2(pGpu, pKernelBus, memDescIn, pCpuPtrIn, offset, size) kbusVerifyBar2_DISPATCH(pGpu, pKernelBus, memDescIn, pCpuPtrIn, offset, size)
#define kbusVerifyBar2_HAL(pGpu, pKernelBus, memDescIn, pCpuPtrIn, offset, size) kbusVerifyBar2_DISPATCH(pGpu, pKernelBus, memDescIn, pCpuPtrIn, offset, size)
#define kbusBar2BootStrapInPhysicalMode(pGpu, pKernelBus) kbusBar2BootStrapInPhysicalMode_DISPATCH(pGpu, pKernelBus)
#define kbusBar2BootStrapInPhysicalMode_HAL(pGpu, pKernelBus) kbusBar2BootStrapInPhysicalMode_DISPATCH(pGpu, pKernelBus)
#define kbusBindBar2(pGpu, pKernelBus, arg0) kbusBindBar2_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusBindBar2_HAL(pGpu, pKernelBus, arg0) kbusBindBar2_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusInstBlkWriteAddrLimit(pGpu, pKernelBus, arg0, arg1, arg2, arg3) kbusInstBlkWriteAddrLimit_DISPATCH(pGpu, pKernelBus, arg0, arg1, arg2, arg3)
#define kbusInstBlkWriteAddrLimit_HAL(pGpu, pKernelBus, arg0, arg1, arg2, arg3) kbusInstBlkWriteAddrLimit_DISPATCH(pGpu, pKernelBus, arg0, arg1, arg2, arg3)
#define kbusInitInstBlk(pGpu, pKernelBus, pInstBlkMemDesc, pPDB, vaLimit, bigPageSize, pVAS) kbusInitInstBlk_DISPATCH(pGpu, pKernelBus, pInstBlkMemDesc, pPDB, vaLimit, bigPageSize, pVAS)
#define kbusInitInstBlk_HAL(pGpu, pKernelBus, pInstBlkMemDesc, pPDB, vaLimit, bigPageSize, pVAS) kbusInitInstBlk_DISPATCH(pGpu, pKernelBus, pInstBlkMemDesc, pPDB, vaLimit, bigPageSize, pVAS)
#define kbusBar2InstBlkWrite(pGpu, pKernelBus, pMap, pPDB, vaLimit, bigPageSize) kbusBar2InstBlkWrite_DISPATCH(pGpu, pKernelBus, pMap, pPDB, vaLimit, bigPageSize)
#define kbusBar2InstBlkWrite_HAL(pGpu, pKernelBus, pMap, pPDB, vaLimit, bigPageSize) kbusBar2InstBlkWrite_DISPATCH(pGpu, pKernelBus, pMap, pPDB, vaLimit, bigPageSize)
#define kbusSetupBar2PageTablesAtBottomOfFb(pGpu, pKernelBus, gfid) kbusSetupBar2PageTablesAtBottomOfFb_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusSetupBar2PageTablesAtBottomOfFb_HAL(pGpu, pKernelBus, gfid) kbusSetupBar2PageTablesAtBottomOfFb_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusTeardownBar2PageTablesAtBottomOfFb(pGpu, pKernelBus, gfid) kbusTeardownBar2PageTablesAtBottomOfFb_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusTeardownBar2PageTablesAtBottomOfFb_HAL(pGpu, pKernelBus, gfid) kbusTeardownBar2PageTablesAtBottomOfFb_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusSetupBar2InstBlkAtBottomOfFb(pGpu, pKernelBus, pPDB, vaLimit, bigPageSize, gfid) kbusSetupBar2InstBlkAtBottomOfFb_DISPATCH(pGpu, pKernelBus, pPDB, vaLimit, bigPageSize, gfid)
#define kbusSetupBar2InstBlkAtBottomOfFb_HAL(pGpu, pKernelBus, pPDB, vaLimit, bigPageSize, gfid) kbusSetupBar2InstBlkAtBottomOfFb_DISPATCH(pGpu, pKernelBus, pPDB, vaLimit, bigPageSize, gfid)
#define kbusTeardownBar2InstBlkAtBottomOfFb(pGpu, pKernelBus, gfid) kbusTeardownBar2InstBlkAtBottomOfFb_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusTeardownBar2InstBlkAtBottomOfFb_HAL(pGpu, pKernelBus, gfid) kbusTeardownBar2InstBlkAtBottomOfFb_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusSetupBar2PageTablesAtTopOfFb(pGpu, pKernelBus, gfid) kbusSetupBar2PageTablesAtTopOfFb_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusSetupBar2PageTablesAtTopOfFb_HAL(pGpu, pKernelBus, gfid) kbusSetupBar2PageTablesAtTopOfFb_DISPATCH(pGpu, pKernelBus, gfid)
#define kbusCommitBar2PDEs(pGpu, pKernelBus) kbusCommitBar2PDEs_DISPATCH(pGpu, pKernelBus)
#define kbusCommitBar2PDEs_HAL(pGpu, pKernelBus) kbusCommitBar2PDEs_DISPATCH(pGpu, pKernelBus)
#define kbusVerifyCoherentLink(pGpu, pKernelBus) kbusVerifyCoherentLink_DISPATCH(pGpu, pKernelBus)
#define kbusVerifyCoherentLink_HAL(pGpu, pKernelBus) kbusVerifyCoherentLink_DISPATCH(pGpu, pKernelBus)
#define kbusTeardownMailbox(pGpu, pKernelBus) kbusTeardownMailbox_DISPATCH(pGpu, pKernelBus)
#define kbusTeardownMailbox_HAL(pGpu, pKernelBus) kbusTeardownMailbox_DISPATCH(pGpu, pKernelBus)
#define kbusBar1InstBlkVasUpdate(pGpu, pKernelBus) kbusBar1InstBlkVasUpdate_DISPATCH(pGpu, pKernelBus)
#define kbusBar1InstBlkVasUpdate_HAL(pGpu, pKernelBus) kbusBar1InstBlkVasUpdate_DISPATCH(pGpu, pKernelBus)
#define kbusFlushPcieForBar0Doorbell(pGpu, pKernelBus) kbusFlushPcieForBar0Doorbell_DISPATCH(pGpu, pKernelBus)
#define kbusFlushPcieForBar0Doorbell_HAL(pGpu, pKernelBus) kbusFlushPcieForBar0Doorbell_DISPATCH(pGpu, pKernelBus)
#define kbusCreateCoherentCpuMapping(pGpu, pKernelBus, numaOnlineMemorySize, bFlush) kbusCreateCoherentCpuMapping_DISPATCH(pGpu, pKernelBus, numaOnlineMemorySize, bFlush)
#define kbusCreateCoherentCpuMapping_HAL(pGpu, pKernelBus, numaOnlineMemorySize, bFlush) kbusCreateCoherentCpuMapping_DISPATCH(pGpu, pKernelBus, numaOnlineMemorySize, bFlush)
#define kbusMapCoherentCpuMapping(pGpu, pKernelBus, arg0) kbusMapCoherentCpuMapping_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusMapCoherentCpuMapping_HAL(pGpu, pKernelBus, arg0) kbusMapCoherentCpuMapping_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusUnmapCoherentCpuMapping(pGpu, pKernelBus, arg0) kbusUnmapCoherentCpuMapping_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusUnmapCoherentCpuMapping_HAL(pGpu, pKernelBus, arg0) kbusUnmapCoherentCpuMapping_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusTeardownCoherentCpuMapping(pGpu, pKernelBus, arg0) kbusTeardownCoherentCpuMapping_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusTeardownCoherentCpuMapping_HAL(pGpu, pKernelBus, arg0) kbusTeardownCoherentCpuMapping_DISPATCH(pGpu, pKernelBus, arg0)
#define kbusBar1InstBlkBind(pGpu, pKernelBus) kbusBar1InstBlkBind_DISPATCH(pGpu, pKernelBus)
#define kbusBar1InstBlkBind_HAL(pGpu, pKernelBus) kbusBar1InstBlkBind_DISPATCH(pGpu, pKernelBus)
#define kbusGetEccCounts(pGpu, pKernelBus) kbusGetEccCounts_DISPATCH(pGpu, pKernelBus)
#define kbusGetEccCounts_HAL(pGpu, pKernelBus) kbusGetEccCounts_DISPATCH(pGpu, pKernelBus)
#define kbusStateInitUnlocked(pGpu, pEngstate) kbusStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kbusInitMissing(pGpu, pEngstate) kbusInitMissing_DISPATCH(pGpu, pEngstate)
#define kbusStatePreInitUnlocked(pGpu, pEngstate) kbusStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kbusIsPresent(pGpu, pEngstate) kbusIsPresent_DISPATCH(pGpu, pEngstate)
736 NV_STATUS kbusConstructHal_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
737 
738 
739 #ifdef __nvoc_kern_bus_h_disabled
kbusConstructHal(struct OBJGPU * pGpu,struct KernelBus * pKernelBus)740 static inline NV_STATUS kbusConstructHal(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
741     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
742     return NV_ERR_NOT_SUPPORTED;
743 }
744 #else //__nvoc_kern_bus_h_disabled
745 #define kbusConstructHal(pGpu, pKernelBus) kbusConstructHal_GM107(pGpu, pKernelBus)
746 #endif //__nvoc_kern_bus_h_disabled
747 
748 #define kbusConstructHal_HAL(pGpu, pKernelBus) kbusConstructHal(pGpu, pKernelBus)
749 
750 NvU64 kbusGetBar1ResvdVA_TU102(struct KernelBus *pKernelBus);
751 
752 
753 #ifdef __nvoc_kern_bus_h_disabled
kbusGetBar1ResvdVA(struct KernelBus * pKernelBus)754 static inline NvU64 kbusGetBar1ResvdVA(struct KernelBus *pKernelBus) {
755     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
756     return 0;
757 }
758 #else //__nvoc_kern_bus_h_disabled
759 #define kbusGetBar1ResvdVA(pKernelBus) kbusGetBar1ResvdVA_TU102(pKernelBus)
760 #endif //__nvoc_kern_bus_h_disabled
761 
762 #define kbusGetBar1ResvdVA_HAL(pKernelBus) kbusGetBar1ResvdVA(pKernelBus)
763 
764 NV_STATUS kbusStateInitLockedKernel_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
765 
766 
767 #ifdef __nvoc_kern_bus_h_disabled
kbusStateInitLockedKernel(struct OBJGPU * pGpu,struct KernelBus * pKernelBus)768 static inline NV_STATUS kbusStateInitLockedKernel(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
769     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
770     return NV_ERR_NOT_SUPPORTED;
771 }
772 #else //__nvoc_kern_bus_h_disabled
773 #define kbusStateInitLockedKernel(pGpu, pKernelBus) kbusStateInitLockedKernel_GM107(pGpu, pKernelBus)
774 #endif //__nvoc_kern_bus_h_disabled
775 
776 #define kbusStateInitLockedKernel_HAL(pGpu, pKernelBus) kbusStateInitLockedKernel(pGpu, pKernelBus)
777 
kbusStateInitLockedPhysical_56cd7a(struct OBJGPU * pGpu,struct KernelBus * pKernelBus)778 static inline NV_STATUS kbusStateInitLockedPhysical_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
779     return NV_OK;
780 }
781 
782 NV_STATUS kbusStateInitLockedPhysical_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
783 
784 
785 #ifdef __nvoc_kern_bus_h_disabled
786 static inline NV_STATUS kbusStateInitLockedPhysical(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
787     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
788     return NV_ERR_NOT_SUPPORTED;
789 }
790 #else //__nvoc_kern_bus_h_disabled
791 #define kbusStateInitLockedPhysical(pGpu, pKernelBus) kbusStateInitLockedPhysical_56cd7a(pGpu, pKernelBus)
792 #endif //__nvoc_kern_bus_h_disabled
793 
794 #define kbusStateInitLockedPhysical_HAL(pGpu, pKernelBus) kbusStateInitLockedPhysical(pGpu, pKernelBus)
795 
796 NV_STATUS kbusMemoryCopy_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU64 arg1, PMEMORY_DESCRIPTOR arg2, NvU64 arg3, NvU64 arg4);
797 
798 
799 #ifdef __nvoc_kern_bus_h_disabled
800 static inline NV_STATUS kbusMemoryCopy(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU64 arg1, PMEMORY_DESCRIPTOR arg2, NvU64 arg3, NvU64 arg4) {
801     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
802     return NV_ERR_NOT_SUPPORTED;
803 }
804 #else //__nvoc_kern_bus_h_disabled
805 #define kbusMemoryCopy(pGpu, pKernelBus, arg0, arg1, arg2, arg3, arg4) kbusMemoryCopy_GM107(pGpu, pKernelBus, arg0, arg1, arg2, arg3, arg4)
806 #endif //__nvoc_kern_bus_h_disabled
807 
808 #define kbusMemoryCopy_HAL(pGpu, pKernelBus, arg0, arg1, arg2, arg3, arg4) kbusMemoryCopy(pGpu, pKernelBus, arg0, arg1, arg2, arg3, arg4)
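/*
 * The generated prototype above elides the argument names (arg0..arg4). A minimal
 * usage sketch, assuming the (pDstMemDesc, dstOffset, pSrcMemDesc, srcOffset,
 * sizeBytes) ordering and two memory descriptors allocated elsewhere by the caller:
 *
 *     NV_STATUS status = kbusMemoryCopy_HAL(pGpu, pKernelBus,
 *                                           pDstMemDesc, 0,
 *                                           pSrcMemDesc, 0,
 *                                           copySizeBytes);
 *     if (status != NV_OK)
 *         return status;
 */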
809 
810 NV_STATUS kbusPrepareForXVEReset_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
811 
812 
813 #ifdef __nvoc_kern_bus_h_disabled
814 static inline NV_STATUS kbusPrepareForXVEReset(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
815     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
816     return NV_ERR_NOT_SUPPORTED;
817 }
818 #else //__nvoc_kern_bus_h_disabled
819 #define kbusPrepareForXVEReset(pGpu, pKernelBus) kbusPrepareForXVEReset_GM107(pGpu, pKernelBus)
820 #endif //__nvoc_kern_bus_h_disabled
821 
822 #define kbusPrepareForXVEReset_HAL(pGpu, pKernelBus) kbusPrepareForXVEReset(pGpu, pKernelBus)
823 
824 NV_STATUS kbusUpdateRmAperture_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU64 arg1, NvU64 arg2, NvU32 arg3);
825 
826 
827 #ifdef __nvoc_kern_bus_h_disabled
828 static inline NV_STATUS kbusUpdateRmAperture(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU64 arg1, NvU64 arg2, NvU32 arg3) {
829     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
830     return NV_ERR_NOT_SUPPORTED;
831 }
832 #else //__nvoc_kern_bus_h_disabled
833 #define kbusUpdateRmAperture(pGpu, pKernelBus, arg0, arg1, arg2, arg3) kbusUpdateRmAperture_GM107(pGpu, pKernelBus, arg0, arg1, arg2, arg3)
834 #endif //__nvoc_kern_bus_h_disabled
835 
836 #define kbusUpdateRmAperture_HAL(pGpu, pKernelBus, arg0, arg1, arg2, arg3) kbusUpdateRmAperture(pGpu, pKernelBus, arg0, arg1, arg2, arg3)
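/*
 * A hedged usage sketch for the entry point above; the generated prototype elides
 * the argument names, and this assumes the (pMemDesc, vAddr, vaSize, flags)
 * ordering, with flags drawn from the UPDATE_RM_APERTURE_FLAGS_* values defined
 * earlier in this header:
 *
 *     NV_STATUS status = kbusUpdateRmAperture_HAL(pGpu, pKernelBus, pMemDesc,
 *                                                 vAddr, vaSize,
 *                                                 UPDATE_RM_APERTURE_FLAGS_INVALIDATE);
 *     if (status != NV_OK)
 *         return status;
 */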
837 
838 NV_STATUS kbusSetupBar2GpuVaSpace_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
839 
840 
841 #ifdef __nvoc_kern_bus_h_disabled
842 static inline NV_STATUS kbusSetupBar2GpuVaSpace(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
843     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
844     return NV_ERR_NOT_SUPPORTED;
845 }
846 #else //__nvoc_kern_bus_h_disabled
847 #define kbusSetupBar2GpuVaSpace(pGpu, pKernelBus, gfid) kbusSetupBar2GpuVaSpace_GM107(pGpu, pKernelBus, gfid)
848 #endif //__nvoc_kern_bus_h_disabled
849 
850 #define kbusSetupBar2GpuVaSpace_HAL(pGpu, pKernelBus, gfid) kbusSetupBar2GpuVaSpace(pGpu, pKernelBus, gfid)
851 
852 NV_STATUS kbusTeardownBar2GpuVaSpace_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
853 
854 
855 #ifdef __nvoc_kern_bus_h_disabled
856 static inline NV_STATUS kbusTeardownBar2GpuVaSpace(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
857     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
858     return NV_ERR_NOT_SUPPORTED;
859 }
860 #else //__nvoc_kern_bus_h_disabled
861 #define kbusTeardownBar2GpuVaSpace(pGpu, pKernelBus, gfid) kbusTeardownBar2GpuVaSpace_GM107(pGpu, pKernelBus, gfid)
862 #endif //__nvoc_kern_bus_h_disabled
863 
864 #define kbusTeardownBar2GpuVaSpace_HAL(pGpu, pKernelBus, gfid) kbusTeardownBar2GpuVaSpace(pGpu, pKernelBus, gfid)
865 
866 NvU32 kbusGetSizeOfBar2PageTables_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
867 
868 
869 #ifdef __nvoc_kern_bus_h_disabled
870 static inline NvU32 kbusGetSizeOfBar2PageTables(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
871     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
872     return 0;
873 }
874 #else //__nvoc_kern_bus_h_disabled
875 #define kbusGetSizeOfBar2PageTables(pGpu, pKernelBus) kbusGetSizeOfBar2PageTables_GM107(pGpu, pKernelBus)
876 #endif //__nvoc_kern_bus_h_disabled
877 
878 #define kbusGetSizeOfBar2PageTables_HAL(pGpu, pKernelBus) kbusGetSizeOfBar2PageTables(pGpu, pKernelBus)
879 
880 NvU32 kbusGetSizeOfBar2PageDirs_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
881 
882 
883 #ifdef __nvoc_kern_bus_h_disabled
884 static inline NvU32 kbusGetSizeOfBar2PageDirs(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
885     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
886     return 0;
887 }
888 #else //__nvoc_kern_bus_h_disabled
889 #define kbusGetSizeOfBar2PageDirs(pGpu, pKernelBus) kbusGetSizeOfBar2PageDirs_GM107(pGpu, pKernelBus)
890 #endif //__nvoc_kern_bus_h_disabled
891 
892 #define kbusGetSizeOfBar2PageDirs_HAL(pGpu, pKernelBus) kbusGetSizeOfBar2PageDirs(pGpu, pKernelBus)
893 
894 static inline NvU64 kbusGetCpuInvisibleBar2BaseAdjust_cb032a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
895     return 0ULL;
896 }
897 
898 NvU64 kbusGetCpuInvisibleBar2BaseAdjust_TU102(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
899 
900 
901 #ifdef __nvoc_kern_bus_h_disabled
902 static inline NvU64 kbusGetCpuInvisibleBar2BaseAdjust(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
903     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
904     return 0;
905 }
906 #else //__nvoc_kern_bus_h_disabled
907 #define kbusGetCpuInvisibleBar2BaseAdjust(pGpu, pKernelBus) kbusGetCpuInvisibleBar2BaseAdjust_cb032a(pGpu, pKernelBus)
908 #endif //__nvoc_kern_bus_h_disabled
909 
910 #define kbusGetCpuInvisibleBar2BaseAdjust_HAL(pGpu, pKernelBus) kbusGetCpuInvisibleBar2BaseAdjust(pGpu, pKernelBus)
911 
912 MMU_WALK *kbusGetBar2GmmuWalker_GM107(struct KernelBus *pKernelBus);
913 
914 
915 #ifdef __nvoc_kern_bus_h_disabled
916 static inline MMU_WALK *kbusGetBar2GmmuWalker(struct KernelBus *pKernelBus) {
917     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
918     return NULL;
919 }
920 #else //__nvoc_kern_bus_h_disabled
921 #define kbusGetBar2GmmuWalker(pKernelBus) kbusGetBar2GmmuWalker_GM107(pKernelBus)
922 #endif //__nvoc_kern_bus_h_disabled
923 
924 #define kbusGetBar2GmmuWalker_HAL(pKernelBus) kbusGetBar2GmmuWalker(pKernelBus)
925 
926 const struct GMMU_FMT *kbusGetBar2GmmuFmt_GM107(struct KernelBus *pKernelBus);
927 
928 
929 #ifdef __nvoc_kern_bus_h_disabled
930 static inline const struct GMMU_FMT *kbusGetBar2GmmuFmt(struct KernelBus *pKernelBus) {
931     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
932     return NULL;
933 }
934 #else //__nvoc_kern_bus_h_disabled
935 #define kbusGetBar2GmmuFmt(pKernelBus) kbusGetBar2GmmuFmt_GM107(pKernelBus)
936 #endif //__nvoc_kern_bus_h_disabled
937 
938 #define kbusGetBar2GmmuFmt_HAL(pKernelBus) kbusGetBar2GmmuFmt(pKernelBus)
939 
940 NV_STATUS kbusSetBarsApertureSize_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
941 
942 
943 #ifdef __nvoc_kern_bus_h_disabled
944 static inline NV_STATUS kbusSetBarsApertureSize(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
945     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
946     return NV_ERR_NOT_SUPPORTED;
947 }
948 #else //__nvoc_kern_bus_h_disabled
949 #define kbusSetBarsApertureSize(pGpu, pKernelBus, gfid) kbusSetBarsApertureSize_GM107(pGpu, pKernelBus, gfid)
950 #endif //__nvoc_kern_bus_h_disabled
951 
952 #define kbusSetBarsApertureSize_HAL(pGpu, pKernelBus, gfid) kbusSetBarsApertureSize(pGpu, pKernelBus, gfid)
953 
954 NV_STATUS kbusConstructVirtualBar2_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
955 
956 
957 #ifdef __nvoc_kern_bus_h_disabled
958 static inline NV_STATUS kbusConstructVirtualBar2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
959     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
960     return NV_ERR_NOT_SUPPORTED;
961 }
962 #else //__nvoc_kern_bus_h_disabled
963 #define kbusConstructVirtualBar2(pGpu, pKernelBus, gfid) kbusConstructVirtualBar2_VBAR2(pGpu, pKernelBus, gfid)
964 #endif //__nvoc_kern_bus_h_disabled
965 
966 #define kbusConstructVirtualBar2_HAL(pGpu, pKernelBus, gfid) kbusConstructVirtualBar2(pGpu, pKernelBus, gfid)
967 
968 void kbusDestructVirtualBar2_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool shutdown, NvU32 gfid);
969 
970 
971 #ifdef __nvoc_kern_bus_h_disabled
972 static inline void kbusDestructVirtualBar2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool shutdown, NvU32 gfid) {
973     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
974 }
975 #else //__nvoc_kern_bus_h_disabled
976 #define kbusDestructVirtualBar2(pGpu, pKernelBus, shutdown, gfid) kbusDestructVirtualBar2_VBAR2(pGpu, pKernelBus, shutdown, gfid)
977 #endif //__nvoc_kern_bus_h_disabled
978 
979 #define kbusDestructVirtualBar2_HAL(pGpu, pKernelBus, shutdown, gfid) kbusDestructVirtualBar2(pGpu, pKernelBus, shutdown, gfid)
980 
981 void kbusFlushVirtualBar2_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool shutdown, NvU32 gfid);
982 
983 
984 #ifdef __nvoc_kern_bus_h_disabled
985 static inline void kbusFlushVirtualBar2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool shutdown, NvU32 gfid) {
986     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
987 }
988 #else //__nvoc_kern_bus_h_disabled
989 #define kbusFlushVirtualBar2(pGpu, pKernelBus, shutdown, gfid) kbusFlushVirtualBar2_VBAR2(pGpu, pKernelBus, shutdown, gfid)
990 #endif //__nvoc_kern_bus_h_disabled
991 
992 #define kbusFlushVirtualBar2_HAL(pGpu, pKernelBus, shutdown, gfid) kbusFlushVirtualBar2(pGpu, pKernelBus, shutdown, gfid)
993 
994 NV_STATUS kbusInitVirtualBar2_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
995 
996 
997 #ifdef __nvoc_kern_bus_h_disabled
998 static inline NV_STATUS kbusInitVirtualBar2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
999     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1000     return NV_ERR_NOT_SUPPORTED;
1001 }
1002 #else //__nvoc_kern_bus_h_disabled
1003 #define kbusInitVirtualBar2(pGpu, pKernelBus) kbusInitVirtualBar2_VBAR2(pGpu, pKernelBus)
1004 #endif //__nvoc_kern_bus_h_disabled
1005 
1006 #define kbusInitVirtualBar2_HAL(pGpu, pKernelBus) kbusInitVirtualBar2(pGpu, pKernelBus)
1007 
1008 NV_STATUS kbusPreInitVirtualBar2_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1009 
1010 
1011 #ifdef __nvoc_kern_bus_h_disabled
1012 static inline NV_STATUS kbusPreInitVirtualBar2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1013     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1014     return NV_ERR_NOT_SUPPORTED;
1015 }
1016 #else //__nvoc_kern_bus_h_disabled
1017 #define kbusPreInitVirtualBar2(pGpu, pKernelBus) kbusPreInitVirtualBar2_VBAR2(pGpu, pKernelBus)
1018 #endif //__nvoc_kern_bus_h_disabled
1019 
1020 #define kbusPreInitVirtualBar2_HAL(pGpu, pKernelBus) kbusPreInitVirtualBar2(pGpu, pKernelBus)
1021 
1022 NV_STATUS kbusConstructVirtualBar2CpuVisibleHeap_VBAR2(struct KernelBus *pKernelBus, NvU32 gfid);
1023 
1024 
1025 #ifdef __nvoc_kern_bus_h_disabled
1026 static inline NV_STATUS kbusConstructVirtualBar2CpuVisibleHeap(struct KernelBus *pKernelBus, NvU32 gfid) {
1027     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1028     return NV_ERR_NOT_SUPPORTED;
1029 }
1030 #else //__nvoc_kern_bus_h_disabled
1031 #define kbusConstructVirtualBar2CpuVisibleHeap(pKernelBus, gfid) kbusConstructVirtualBar2CpuVisibleHeap_VBAR2(pKernelBus, gfid)
1032 #endif //__nvoc_kern_bus_h_disabled
1033 
1034 #define kbusConstructVirtualBar2CpuVisibleHeap_HAL(pKernelBus, gfid) kbusConstructVirtualBar2CpuVisibleHeap(pKernelBus, gfid)
1035 
1036 NV_STATUS kbusSetupCpuPointerForBusFlush_GV100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1037 
1038 
1039 #ifdef __nvoc_kern_bus_h_disabled
1040 static inline NV_STATUS kbusSetupCpuPointerForBusFlush(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1041     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1042     return NV_ERR_NOT_SUPPORTED;
1043 }
1044 #else //__nvoc_kern_bus_h_disabled
1045 #define kbusSetupCpuPointerForBusFlush(pGpu, pKernelBus) kbusSetupCpuPointerForBusFlush_GV100(pGpu, pKernelBus)
1046 #endif //__nvoc_kern_bus_h_disabled
1047 
1048 #define kbusSetupCpuPointerForBusFlush_HAL(pGpu, pKernelBus) kbusSetupCpuPointerForBusFlush(pGpu, pKernelBus)
1049 
1050 void kbusDestroyCpuPointerForBusFlush_GV100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1051 
1052 
1053 #ifdef __nvoc_kern_bus_h_disabled
1054 static inline void kbusDestroyCpuPointerForBusFlush(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1055     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1056 }
1057 #else //__nvoc_kern_bus_h_disabled
1058 #define kbusDestroyCpuPointerForBusFlush(pGpu, pKernelBus) kbusDestroyCpuPointerForBusFlush_GV100(pGpu, pKernelBus)
1059 #endif //__nvoc_kern_bus_h_disabled
1060 
1061 #define kbusDestroyCpuPointerForBusFlush_HAL(pGpu, pKernelBus) kbusDestroyCpuPointerForBusFlush(pGpu, pKernelBus)
1062 
1063 NV_STATUS kbusSetupBar2CpuAperture_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
1064 
1065 
1066 #ifdef __nvoc_kern_bus_h_disabled
1067 static inline NV_STATUS kbusSetupBar2CpuAperture(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
1068     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1069     return NV_ERR_NOT_SUPPORTED;
1070 }
1071 #else //__nvoc_kern_bus_h_disabled
1072 #define kbusSetupBar2CpuAperture(pGpu, pKernelBus, gfid) kbusSetupBar2CpuAperture_GM107(pGpu, pKernelBus, gfid)
1073 #endif //__nvoc_kern_bus_h_disabled
1074 
1075 #define kbusSetupBar2CpuAperture_HAL(pGpu, pKernelBus, gfid) kbusSetupBar2CpuAperture(pGpu, pKernelBus, gfid)
1076 
1077 NV_STATUS kbusSetP2PMailboxBar1Area_GM200(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 mailboxBar1Addr, NvU32 mailboxTotalSize);
1078 
1079 
1080 #ifdef __nvoc_kern_bus_h_disabled
1081 static inline NV_STATUS kbusSetP2PMailboxBar1Area(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 mailboxBar1Addr, NvU32 mailboxTotalSize) {
1082     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1083     return NV_ERR_NOT_SUPPORTED;
1084 }
1085 #else //__nvoc_kern_bus_h_disabled
1086 #define kbusSetP2PMailboxBar1Area(pGpu, pKernelBus, mailboxBar1Addr, mailboxTotalSize) kbusSetP2PMailboxBar1Area_GM200(pGpu, pKernelBus, mailboxBar1Addr, mailboxTotalSize)
1087 #endif //__nvoc_kern_bus_h_disabled
1088 
1089 #define kbusSetP2PMailboxBar1Area_HAL(pGpu, pKernelBus, mailboxBar1Addr, mailboxTotalSize) kbusSetP2PMailboxBar1Area(pGpu, pKernelBus, mailboxBar1Addr, mailboxTotalSize)
1090 
1091 void kbusUnsetP2PMailboxBar1Area_GM200(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1092 
1093 
1094 #ifdef __nvoc_kern_bus_h_disabled
1095 static inline void kbusUnsetP2PMailboxBar1Area(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1096     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1097 }
1098 #else //__nvoc_kern_bus_h_disabled
1099 #define kbusUnsetP2PMailboxBar1Area(pGpu, pKernelBus) kbusUnsetP2PMailboxBar1Area_GM200(pGpu, pKernelBus)
1100 #endif //__nvoc_kern_bus_h_disabled
1101 
1102 #define kbusUnsetP2PMailboxBar1Area_HAL(pGpu, pKernelBus) kbusUnsetP2PMailboxBar1Area(pGpu, pKernelBus)
1103 
1104 NV_STATUS kbusAllocP2PMailboxBar1_GM200(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid, NvU64 vaRangeMax);
1105 
1106 
1107 #ifdef __nvoc_kern_bus_h_disabled
1108 static inline NV_STATUS kbusAllocP2PMailboxBar1(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid, NvU64 vaRangeMax) {
1109     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1110     return NV_ERR_NOT_SUPPORTED;
1111 }
1112 #else //__nvoc_kern_bus_h_disabled
1113 #define kbusAllocP2PMailboxBar1(pGpu, pKernelBus, gfid, vaRangeMax) kbusAllocP2PMailboxBar1_GM200(pGpu, pKernelBus, gfid, vaRangeMax)
1114 #endif //__nvoc_kern_bus_h_disabled
1115 
1116 #define kbusAllocP2PMailboxBar1_HAL(pGpu, pKernelBus, gfid, vaRangeMax) kbusAllocP2PMailboxBar1(pGpu, pKernelBus, gfid, vaRangeMax)
1117 
1118 RmPhysAddr kbusSetupMailboxAccess_GM200(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pGpu1, NvU32 localPeerId, PMEMORY_DESCRIPTOR *ppWMBoxMemDesc);
1119 
1120 
1121 #ifdef __nvoc_kern_bus_h_disabled
1122 static inline RmPhysAddr kbusSetupMailboxAccess(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pGpu1, NvU32 localPeerId, PMEMORY_DESCRIPTOR *ppWMBoxMemDesc) {
1123     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1124     RmPhysAddr ret;
1125     portMemSet(&ret, 0, sizeof(RmPhysAddr));
1126     return ret;
1127 }
1128 #else //__nvoc_kern_bus_h_disabled
1129 #define kbusSetupMailboxAccess(pGpu, pKernelBus, pGpu1, localPeerId, ppWMBoxMemDesc) kbusSetupMailboxAccess_GM200(pGpu, pKernelBus, pGpu1, localPeerId, ppWMBoxMemDesc)
1130 #endif //__nvoc_kern_bus_h_disabled
1131 
1132 #define kbusSetupMailboxAccess_HAL(pGpu, pKernelBus, pGpu1, localPeerId, ppWMBoxMemDesc) kbusSetupMailboxAccess(pGpu, pKernelBus, pGpu1, localPeerId, ppWMBoxMemDesc)
1133 
1134 void kbusDestroyPeerAccess_GM200(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerNum);
1135 
1136 
1137 #ifdef __nvoc_kern_bus_h_disabled
1138 static inline void kbusDestroyPeerAccess(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerNum) {
1139     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1140 }
1141 #else //__nvoc_kern_bus_h_disabled
1142 #define kbusDestroyPeerAccess(pGpu, pKernelBus, peerNum) kbusDestroyPeerAccess_GM200(pGpu, pKernelBus, peerNum)
1143 #endif //__nvoc_kern_bus_h_disabled
1144 
1145 #define kbusDestroyPeerAccess_HAL(pGpu, pKernelBus, peerNum) kbusDestroyPeerAccess(pGpu, pKernelBus, peerNum)
1146 
1147 NvU32 kbusGetPeerIdFromTable_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 locPeerIdx, NvU32 remPeerIdx);
1148 
1149 
1150 #ifdef __nvoc_kern_bus_h_disabled
1151 static inline NvU32 kbusGetPeerIdFromTable(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 locPeerIdx, NvU32 remPeerIdx) {
1152     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1153     return 0;
1154 }
1155 #else //__nvoc_kern_bus_h_disabled
1156 #define kbusGetPeerIdFromTable(pGpu, pKernelBus, locPeerIdx, remPeerIdx) kbusGetPeerIdFromTable_GM107(pGpu, pKernelBus, locPeerIdx, remPeerIdx)
1157 #endif //__nvoc_kern_bus_h_disabled
1158 
1159 #define kbusGetPeerIdFromTable_HAL(pGpu, pKernelBus, locPeerIdx, remPeerIdx) kbusGetPeerIdFromTable(pGpu, pKernelBus, locPeerIdx, remPeerIdx)
1160 
1161 NvU32 kbusGetUnusedPeerId_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1162 
1163 
1164 #ifdef __nvoc_kern_bus_h_disabled
1165 static inline NvU32 kbusGetUnusedPeerId(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1166     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1167     return 0;
1168 }
1169 #else //__nvoc_kern_bus_h_disabled
1170 #define kbusGetUnusedPeerId(pGpu, pKernelBus) kbusGetUnusedPeerId_GM107(pGpu, pKernelBus)
1171 #endif //__nvoc_kern_bus_h_disabled
1172 
1173 #define kbusGetUnusedPeerId_HAL(pGpu, pKernelBus) kbusGetUnusedPeerId(pGpu, pKernelBus)
1174 
1175 NV_STATUS kbusReserveP2PPeerIds_GM200(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerMask);
1176 
1177 
1178 #ifdef __nvoc_kern_bus_h_disabled
1179 static inline NV_STATUS kbusReserveP2PPeerIds(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerMask) {
1180     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1181     return NV_ERR_NOT_SUPPORTED;
1182 }
1183 #else //__nvoc_kern_bus_h_disabled
1184 #define kbusReserveP2PPeerIds(pGpu, pKernelBus, peerMask) kbusReserveP2PPeerIds_GM200(pGpu, pKernelBus, peerMask)
1185 #endif //__nvoc_kern_bus_h_disabled
1186 
1187 #define kbusReserveP2PPeerIds_HAL(pGpu, pKernelBus, peerMask) kbusReserveP2PPeerIds(pGpu, pKernelBus, peerMask)
1188 
1189 NV_STATUS kbusCreateP2PMappingForMailbox_GM200(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *peer0, NvU32 *peer1, NvU32 attributes);
1190 
1191 
1192 #ifdef __nvoc_kern_bus_h_disabled
1193 static inline NV_STATUS kbusCreateP2PMappingForMailbox(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *peer0, NvU32 *peer1, NvU32 attributes) {
1194     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1195     return NV_ERR_NOT_SUPPORTED;
1196 }
1197 #else //__nvoc_kern_bus_h_disabled
1198 #define kbusCreateP2PMappingForMailbox(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusCreateP2PMappingForMailbox_GM200(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
1199 #endif //__nvoc_kern_bus_h_disabled
1200 
1201 #define kbusCreateP2PMappingForMailbox_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusCreateP2PMappingForMailbox(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
1202 
1203 NV_STATUS kbusRemoveP2PMappingForMailbox_GM200(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 peer0, NvU32 peer1, NvU32 attributes);
1204 
1205 
1206 #ifdef __nvoc_kern_bus_h_disabled
1207 static inline NV_STATUS kbusRemoveP2PMappingForMailbox(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 peer0, NvU32 peer1, NvU32 attributes) {
1208     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1209     return NV_ERR_NOT_SUPPORTED;
1210 }
1211 #else //__nvoc_kern_bus_h_disabled
1212 #define kbusRemoveP2PMappingForMailbox(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusRemoveP2PMappingForMailbox_GM200(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
1213 #endif //__nvoc_kern_bus_h_disabled
1214 
1215 #define kbusRemoveP2PMappingForMailbox_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusRemoveP2PMappingForMailbox(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
1216 
1217 void kbusSetupMailboxes_GM200(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 arg0, NvU32 arg1);
1218 
1219 
1220 #ifdef __nvoc_kern_bus_h_disabled
1221 static inline void kbusSetupMailboxes(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 arg0, NvU32 arg1) {
1222     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1223 }
1224 #else //__nvoc_kern_bus_h_disabled
1225 #define kbusSetupMailboxes(pGpu0, pKernelBus0, pGpu1, pKernelBus1, arg0, arg1) kbusSetupMailboxes_GM200(pGpu0, pKernelBus0, pGpu1, pKernelBus1, arg0, arg1)
1226 #endif //__nvoc_kern_bus_h_disabled
1227 
1228 #define kbusSetupMailboxes_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, arg0, arg1) kbusSetupMailboxes(pGpu0, pKernelBus0, pGpu1, pKernelBus1, arg0, arg1)
1229 
1230 NV_STATUS kbusCreateP2PMappingForNvlink_GP100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *peer0, NvU32 *peer1, NvU32 attributes);
1231 
1232 
1233 #ifdef __nvoc_kern_bus_h_disabled
1234 static inline NV_STATUS kbusCreateP2PMappingForNvlink(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *peer0, NvU32 *peer1, NvU32 attributes) {
1235     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1236     return NV_ERR_NOT_SUPPORTED;
1237 }
1238 #else //__nvoc_kern_bus_h_disabled
1239 #define kbusCreateP2PMappingForNvlink(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusCreateP2PMappingForNvlink_GP100(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
1240 #endif //__nvoc_kern_bus_h_disabled
1241 
1242 #define kbusCreateP2PMappingForNvlink_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusCreateP2PMappingForNvlink(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
1243 
1244 NV_STATUS kbusRemoveP2PMappingForNvlink_GP100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 peer0, NvU32 peer1, NvU32 attributes);
1245 
1246 
1247 #ifdef __nvoc_kern_bus_h_disabled
1248 static inline NV_STATUS kbusRemoveP2PMappingForNvlink(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 peer0, NvU32 peer1, NvU32 attributes) {
1249     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1250     return NV_ERR_NOT_SUPPORTED;
1251 }
1252 #else //__nvoc_kern_bus_h_disabled
1253 #define kbusRemoveP2PMappingForNvlink(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusRemoveP2PMappingForNvlink_GP100(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
1254 #endif //__nvoc_kern_bus_h_disabled
1255 
1256 #define kbusRemoveP2PMappingForNvlink_HAL(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes) kbusRemoveP2PMappingForNvlink(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes)
1257 
1258 NvU32 kbusGetNvlinkPeerNumberMask_GP100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerId);
1259 
1260 
1261 #ifdef __nvoc_kern_bus_h_disabled
1262 static inline NvU32 kbusGetNvlinkPeerNumberMask(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerId) {
1263     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1264     return 0;
1265 }
1266 #else //__nvoc_kern_bus_h_disabled
1267 #define kbusGetNvlinkPeerNumberMask(pGpu, pKernelBus, peerId) kbusGetNvlinkPeerNumberMask_GP100(pGpu, pKernelBus, peerId)
1268 #endif //__nvoc_kern_bus_h_disabled
1269 
1270 #define kbusGetNvlinkPeerNumberMask_HAL(pGpu, pKernelBus, peerId) kbusGetNvlinkPeerNumberMask(pGpu, pKernelBus, peerId)
1271 
1272 void kbusUnlinkP2P_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1273 
1274 
1275 #ifdef __nvoc_kern_bus_h_disabled
1276 static inline void kbusUnlinkP2P(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1277     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1278 }
1279 #else //__nvoc_kern_bus_h_disabled
1280 #define kbusUnlinkP2P(pGpu, pKernelBus) kbusUnlinkP2P_GM107(pGpu, pKernelBus)
1281 #endif //__nvoc_kern_bus_h_disabled
1282 
1283 #define kbusUnlinkP2P_HAL(pGpu, pKernelBus) kbusUnlinkP2P(pGpu, pKernelBus)
1284 
1285 NV_STATUS kbusFlushSingle_GV100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags);
1286 
1287 
1288 #ifdef __nvoc_kern_bus_h_disabled
1289 static inline NV_STATUS kbusFlushSingle(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags) {
1290     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1291     return NV_ERR_NOT_SUPPORTED;
1292 }
1293 #else //__nvoc_kern_bus_h_disabled
1294 #define kbusFlushSingle(pGpu, pKernelBus, flags) kbusFlushSingle_GV100(pGpu, pKernelBus, flags)
1295 #endif //__nvoc_kern_bus_h_disabled
1296 
1297 #define kbusFlushSingle_HAL(pGpu, pKernelBus, flags) kbusFlushSingle(pGpu, pKernelBus, flags)
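/*
 * Minimal sketch of issuing a bus flush through the HAL alias above. The flag bits
 * come from the bus-flush defines elsewhere in this header (elided here);
 * flushFlags below is a placeholder for whichever of those bits the caller needs:
 *
 *     NV_STATUS status = kbusFlushSingle_HAL(pGpu, pKernelBus, flushFlags);
 *     if (status != NV_OK)
 *         NV_PRINTF(LEVEL_ERROR, "bus flush failed: 0x%x\n", status);
 */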
1298 
1299 void kbusInitPciBars_GM107(struct KernelBus *pKernelBus);
1300 
1301 
1302 #ifdef __nvoc_kern_bus_h_disabled
1303 static inline void kbusInitPciBars(struct KernelBus *pKernelBus) {
1304     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1305 }
1306 #else //__nvoc_kern_bus_h_disabled
1307 #define kbusInitPciBars(pKernelBus) kbusInitPciBars_GM107(pKernelBus)
1308 #endif //__nvoc_kern_bus_h_disabled
1309 
1310 #define kbusInitPciBars_HAL(pKernelBus) kbusInitPciBars(pKernelBus)
1311 
1312 NV_STATUS kbusInitBarsBaseInfo_GM107(struct KernelBus *pKernelBus);
1313 
1314 
1315 #ifdef __nvoc_kern_bus_h_disabled
1316 static inline NV_STATUS kbusInitBarsBaseInfo(struct KernelBus *pKernelBus) {
1317     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1318     return NV_ERR_NOT_SUPPORTED;
1319 }
1320 #else //__nvoc_kern_bus_h_disabled
1321 #define kbusInitBarsBaseInfo(pKernelBus) kbusInitBarsBaseInfo_GM107(pKernelBus)
1322 #endif //__nvoc_kern_bus_h_disabled
1323 
1324 #define kbusInitBarsBaseInfo_HAL(pKernelBus) kbusInitBarsBaseInfo(pKernelBus)
1325 
1326 NV_STATUS kbusMemAccessBar0Window_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 physAddr, void *pData, NvU64 accessSize, NvBool bRead, NV_ADDRESS_SPACE addrSpace);
1327 
1328 
1329 #ifdef __nvoc_kern_bus_h_disabled
1330 static inline NV_STATUS kbusMemAccessBar0Window(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 physAddr, void *pData, NvU64 accessSize, NvBool bRead, NV_ADDRESS_SPACE addrSpace) {
1331     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1332     return NV_ERR_NOT_SUPPORTED;
1333 }
1334 #else //__nvoc_kern_bus_h_disabled
1335 #define kbusMemAccessBar0Window(pGpu, pKernelBus, physAddr, pData, accessSize, bRead, addrSpace) kbusMemAccessBar0Window_GM107(pGpu, pKernelBus, physAddr, pData, accessSize, bRead, addrSpace)
1336 #endif //__nvoc_kern_bus_h_disabled
1337 
1338 #define kbusMemAccessBar0Window_HAL(pGpu, pKernelBus, physAddr, pData, accessSize, bRead, addrSpace) kbusMemAccessBar0Window(pGpu, pKernelBus, physAddr, pData, accessSize, bRead, addrSpace)
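/*
 * Sketch of a 4-byte read of video memory through the BAR0 window, using the HAL
 * alias above. Assumes ADDR_FBMEM as the NV_ADDRESS_SPACE value for FB-backed
 * physical addresses and a caller-supplied fbPhysAddr:
 *
 *     NvU32 data = 0;
 *     NV_STATUS status = kbusMemAccessBar0Window_HAL(pGpu, pKernelBus,
 *                                                    fbPhysAddr, &data,
 *                                                    sizeof(data),
 *                                                    NV_TRUE,      // bRead
 *                                                    ADDR_FBMEM);
 */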
1339 
1340 NvU64 kbusGetBAR0WindowAddress_GM107(struct KernelBus *pKernelBus);
1341 
1342 
1343 #ifdef __nvoc_kern_bus_h_disabled
1344 static inline NvU64 kbusGetBAR0WindowAddress(struct KernelBus *pKernelBus) {
1345     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1346     return 0;
1347 }
1348 #else //__nvoc_kern_bus_h_disabled
1349 #define kbusGetBAR0WindowAddress(pKernelBus) kbusGetBAR0WindowAddress_GM107(pKernelBus)
1350 #endif //__nvoc_kern_bus_h_disabled
1351 
1352 #define kbusGetBAR0WindowAddress_HAL(pKernelBus) kbusGetBAR0WindowAddress(pKernelBus)
1353 
1354 NV_STATUS kbusInitBar2_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
1355 
1356 
1357 #ifdef __nvoc_kern_bus_h_disabled
1358 static inline NV_STATUS kbusInitBar2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
1359     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1360     return NV_ERR_NOT_SUPPORTED;
1361 }
1362 #else //__nvoc_kern_bus_h_disabled
1363 #define kbusInitBar2(pGpu, pKernelBus, gfid) kbusInitBar2_GM107(pGpu, pKernelBus, gfid)
1364 #endif //__nvoc_kern_bus_h_disabled
1365 
1366 #define kbusInitBar2_HAL(pGpu, pKernelBus, gfid) kbusInitBar2(pGpu, pKernelBus, gfid)
1367 
1368 NV_STATUS kbusRestoreBar2_GM107(struct KernelBus *pKernelBus, NvU32 flags);
1369 
1370 
1371 #ifdef __nvoc_kern_bus_h_disabled
1372 static inline NV_STATUS kbusRestoreBar2(struct KernelBus *pKernelBus, NvU32 flags) {
1373     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1374     return NV_ERR_NOT_SUPPORTED;
1375 }
1376 #else //__nvoc_kern_bus_h_disabled
1377 #define kbusRestoreBar2(pKernelBus, flags) kbusRestoreBar2_GM107(pKernelBus, flags)
1378 #endif //__nvoc_kern_bus_h_disabled
1379 
1380 #define kbusRestoreBar2_HAL(pKernelBus, flags) kbusRestoreBar2(pKernelBus, flags)
1381 
1382 NV_STATUS kbusDestroyBar2_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
1383 
1384 
1385 #ifdef __nvoc_kern_bus_h_disabled
1386 static inline NV_STATUS kbusDestroyBar2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
1387     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1388     return NV_ERR_NOT_SUPPORTED;
1389 }
1390 #else //__nvoc_kern_bus_h_disabled
1391 #define kbusDestroyBar2(pGpu, pKernelBus, gfid) kbusDestroyBar2_GM107(pGpu, pKernelBus, gfid)
1392 #endif //__nvoc_kern_bus_h_disabled
1393 
1394 #define kbusDestroyBar2_HAL(pGpu, pKernelBus, gfid) kbusDestroyBar2(pGpu, pKernelBus, gfid)
1395 
1396 NV_STATUS kbusInitBar1_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
1397 
1398 
1399 #ifdef __nvoc_kern_bus_h_disabled
1400 static inline NV_STATUS kbusInitBar1(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
1401     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1402     return NV_ERR_NOT_SUPPORTED;
1403 }
1404 #else //__nvoc_kern_bus_h_disabled
1405 #define kbusInitBar1(pGpu, pKernelBus, gfid) kbusInitBar1_GM107(pGpu, pKernelBus, gfid)
1406 #endif //__nvoc_kern_bus_h_disabled
1407 
1408 #define kbusInitBar1_HAL(pGpu, pKernelBus, gfid) kbusInitBar1(pGpu, pKernelBus, gfid)
1409 
1410 NV_STATUS kbusDestroyBar1_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
1411 
1412 
1413 #ifdef __nvoc_kern_bus_h_disabled
1414 static inline NV_STATUS kbusDestroyBar1(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
1415     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1416     return NV_ERR_NOT_SUPPORTED;
1417 }
1418 #else //__nvoc_kern_bus_h_disabled
1419 #define kbusDestroyBar1(pGpu, pKernelBus, gfid) kbusDestroyBar1_GM107(pGpu, pKernelBus, gfid)
1420 #endif //__nvoc_kern_bus_h_disabled
1421 
1422 #define kbusDestroyBar1_HAL(pGpu, pKernelBus, gfid) kbusDestroyBar1(pGpu, pKernelBus, gfid)
1423 
1424 NV_STATUS kbusMapFbAperture_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU64 offset, NvU64 *pAperOffset, NvU64 *pLength, NvU32 flags, struct Device *pDevice);
1425 
1426 
1427 #ifdef __nvoc_kern_bus_h_disabled
1428 static inline NV_STATUS kbusMapFbAperture(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU64 offset, NvU64 *pAperOffset, NvU64 *pLength, NvU32 flags, struct Device *pDevice) {
1429     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1430     return NV_ERR_NOT_SUPPORTED;
1431 }
1432 #else //__nvoc_kern_bus_h_disabled
1433 #define kbusMapFbAperture(pGpu, pKernelBus, arg0, offset, pAperOffset, pLength, flags, pDevice) kbusMapFbAperture_GM107(pGpu, pKernelBus, arg0, offset, pAperOffset, pLength, flags, pDevice)
1434 #endif //__nvoc_kern_bus_h_disabled
1435 
1436 #define kbusMapFbAperture_HAL(pGpu, pKernelBus, arg0, offset, pAperOffset, pLength, flags, pDevice) kbusMapFbAperture(pGpu, pKernelBus, arg0, offset, pAperOffset, pLength, flags, pDevice)
1437 
1438 NV_STATUS kbusUnmapFbAperture_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU64 aperOffset, NvU64 length, NvU32 flags);
1439 
1440 
1441 #ifdef __nvoc_kern_bus_h_disabled
1442 static inline NV_STATUS kbusUnmapFbAperture(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU64 aperOffset, NvU64 length, NvU32 flags) {
1443     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1444     return NV_ERR_NOT_SUPPORTED;
1445 }
1446 #else //__nvoc_kern_bus_h_disabled
1447 #define kbusUnmapFbAperture(pGpu, pKernelBus, arg0, aperOffset, length, flags) kbusUnmapFbAperture_GM107(pGpu, pKernelBus, arg0, aperOffset, length, flags)
1448 #endif //__nvoc_kern_bus_h_disabled
1449 
1450 #define kbusUnmapFbAperture_HAL(pGpu, pKernelBus, arg0, aperOffset, length, flags) kbusUnmapFbAperture(pGpu, pKernelBus, arg0, aperOffset, length, flags)
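/*
 * The pair of entry points above maps a memory descriptor into the BAR1 (FB)
 * aperture and later releases that mapping. A hedged sketch, assuming arg0 is the
 * memory descriptor to map, mapFlags and pDevice are caller-supplied, and the
 * memdescGetSize() helper is available to size the mapping:
 *
 *     NvU64 aperOffset = 0;
 *     NvU64 length     = memdescGetSize(pMemDesc);
 *     NV_STATUS status = kbusMapFbAperture_HAL(pGpu, pKernelBus, pMemDesc, 0,
 *                                               &aperOffset, &length, mapFlags,
 *                                               pDevice);
 *     if (status == NV_OK)
 *     {
 *         // ... access FB through BAR1 at aperOffset ...
 *         kbusUnmapFbAperture_HAL(pGpu, pKernelBus, pMemDesc, aperOffset,
 *                                 length, mapFlags);
 *     }
 */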
1451 
1452 void kbusReleaseRmAperture_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0);
1453 
1454 
1455 #ifdef __nvoc_kern_bus_h_disabled
1456 static inline void kbusReleaseRmAperture(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0) {
1457     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1458 }
1459 #else //__nvoc_kern_bus_h_disabled
1460 #define kbusReleaseRmAperture(pGpu, pKernelBus, arg0) kbusReleaseRmAperture_VBAR2(pGpu, pKernelBus, arg0)
1461 #endif //__nvoc_kern_bus_h_disabled
1462 
1463 #define kbusReleaseRmAperture_HAL(pGpu, pKernelBus, arg0) kbusReleaseRmAperture(pGpu, pKernelBus, arg0)
1464 
1465 struct OBJVASPACE *kbusGetBar1VASpace_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1466 
1467 
1468 #ifdef __nvoc_kern_bus_h_disabled
1469 static inline struct OBJVASPACE *kbusGetBar1VASpace(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1470     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1471     return NULL;
1472 }
1473 #else //__nvoc_kern_bus_h_disabled
1474 #define kbusGetBar1VASpace(pGpu, pKernelBus) kbusGetBar1VASpace_GM107(pGpu, pKernelBus)
1475 #endif //__nvoc_kern_bus_h_disabled
1476 
1477 #define kbusGetBar1VASpace_HAL(pGpu, pKernelBus) kbusGetBar1VASpace(pGpu, pKernelBus)
1478 
1479 NvBool kbusCheckEngine_KERNEL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, ENGDESCRIPTOR desc);
1480 
1481 
1482 #ifdef __nvoc_kern_bus_h_disabled
1483 static inline NvBool kbusCheckEngine(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, ENGDESCRIPTOR desc) {
1484     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1485     return NV_FALSE;
1486 }
1487 #else //__nvoc_kern_bus_h_disabled
1488 #define kbusCheckEngine(pGpu, pKernelBus, desc) kbusCheckEngine_KERNEL(pGpu, pKernelBus, desc)
1489 #endif //__nvoc_kern_bus_h_disabled
1490 
1491 #define kbusCheckEngine_HAL(pGpu, pKernelBus, desc) kbusCheckEngine(pGpu, pKernelBus, desc)
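/*
 * kbusCheckEngine_KERNEL reports whether a given engine is present on this GPU.
 * Sketch only, assuming the ENG_CE(0) engine-descriptor macro from the engine
 * descriptor definitions is visible to the caller:
 *
 *     if (kbusCheckEngine_HAL(pGpu, pKernelBus, ENG_CE(0)))
 *     {
 *         // first copy engine is present; safe to route work to it
 *     }
 */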
1492 
1493 NvBool kbusCheckEngineWithOrderList_KERNEL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, ENGDESCRIPTOR desc, NvBool bCheckEngineOrder);
1494 
1495 
1496 #ifdef __nvoc_kern_bus_h_disabled
1497 static inline NvBool kbusCheckEngineWithOrderList(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, ENGDESCRIPTOR desc, NvBool bCheckEngineOrder) {
1498     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1499     return NV_FALSE;
1500 }
1501 #else //__nvoc_kern_bus_h_disabled
1502 #define kbusCheckEngineWithOrderList(pGpu, pKernelBus, desc, bCheckEngineOrder) kbusCheckEngineWithOrderList_KERNEL(pGpu, pKernelBus, desc, bCheckEngineOrder)
1503 #endif //__nvoc_kern_bus_h_disabled
1504 
1505 #define kbusCheckEngineWithOrderList_HAL(pGpu, pKernelBus, desc, bCheckEngineOrder) kbusCheckEngineWithOrderList(pGpu, pKernelBus, desc, bCheckEngineOrder)
1506 
1507 NV_STATUS kbusFlush_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags);
1508 
1509 
1510 #ifdef __nvoc_kern_bus_h_disabled
1511 static inline NV_STATUS kbusFlush(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags) {
1512     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1513     return NV_ERR_NOT_SUPPORTED;
1514 }
1515 #else //__nvoc_kern_bus_h_disabled
1516 #define kbusFlush(pGpu, pKernelBus, flags) kbusFlush_GM107(pGpu, pKernelBus, flags)
1517 #endif //__nvoc_kern_bus_h_disabled
1518 
1519 #define kbusFlush_HAL(pGpu, pKernelBus, flags) kbusFlush(pGpu, pKernelBus, flags)
1520 
1521 static inline void kbusTeardownCoherentCpuMappingAcr_b3696a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1522     return;
1523 }
1524 
1525 
1526 #ifdef __nvoc_kern_bus_h_disabled
1527 static inline void kbusTeardownCoherentCpuMappingAcr(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1528     NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
1529 }
1530 #else //__nvoc_kern_bus_h_disabled
1531 #define kbusTeardownCoherentCpuMappingAcr(pGpu, pKernelBus) kbusTeardownCoherentCpuMappingAcr_b3696a(pGpu, pKernelBus)
1532 #endif //__nvoc_kern_bus_h_disabled
1533 
1534 #define kbusTeardownCoherentCpuMappingAcr_HAL(pGpu, pKernelBus) kbusTeardownCoherentCpuMappingAcr(pGpu, pKernelBus)
1535 
1536 NV_STATUS kbusInitBarsSize_VGPUSTUB(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1537 
1538 NV_STATUS kbusInitBarsSize_KERNEL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1539 
1540 static inline NV_STATUS kbusInitBarsSize_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1541     return pKernelBus->__kbusInitBarsSize__(pGpu, pKernelBus);
1542 }
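/*
 * The *_DISPATCH inlines that begin here follow the NVOC virtual-dispatch pattern
 * rather than the chip-resolved pattern used above: each one calls through the
 * corresponding __kbus...__ function pointer stored in the KernelBus object, so
 * the implementation bound at object construction (for example
 * kbusInitBarsSize_KERNEL vs. kbusInitBarsSize_VGPUSTUB above) is what actually
 * runs. Minimal sketch of a call through the dispatcher:
 *
 *     NV_STATUS status = kbusInitBarsSize_DISPATCH(pGpu, pKernelBus);
 *     if (status != NV_OK)
 *         return status;
 */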
1543 
1544 NV_STATUS kbusConstructEngine_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, ENGDESCRIPTOR arg0);
1545 
1546 static inline NV_STATUS kbusConstructEngine_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, ENGDESCRIPTOR arg0) {
1547     return pKernelBus->__kbusConstructEngine__(pGpu, pKernelBus, arg0);
1548 }
1549 
1550 NV_STATUS kbusStatePreInitLocked_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1551 
1552 static inline NV_STATUS kbusStatePreInitLocked_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1553     return pKernelBus->__kbusStatePreInitLocked__(pGpu, pKernelBus);
1554 }
1555 
1556 NV_STATUS kbusStateInitLocked_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1557 
1558 static inline NV_STATUS kbusStateInitLocked_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1559     return pKernelBus->__kbusStateInitLocked__(pGpu, pKernelBus);
1560 }
1561 
1562 NV_STATUS kbusStatePreLoad_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0);
1563 
1564 static inline NV_STATUS kbusStatePreLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0) {
1565     return pKernelBus->__kbusStatePreLoad__(pGpu, pKernelBus, arg0);
1566 }
1567 
1568 NV_STATUS kbusStateLoad_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0);
1569 
1570 static inline NV_STATUS kbusStateLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0) {
1571     return pKernelBus->__kbusStateLoad__(pGpu, pKernelBus, arg0);
1572 }
1573 
1574 NV_STATUS kbusStatePostLoad_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0);
1575 
1576 static inline NV_STATUS kbusStatePostLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0) {
1577     return pKernelBus->__kbusStatePostLoad__(pGpu, pKernelBus, arg0);
1578 }
1579 
1580 NV_STATUS kbusStatePreUnload_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0);
1581 
1582 static inline NV_STATUS kbusStatePreUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0) {
1583     return pKernelBus->__kbusStatePreUnload__(pGpu, pKernelBus, arg0);
1584 }
1585 
1586 NV_STATUS kbusStateUnload_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags);
1587 
1588 static inline NV_STATUS kbusStateUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags) {
1589     return pKernelBus->__kbusStateUnload__(pGpu, pKernelBus, flags);
1590 }
1591 
1592 NV_STATUS kbusStatePostUnload_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags);
1593 
1594 static inline NV_STATUS kbusStatePostUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags) {
1595     return pKernelBus->__kbusStatePostUnload__(pGpu, pKernelBus, flags);
1596 }
1597 
1598 void kbusStateDestroy_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1599 
1600 static inline void kbusStateDestroy_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1601     pKernelBus->__kbusStateDestroy__(pGpu, pKernelBus);
1602 }
1603 
1604 NvU8 *kbusMapBar2Aperture_VBAR2_SRIOV(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU32 transfer_flags);
1605 
1606 NvU8 *kbusMapBar2Aperture_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU32 transfer_flags);
1607 
1608 static inline NvU8 *kbusMapBar2Aperture_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU32 transfer_flags) {
1609     return pKernelBus->__kbusMapBar2Aperture__(pGpu, pKernelBus, pMemDesc, transfer_flags);
1610 }
1611 
1612 NvU8 *kbusValidateBar2ApertureMapping_VBAR2_SRIOV(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU8 *p);
1613 
1614 NvU8 *kbusValidateBar2ApertureMapping_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU8 *p);
1615 
1616 static inline NvU8 *kbusValidateBar2ApertureMapping_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU8 *p) {
1617     return pKernelBus->__kbusValidateBar2ApertureMapping__(pGpu, pKernelBus, pMemDesc, p);
1618 }
1619 
1620 void kbusUnmapBar2ApertureWithFlags_VBAR2_SRIOV(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU8 **pCpuPtr, NvU32 flags);
1621 
1622 void kbusUnmapBar2ApertureWithFlags_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU8 **pCpuPtr, NvU32 flags);
1623 
1624 static inline void kbusUnmapBar2ApertureWithFlags_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU8 **pCpuPtr, NvU32 flags) {
1625     pKernelBus->__kbusUnmapBar2ApertureWithFlags__(pGpu, pKernelBus, pMemDesc, pCpuPtr, flags);
1626 }
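/*
 * Sketch of mapping a memory descriptor through the BAR2 aperture via the
 * dispatchers above and then tearing the mapping down. Assumes TRANSFER_FLAGS_NONE
 * (from the memory-manager transfer flags) is an acceptable flags value for the
 * caller's context:
 *
 *     NvU8 *pCpuPtr = kbusMapBar2Aperture_DISPATCH(pGpu, pKernelBus, pMemDesc,
 *                                                  TRANSFER_FLAGS_NONE);
 *     if (pCpuPtr != NULL)
 *     {
 *         // ... CPU reads/writes of the descriptor's memory via pCpuPtr ...
 *         kbusUnmapBar2ApertureWithFlags_DISPATCH(pGpu, pKernelBus, pMemDesc,
 *                                                 &pCpuPtr, TRANSFER_FLAGS_NONE);
 *     }
 */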
1627 
1628 NvU64 kbusGetVaLimitForBar2_FWCLIENT(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1629 
1630 NvU64 kbusGetVaLimitForBar2_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1631 
1632 static inline NvU64 kbusGetVaLimitForBar2_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1633     return pKernelBus->__kbusGetVaLimitForBar2__(pGpu, pKernelBus);
1634 }
1635 
1636 static inline void kbusCalcCpuInvisibleBar2Range_f2d351(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
1637     NV_ASSERT_PRECOMP(0);
1638 }
1639 
1640 void kbusCalcCpuInvisibleBar2Range_GP100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
1641 
1642 static inline void kbusCalcCpuInvisibleBar2Range_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
1643     pKernelBus->__kbusCalcCpuInvisibleBar2Range__(pGpu, pKernelBus, gfid);
1644 }
1645 
1646 static inline NvU32 kbusCalcCpuInvisibleBar2ApertureSize_13cd8d(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1647     NV_ASSERT_PRECOMP(0);
1648     return 0;
1649 }
1650 
1651 NvU32 kbusCalcCpuInvisibleBar2ApertureSize_GV100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1652 
1653 static inline NvU32 kbusCalcCpuInvisibleBar2ApertureSize_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1654     return pKernelBus->__kbusCalcCpuInvisibleBar2ApertureSize__(pGpu, pKernelBus);
1655 }
1656 
1657 NV_STATUS kbusCommitBar2_KERNEL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags);
1658 
1659 NV_STATUS kbusCommitBar2_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags);
1660 
1661 static inline NV_STATUS kbusCommitBar2_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 flags) {
1662     return pKernelBus->__kbusCommitBar2__(pGpu, pKernelBus, flags);
1663 }
1664 
1665 static inline NV_STATUS kbusRewritePTEsForExistingMapping_92bfc3(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc) {
1666     NV_ASSERT_PRECOMP(0);
1667     return NV_ERR_NOT_SUPPORTED;
1668 }
1669 
1670 NV_STATUS kbusRewritePTEsForExistingMapping_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc);
1671 
1672 static inline NV_STATUS kbusRewritePTEsForExistingMapping_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc) {
1673     return pKernelBus->__kbusRewritePTEsForExistingMapping__(pGpu, pKernelBus, pMemDesc);
1674 }
1675 
1676 NV_STATUS kbusPatchBar1Pdb_GSPCLIENT(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1677 
1678 static inline NV_STATUS kbusPatchBar1Pdb_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1679     return NV_OK;
1680 }
1681 
1682 static inline NV_STATUS kbusPatchBar1Pdb_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1683     return pKernelBus->__kbusPatchBar1Pdb__(pGpu, pKernelBus);
1684 }
1685 
1686 NV_STATUS kbusPatchBar2Pdb_GSPCLIENT(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
1687 
1688 static inline NV_STATUS kbusPatchBar2Pdb_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1689     return NV_OK;
1690 }
1691 
1692 static inline NV_STATUS kbusPatchBar2Pdb_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
1693     return pKernelBus->__kbusPatchBar2Pdb__(pGpu, pKernelBus);
1694 }
1695 
1696 static inline NV_STATUS kbusConstructVirtualBar2CpuInvisibleHeap_56cd7a(struct KernelBus *pKernelBus, NvU32 gfid) {
1697     return NV_OK;
1698 }
1699 
1700 NV_STATUS kbusConstructVirtualBar2CpuInvisibleHeap_VBAR2(struct KernelBus *pKernelBus, NvU32 gfid);
1701 
1702 static inline NV_STATUS kbusConstructVirtualBar2CpuInvisibleHeap_DISPATCH(struct KernelBus *pKernelBus, NvU32 gfid) {
1703     return pKernelBus->__kbusConstructVirtualBar2CpuInvisibleHeap__(pKernelBus, gfid);
1704 }
1705 
1706 static inline NV_STATUS kbusMapCpuInvisibleBar2Aperture_46f6a7(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 *pVaddr, NvU64 allocSize, NvU32 allocFlags, NvU32 gfid) {
1707     return NV_ERR_NOT_SUPPORTED;
1708 }
1709 
1710 NV_STATUS kbusMapCpuInvisibleBar2Aperture_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 *pVaddr, NvU64 allocSize, NvU32 allocFlags, NvU32 gfid);
1711 
kbusMapCpuInvisibleBar2Aperture_DISPATCH(struct OBJGPU * pGpu,struct KernelBus * pKernelBus,MEMORY_DESCRIPTOR * pMemDesc,NvU64 * pVaddr,NvU64 allocSize,NvU32 allocFlags,NvU32 gfid)1712 static inline NV_STATUS kbusMapCpuInvisibleBar2Aperture_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 *pVaddr, NvU64 allocSize, NvU32 allocFlags, NvU32 gfid) {
1713     return pKernelBus->__kbusMapCpuInvisibleBar2Aperture__(pGpu, pKernelBus, pMemDesc, pVaddr, allocSize, allocFlags, gfid);
1714 }
1715 
kbusUnmapCpuInvisibleBar2Aperture_b3696a(struct OBJGPU * pGpu,struct KernelBus * pKernelBus,MEMORY_DESCRIPTOR * pMemDesc,NvU64 vAddr,NvU32 gfid)1716 static inline void kbusUnmapCpuInvisibleBar2Aperture_b3696a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 vAddr, NvU32 gfid) {
1717     return;
1718 }
1719 
1720 void kbusUnmapCpuInvisibleBar2Aperture_VBAR2(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 vAddr, NvU32 gfid);
1721 
kbusUnmapCpuInvisibleBar2Aperture_DISPATCH(struct OBJGPU * pGpu,struct KernelBus * pKernelBus,MEMORY_DESCRIPTOR * pMemDesc,NvU64 vAddr,NvU32 gfid)1722 static inline void kbusUnmapCpuInvisibleBar2Aperture_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 vAddr, NvU32 gfid) {
1723     pKernelBus->__kbusUnmapCpuInvisibleBar2Aperture__(pGpu, pKernelBus, pMemDesc, vAddr, gfid);
1724 }
1725 
1726 NV_STATUS kbusTeardownBar2CpuAperture_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
1727 
1728 NV_STATUS kbusTeardownBar2CpuAperture_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);
1729 
kbusTeardownBar2CpuAperture_DISPATCH(struct OBJGPU * pGpu,struct KernelBus * pKernelBus,NvU32 gfid)1730 static inline NV_STATUS kbusTeardownBar2CpuAperture_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
1731     return pKernelBus->__kbusTeardownBar2CpuAperture__(pGpu, pKernelBus, gfid);
1732 }
1733 
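//
// P2P write-mailbox address size and mailbox attribute queries
//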
NvU32 kbusGetP2PWriteMailboxAddressSize_GH100(struct OBJGPU *pGpu);

static inline NvU32 kbusGetP2PWriteMailboxAddressSize_474d46(struct OBJGPU *pGpu) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}

NvU32 kbusGetP2PWriteMailboxAddressSize_STATIC_DISPATCH(struct OBJGPU *pGpu);

void kbusGetP2PMailboxAttributes_GM200(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 *pMailboxAreaSize, NvU32 *pMailboxAlignmentSize, NvU32 *pMailboxMaxOffset64KB);

void kbusGetP2PMailboxAttributes_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 *pMailboxAreaSize, NvU32 *pMailboxAlignmentSize, NvU32 *pMailboxMaxOffset64KB);

static inline void kbusGetP2PMailboxAttributes_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 *pMailboxAreaSize, NvU32 *pMailboxAlignmentSize, NvU32 *pMailboxMaxOffset64KB) {
    pKernelBus->__kbusGetP2PMailboxAttributes__(pGpu, pKernelBus, pMailboxAreaSize, pMailboxAlignmentSize, pMailboxMaxOffset64KB);
}

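//
// P2P mapping creation/teardown and peer ID management
// (PCIe, NVLink, NVSwitch, C2C and EGM peers)
//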
NV_STATUS kbusCreateP2PMapping_GP100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *peer0, NvU32 *peer1, NvU32 attributes);

NV_STATUS kbusCreateP2PMapping_GH100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *peer0, NvU32 *peer1, NvU32 attributes);

static inline NV_STATUS kbusCreateP2PMapping_DISPATCH(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *peer0, NvU32 *peer1, NvU32 attributes) {
    return pKernelBus0->__kbusCreateP2PMapping__(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes);
}

NV_STATUS kbusRemoveP2PMapping_GP100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 peer0, NvU32 peer1, NvU32 attributes);

NV_STATUS kbusRemoveP2PMapping_GH100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 peer0, NvU32 peer1, NvU32 attributes);

static inline NV_STATUS kbusRemoveP2PMapping_DISPATCH(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 peer0, NvU32 peer1, NvU32 attributes) {
    return pKernelBus0->__kbusRemoveP2PMapping__(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes);
}

NvU32 kbusGetEgmPeerId_GH100(struct OBJGPU *pLocalGpu, struct KernelBus *pLocalKernelBus, struct OBJGPU *pRemoteGpu);

static inline NvU32 kbusGetEgmPeerId_56cd7a(struct OBJGPU *pLocalGpu, struct KernelBus *pLocalKernelBus, struct OBJGPU *pRemoteGpu) {
    return NV_OK;
}

static inline NvU32 kbusGetEgmPeerId_DISPATCH(struct OBJGPU *pLocalGpu, struct KernelBus *pLocalKernelBus, struct OBJGPU *pRemoteGpu) {
    return pLocalKernelBus->__kbusGetEgmPeerId__(pLocalGpu, pLocalKernelBus, pRemoteGpu);
}

NvU32 kbusGetPeerId_GP100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pPeerGpu);

NvU32 kbusGetPeerId_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pPeerGpu);

static inline NvU32 kbusGetPeerId_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pPeerGpu) {
    return pKernelBus->__kbusGetPeerId__(pGpu, pKernelBus, pPeerGpu);
}

NvU32 kbusGetNvlinkPeerId_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pPeerGpu);

static inline NvU32 kbusGetNvlinkPeerId_c732fb(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pPeerGpu) {
    return 4294967295U;
}

static inline NvU32 kbusGetNvlinkPeerId_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pPeerGpu) {
    return pKernelBus->__kbusGetNvlinkPeerId__(pGpu, pKernelBus, pPeerGpu);
}

NvU32 kbusGetNvSwitchPeerId_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NvU32 kbusGetNvSwitchPeerId_c732fb(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return 4294967295U;
}

static inline NvU32 kbusGetNvSwitchPeerId_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusGetNvSwitchPeerId__(pGpu, pKernelBus);
}

NvU32 kbusGetUnusedPciePeerId_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

NvU32 kbusGetUnusedPciePeerId_TU102(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NvU32 kbusGetUnusedPciePeerId_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusGetUnusedPciePeerId__(pGpu, pKernelBus);
}

NV_STATUS kbusIsPeerIdValid_GP100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerId);

NV_STATUS kbusIsPeerIdValid_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerId);

static inline NV_STATUS kbusIsPeerIdValid_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerId) {
    return pKernelBus->__kbusIsPeerIdValid__(pGpu, pKernelBus, peerId);
}

NV_STATUS kbusGetNvlinkP2PPeerId_VGPU(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *nvlinkPeer, NvU32 attributes);

NV_STATUS kbusGetNvlinkP2PPeerId_GP100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *nvlinkPeer, NvU32 attributes);

NV_STATUS kbusGetNvlinkP2PPeerId_GA100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *nvlinkPeer, NvU32 attributes);

static inline NV_STATUS kbusGetNvlinkP2PPeerId_56cd7a(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *nvlinkPeer, NvU32 attributes) {
    return NV_OK;
}

static inline NV_STATUS kbusGetNvlinkP2PPeerId_DISPATCH(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *nvlinkPeer, NvU32 attributes) {
    return pKernelBus0->__kbusGetNvlinkP2PPeerId__(pGpu0, pKernelBus0, pGpu1, pKernelBus1, nvlinkPeer, attributes);
}

void kbusWriteP2PWmbTag_GM200(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 remote2Local, NvU64 p2pWmbTag);

void kbusWriteP2PWmbTag_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 remote2Local, NvU64 p2pWmbTag);

static inline void kbusWriteP2PWmbTag_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 remote2Local, NvU64 p2pWmbTag) {
    pKernelBus->__kbusWriteP2PWmbTag__(pGpu, pKernelBus, remote2Local, p2pWmbTag);
}

RmPhysAddr kbusSetupP2PDomainAccess_GM200(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, PMEMORY_DESCRIPTOR *ppP2PDomMemDesc);

RmPhysAddr kbusSetupP2PDomainAccess_GH100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, PMEMORY_DESCRIPTOR *ppP2PDomMemDesc);

static inline RmPhysAddr kbusSetupP2PDomainAccess_DISPATCH(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, PMEMORY_DESCRIPTOR *ppP2PDomMemDesc) {
    return pKernelBus0->__kbusSetupP2PDomainAccess__(pGpu0, pKernelBus0, pGpu1, ppP2PDomMemDesc);
}

NvBool kbusNeedWarForBug999673_GM200(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pRemoteGpu);

static inline NvBool kbusNeedWarForBug999673_491d52(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pRemoteGpu) {
    return ((NvBool)(0 != 0));
}

static inline NvBool kbusNeedWarForBug999673_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJGPU *pRemoteGpu) {
    return pKernelBus->__kbusNeedWarForBug999673__(pGpu, pKernelBus, pRemoteGpu);
}

NV_STATUS kbusCreateP2PMappingForC2C_GH100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *peer0, NvU32 *peer1, NvU32 attributes);

static inline NV_STATUS kbusCreateP2PMappingForC2C_46f6a7(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *peer0, NvU32 *peer1, NvU32 attributes) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusCreateP2PMappingForC2C_DISPATCH(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 *peer0, NvU32 *peer1, NvU32 attributes) {
    return pKernelBus0->__kbusCreateP2PMappingForC2C__(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes);
}

NV_STATUS kbusRemoveP2PMappingForC2C_GH100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 peer0, NvU32 peer1, NvU32 attributes);

static inline NV_STATUS kbusRemoveP2PMappingForC2C_46f6a7(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 peer0, NvU32 peer1, NvU32 attributes) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusRemoveP2PMappingForC2C_DISPATCH(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 peer0, NvU32 peer1, NvU32 attributes) {
    return pKernelBus0->__kbusRemoveP2PMappingForC2C__(pGpu0, pKernelBus0, pGpu1, pKernelBus1, peer0, peer1, attributes);
}

NV_STATUS kbusUnreserveP2PPeerIds_GP100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerMask);

static inline NV_STATUS kbusUnreserveP2PPeerIds_46f6a7(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerMask) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusUnreserveP2PPeerIds_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 peerMask) {
    return pKernelBus->__kbusUnreserveP2PPeerIds__(pGpu, pKernelBus, peerMask);
}

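//
// BAR1 P2P support: capability checks, static BAR1 mappings and
// BAR1-based peer mappings
//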
NvBool kbusIsBar1P2PCapable_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);

static inline NvBool kbusIsBar1P2PCapable_bf6dfa(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    return ((NvBool)(0 != 0));
}

static inline NvBool kbusIsBar1P2PCapable_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    return pKernelBus->__kbusIsBar1P2PCapable__(pGpu, pKernelBus, gfid);
}

NV_STATUS kbusEnableStaticBar1Mapping_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);

static inline NV_STATUS kbusEnableStaticBar1Mapping_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusEnableStaticBar1Mapping_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    return pKernelBus->__kbusEnableStaticBar1Mapping__(pGpu, pKernelBus, gfid);
}

static inline void kbusDisableStaticBar1Mapping_d44104(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    return;
}

void kbusDisableStaticBar1Mapping_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);

static inline void kbusDisableStaticBar1Mapping_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    pKernelBus->__kbusDisableStaticBar1Mapping__(pGpu, pKernelBus, gfid);
}

NV_STATUS kbusGetBar1P2PDmaInfo_GH100(struct OBJGPU *pSrcGpu, struct OBJGPU *pPeerGpu, struct KernelBus *pPeerKernelBus, NvU64 *dma_addr, NvU64 *dma_size);

static inline NV_STATUS kbusGetBar1P2PDmaInfo_395e98(struct OBJGPU *pSrcGpu, struct OBJGPU *pPeerGpu, struct KernelBus *pPeerKernelBus, NvU64 *dma_addr, NvU64 *dma_size) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusGetBar1P2PDmaInfo_DISPATCH(struct OBJGPU *pSrcGpu, struct OBJGPU *pPeerGpu, struct KernelBus *pPeerKernelBus, NvU64 *dma_addr, NvU64 *dma_size) {
    return pPeerKernelBus->__kbusGetBar1P2PDmaInfo__(pSrcGpu, pPeerGpu, pPeerKernelBus, dma_addr, dma_size);
}

NV_STATUS kbusUpdateStaticBar1VAMapping_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 offset, NvU64 length, NvBool bRelease);

static inline NV_STATUS kbusUpdateStaticBar1VAMapping_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 offset, NvU64 length, NvBool bRelease) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusUpdateStaticBar1VAMapping_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 offset, NvU64 length, NvBool bRelease) {
    return pKernelBus->__kbusUpdateStaticBar1VAMapping__(pGpu, pKernelBus, pMemDesc, offset, length, bRelease);
}

NV_STATUS kbusGetStaticFbAperture_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 offset, NvU64 *pAperOffset, NvU64 *pLength, NvU32 gfid);

static inline NV_STATUS kbusGetStaticFbAperture_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 offset, NvU64 *pAperOffset, NvU64 *pLength, NvU32 gfid) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusGetStaticFbAperture_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc, NvU64 offset, NvU64 *pAperOffset, NvU64 *pLength, NvU32 gfid) {
    return pKernelBus->__kbusGetStaticFbAperture__(pGpu, pKernelBus, pMemDesc, offset, pAperOffset, pLength, gfid);
}

NV_STATUS kbusCreateP2PMappingForBar1P2P_GH100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 attributes);

static inline NV_STATUS kbusCreateP2PMappingForBar1P2P_395e98(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 attributes) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusCreateP2PMappingForBar1P2P_DISPATCH(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 attributes) {
    return pKernelBus0->__kbusCreateP2PMappingForBar1P2P__(pGpu0, pKernelBus0, pGpu1, pKernelBus1, attributes);
}

NV_STATUS kbusRemoveP2PMappingForBar1P2P_GH100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 attributes);

static inline NV_STATUS kbusRemoveP2PMappingForBar1P2P_395e98(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 attributes) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusRemoveP2PMappingForBar1P2P_DISPATCH(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1, NvU32 attributes) {
    return pKernelBus0->__kbusRemoveP2PMappingForBar1P2P__(pGpu0, pKernelBus0, pGpu1, pKernelBus1, attributes);
}

NvBool kbusHasPcieBar1P2PMapping_GH100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1);

static inline NvBool kbusHasPcieBar1P2PMapping_bf6dfa(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1) {
    return ((NvBool)(0 != 0));
}

static inline NvBool kbusHasPcieBar1P2PMapping_DISPATCH(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1) {
    return pKernelBus0->__kbusHasPcieBar1P2PMapping__(pGpu0, pKernelBus0, pGpu1, pKernelBus1);
}

NvBool kbusIsPcieBar1P2PMappingSupported_GH100(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1);

static inline NvBool kbusIsPcieBar1P2PMappingSupported_bf6dfa(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1) {
    return ((NvBool)(0 != 0));
}

static inline NvBool kbusIsPcieBar1P2PMappingSupported_DISPATCH(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, struct KernelBus *pKernelBus1) {
    return pKernelBus0->__kbusIsPcieBar1P2PMappingSupported__(pGpu0, pKernelBus0, pGpu1, pKernelBus1);
}

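//
// FLA (Fabric Linear Address) support: FLA VA space allocation,
// instance block setup and FLA bind/unbind
//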
NV_STATUS kbusCheckFlaSupportedAndInit_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 base, NvU64 size);

static inline NV_STATUS kbusCheckFlaSupportedAndInit_ac1694(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 base, NvU64 size) {
    return NV_OK;
}

static inline NV_STATUS kbusCheckFlaSupportedAndInit_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 base, NvU64 size) {
    return pKernelBus->__kbusCheckFlaSupportedAndInit__(pGpu, pKernelBus, base, size);
}

NV_STATUS kbusDetermineFlaRangeAndAllocate_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 base, NvU64 size);

NV_STATUS kbusDetermineFlaRangeAndAllocate_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 base, NvU64 size);

static inline NV_STATUS kbusDetermineFlaRangeAndAllocate_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 base, NvU64 size) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusDetermineFlaRangeAndAllocate_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 base, NvU64 size) {
    return pKernelBus->__kbusDetermineFlaRangeAndAllocate__(pGpu, pKernelBus, base, size);
}

NV_STATUS kbusAllocateFlaVaspace_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0, NvU64 arg1);

NV_STATUS kbusAllocateFlaVaspace_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0, NvU64 arg1);

static inline NV_STATUS kbusAllocateFlaVaspace_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0, NvU64 arg1) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusAllocateFlaVaspace_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0, NvU64 arg1) {
    return pKernelBus->__kbusAllocateFlaVaspace__(pGpu, pKernelBus, arg0, arg1);
}

NV_STATUS kbusGetFlaRange_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 *arg0, NvU64 *arg1, NvBool arg2);

NV_STATUS kbusGetFlaRange_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 *arg0, NvU64 *arg1, NvBool arg2);

static inline NV_STATUS kbusGetFlaRange_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 *arg0, NvU64 *arg1, NvBool arg2) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusGetFlaRange_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 *arg0, NvU64 *arg1, NvBool arg2) {
    return pKernelBus->__kbusGetFlaRange__(pGpu, pKernelBus, arg0, arg1, arg2);
}

NV_STATUS kbusAllocateLegacyFlaVaspace_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0, NvU64 arg1);

static inline NV_STATUS kbusAllocateLegacyFlaVaspace_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0, NvU64 arg1) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusAllocateLegacyFlaVaspace_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0, NvU64 arg1) {
    return pKernelBus->__kbusAllocateLegacyFlaVaspace__(pGpu, pKernelBus, arg0, arg1);
}

NV_STATUS kbusAllocateHostManagedFlaVaspace_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvHandle arg0, NvHandle arg1, NvHandle arg2, NvHandle arg3, NvU64 arg4, NvU64 arg5, NvU32 arg6);

static inline NV_STATUS kbusAllocateHostManagedFlaVaspace_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvHandle arg0, NvHandle arg1, NvHandle arg2, NvHandle arg3, NvU64 arg4, NvU64 arg5, NvU32 arg6) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusAllocateHostManagedFlaVaspace_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvHandle arg0, NvHandle arg1, NvHandle arg2, NvHandle arg3, NvU64 arg4, NvU64 arg5, NvU32 arg6) {
    return pKernelBus->__kbusAllocateHostManagedFlaVaspace__(pGpu, pKernelBus, arg0, arg1, arg2, arg3, arg4, arg5, arg6);
}

void kbusDestroyFla_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

void kbusDestroyFla_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline void kbusDestroyFla_d44104(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return;
}

static inline void kbusDestroyFla_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    pKernelBus->__kbusDestroyFla__(pGpu, pKernelBus);
}

NV_STATUS kbusGetFlaVaspace_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJVASPACE **arg0);

static inline NV_STATUS kbusGetFlaVaspace_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJVASPACE **arg0) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusGetFlaVaspace_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct OBJVASPACE **arg0) {
    return pKernelBus->__kbusGetFlaVaspace__(pGpu, pKernelBus, arg0);
}

void kbusDestroyHostManagedFlaVaspace_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0);

static inline void kbusDestroyHostManagedFlaVaspace_d44104(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0) {
    return;
}

static inline void kbusDestroyHostManagedFlaVaspace_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0) {
    pKernelBus->__kbusDestroyHostManagedFlaVaspace__(pGpu, pKernelBus, arg0);
}

NvBool kbusVerifyFlaRange_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0, NvU64 arg1);

static inline NvBool kbusVerifyFlaRange_bf6dfa(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0, NvU64 arg1) {
    return ((NvBool)(0 != 0));
}

static inline NvBool kbusVerifyFlaRange_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0, NvU64 arg1) {
    return pKernelBus->__kbusVerifyFlaRange__(pGpu, pKernelBus, arg0, arg1);
}

NV_STATUS kbusConstructFlaInstBlk_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0);

static inline NV_STATUS kbusConstructFlaInstBlk_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusConstructFlaInstBlk_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 arg0) {
    return pKernelBus->__kbusConstructFlaInstBlk__(pGpu, pKernelBus, arg0);
}

void kbusDestructFlaInstBlk_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline void kbusDestructFlaInstBlk_d44104(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return;
}

static inline void kbusDestructFlaInstBlk_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    pKernelBus->__kbusDestructFlaInstBlk__(pGpu, pKernelBus);
}

NV_STATUS kbusValidateFlaBaseAddress_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 flaBaseAddr);

static inline NV_STATUS kbusValidateFlaBaseAddress_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 flaBaseAddr) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusValidateFlaBaseAddress_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 flaBaseAddr) {
    return pKernelBus->__kbusValidateFlaBaseAddress__(pGpu, pKernelBus, flaBaseAddr);
}

NV_STATUS kbusSetupUnbindFla_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

NV_STATUS kbusSetupUnbindFla_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NV_STATUS kbusSetupUnbindFla_46f6a7(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusSetupUnbindFla_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusSetupUnbindFla__(pGpu, pKernelBus);
}

NV_STATUS kbusSetupBindFla_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);

NV_STATUS kbusSetupBindFla_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);

static inline NV_STATUS kbusSetupBindFla_46f6a7(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusSetupBindFla_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    return pKernelBus->__kbusSetupBindFla__(pGpu, pKernelBus, gfid);
}

static inline NV_STATUS kbusSendSysmembarSingle_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return NV_OK;
}

NV_STATUS kbusSendSysmembarSingle_KERNEL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NV_STATUS kbusSendSysmembarSingle_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusSendSysmembarSingle__(pGpu, pKernelBus);
}

void kbusCacheBAR1ResizeSize_WAR_BUG_3249028_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

void kbusCacheBAR1ResizeSize_WAR_BUG_3249028_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline void kbusCacheBAR1ResizeSize_WAR_BUG_3249028_d44104(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return;
}

static inline void kbusCacheBAR1ResizeSize_WAR_BUG_3249028_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    pKernelBus->__kbusCacheBAR1ResizeSize_WAR_BUG_3249028__(pGpu, pKernelBus);
}

NV_STATUS kbusRestoreBAR1ResizeSize_WAR_BUG_3249028_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

NV_STATUS kbusRestoreBAR1ResizeSize_WAR_BUG_3249028_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NV_STATUS kbusRestoreBAR1ResizeSize_WAR_BUG_3249028_ac1694(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return NV_OK;
}

static inline NV_STATUS kbusRestoreBAR1ResizeSize_WAR_BUG_3249028_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusRestoreBAR1ResizeSize_WAR_BUG_3249028__(pGpu, pKernelBus);
}

NV_STATUS kbusIsDirectMappingAllowed_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU32 arg1, NvBool *arg2);

NV_STATUS kbusIsDirectMappingAllowed_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU32 arg1, NvBool *arg2);

static inline NV_STATUS kbusIsDirectMappingAllowed_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0, NvU32 arg1, NvBool *arg2) {
    return pKernelBus->__kbusIsDirectMappingAllowed__(pGpu, pKernelBus, arg0, arg1, arg2);
}

NV_STATUS kbusUseDirectSysmemMap_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *arg0, NvBool *arg1);

NV_STATUS kbusUseDirectSysmemMap_GA100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *arg0, NvBool *arg1);

static inline NV_STATUS kbusUseDirectSysmemMap_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *arg0, NvBool *arg1) {
    return pKernelBus->__kbusUseDirectSysmemMap__(pGpu, pKernelBus, arg0, arg1);
}

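//
// BAR0 window accessors: copies through the window, window base
// programming and video memory offset control
//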
NV_STATUS kbusMemCopyBar0Window_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, RmPhysAddr physAddr, void *pData, NvLength copySize, NvBool bRead);

static inline NV_STATUS kbusMemCopyBar0Window_46f6a7(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, RmPhysAddr physAddr, void *pData, NvLength copySize, NvBool bRead) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusMemCopyBar0Window_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, RmPhysAddr physAddr, void *pData, NvLength copySize, NvBool bRead) {
    return pKernelBus->__kbusMemCopyBar0Window__(pGpu, pKernelBus, physAddr, pData, copySize, bRead);
}

NV_STATUS kbusWriteBAR0WindowBase_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 base);

static inline NV_STATUS kbusWriteBAR0WindowBase_395e98(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 base) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusWriteBAR0WindowBase_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 base) {
    return pKernelBus->__kbusWriteBAR0WindowBase__(pGpu, pKernelBus, base);
}

NvU32 kbusReadBAR0WindowBase_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NvU32 kbusReadBAR0WindowBase_13cd8d(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    NV_ASSERT_PRECOMP(0);
    return 0;
}

static inline NvU32 kbusReadBAR0WindowBase_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusReadBAR0WindowBase__(pGpu, pKernelBus);
}

NvBool kbusValidateBAR0WindowBase_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 base);

static inline NvBool kbusValidateBAR0WindowBase_ceaee8(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 base) {
    NV_ASSERT_PRECOMP(0);
    return ((NvBool)(0 != 0));
}

static inline NvBool kbusValidateBAR0WindowBase_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 base) {
    return pKernelBus->__kbusValidateBAR0WindowBase__(pGpu, pKernelBus, base);
}

static inline NV_STATUS kbusSetBAR0WindowVidOffset_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 vidOffset) {
    return NV_OK;
}

NV_STATUS kbusSetBAR0WindowVidOffset_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 vidOffset);

NV_STATUS kbusSetBAR0WindowVidOffset_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 vidOffset);

static inline NV_STATUS kbusSetBAR0WindowVidOffset_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 vidOffset) {
    return pKernelBus->__kbusSetBAR0WindowVidOffset__(pGpu, pKernelBus, vidOffset);
}

NvU64 kbusGetBAR0WindowVidOffset_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

NvU64 kbusGetBAR0WindowVidOffset_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NvU64 kbusGetBAR0WindowVidOffset_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusGetBAR0WindowVidOffset__(pGpu, pKernelBus);
}

static inline NV_STATUS kbusSetupBar0WindowBeforeBar2Bootstrap_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 *arg0) {
    return NV_OK;
}

NV_STATUS kbusSetupBar0WindowBeforeBar2Bootstrap_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 *arg0);

static inline NV_STATUS kbusSetupBar0WindowBeforeBar2Bootstrap_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 *arg0) {
    return pKernelBus->__kbusSetupBar0WindowBeforeBar2Bootstrap__(pGpu, pKernelBus, arg0);
}

static inline void kbusRestoreBar0WindowAfterBar2Bootstrap_b3696a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0) {
    return;
}

void kbusRestoreBar0WindowAfterBar2Bootstrap_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0);

static inline void kbusRestoreBar0WindowAfterBar2Bootstrap_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 arg0) {
    pKernelBus->__kbusRestoreBar0WindowAfterBar2Bootstrap__(pGpu, pKernelBus, arg0);
}

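//
// BAR2 bootstrap: verification, binding, instance block writes and
// page table placement in FB
//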
static inline NV_STATUS kbusVerifyBar2_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR memDescIn, NvU8 *pCpuPtrIn, NvU64 offset, NvU64 size) {
    return NV_OK;
}

NV_STATUS kbusVerifyBar2_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR memDescIn, NvU8 *pCpuPtrIn, NvU64 offset, NvU64 size);

NV_STATUS kbusVerifyBar2_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR memDescIn, NvU8 *pCpuPtrIn, NvU64 offset, NvU64 size);

static inline NV_STATUS kbusVerifyBar2_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR memDescIn, NvU8 *pCpuPtrIn, NvU64 offset, NvU64 size) {
    return pKernelBus->__kbusVerifyBar2__(pGpu, pKernelBus, memDescIn, pCpuPtrIn, offset, size);
}

NV_STATUS kbusBar2BootStrapInPhysicalMode_VGPUSTUB(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NV_STATUS kbusBar2BootStrapInPhysicalMode_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return NV_OK;
}

static inline NV_STATUS kbusBar2BootStrapInPhysicalMode_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusBar2BootStrapInPhysicalMode__(pGpu, pKernelBus);
}

static inline NV_STATUS kbusBindBar2_5baef9(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, BAR2_MODE arg0) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kbusBindBar2_TU102(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, BAR2_MODE arg0);

NV_STATUS kbusBindBar2_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, BAR2_MODE arg0);

static inline NV_STATUS kbusBindBar2_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, BAR2_MODE arg0) {
    return pKernelBus->__kbusBindBar2__(pGpu, pKernelBus, arg0);
}

static inline void kbusInstBlkWriteAddrLimit_f2d351(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool arg0, NvU64 arg1, NvU8 *arg2, NvU64 arg3) {
    NV_ASSERT_PRECOMP(0);
}

void kbusInstBlkWriteAddrLimit_GP100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool arg0, NvU64 arg1, NvU8 *arg2, NvU64 arg3);

static inline void kbusInstBlkWriteAddrLimit_b3696a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool arg0, NvU64 arg1, NvU8 *arg2, NvU64 arg3) {
    return;
}

static inline void kbusInstBlkWriteAddrLimit_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool arg0, NvU64 arg1, NvU8 *arg2, NvU64 arg3) {
    pKernelBus->__kbusInstBlkWriteAddrLimit__(pGpu, pKernelBus, arg0, arg1, arg2, arg3);
}

static inline NV_STATUS kbusInitInstBlk_ac1694(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR pInstBlkMemDesc, PMEMORY_DESCRIPTOR pPDB, NvU64 vaLimit, NvU64 bigPageSize, struct OBJVASPACE *pVAS) {
    return NV_OK;
}

NV_STATUS kbusInitInstBlk_GP100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR pInstBlkMemDesc, PMEMORY_DESCRIPTOR pPDB, NvU64 vaLimit, NvU64 bigPageSize, struct OBJVASPACE *pVAS);

static inline NV_STATUS kbusInitInstBlk_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR pInstBlkMemDesc, PMEMORY_DESCRIPTOR pPDB, NvU64 vaLimit, NvU64 bigPageSize, struct OBJVASPACE *pVAS) {
    return pKernelBus->__kbusInitInstBlk__(pGpu, pKernelBus, pInstBlkMemDesc, pPDB, vaLimit, bigPageSize, pVAS);
}

static inline void kbusBar2InstBlkWrite_d44104(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU8 *pMap, PMEMORY_DESCRIPTOR pPDB, NvU64 vaLimit, NvU64 bigPageSize) {
    return;
}

void kbusBar2InstBlkWrite_GP100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU8 *pMap, PMEMORY_DESCRIPTOR pPDB, NvU64 vaLimit, NvU64 bigPageSize);

static inline void kbusBar2InstBlkWrite_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU8 *pMap, PMEMORY_DESCRIPTOR pPDB, NvU64 vaLimit, NvU64 bigPageSize) {
    pKernelBus->__kbusBar2InstBlkWrite__(pGpu, pKernelBus, pMap, pPDB, vaLimit, bigPageSize);
}

static inline NV_STATUS kbusSetupBar2PageTablesAtBottomOfFb_22ba1e(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    NV_ASSERT_PRECOMP(0);
    {
        return NV_ERR_NOT_SUPPORTED;
    }
}

NV_STATUS kbusSetupBar2PageTablesAtBottomOfFb_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);

static inline NV_STATUS kbusSetupBar2PageTablesAtBottomOfFb_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    return pKernelBus->__kbusSetupBar2PageTablesAtBottomOfFb__(pGpu, pKernelBus, gfid);
}

static inline void kbusTeardownBar2PageTablesAtBottomOfFb_566dba(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    NV_ASSERT_PRECOMP(0);
    {
        return;
    }
}

void kbusTeardownBar2PageTablesAtBottomOfFb_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);

static inline void kbusTeardownBar2PageTablesAtBottomOfFb_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    pKernelBus->__kbusTeardownBar2PageTablesAtBottomOfFb__(pGpu, pKernelBus, gfid);
}

static inline NV_STATUS kbusSetupBar2InstBlkAtBottomOfFb_22ba1e(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR pPDB, NvU64 vaLimit, NvU64 bigPageSize, NvU32 gfid) {
    NV_ASSERT_PRECOMP(0);
    {
        return NV_ERR_NOT_SUPPORTED;
    }
}

NV_STATUS kbusSetupBar2InstBlkAtBottomOfFb_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR pPDB, NvU64 vaLimit, NvU64 bigPageSize, NvU32 gfid);

static inline NV_STATUS kbusSetupBar2InstBlkAtBottomOfFb_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR pPDB, NvU64 vaLimit, NvU64 bigPageSize, NvU32 gfid) {
    return pKernelBus->__kbusSetupBar2InstBlkAtBottomOfFb__(pGpu, pKernelBus, pPDB, vaLimit, bigPageSize, gfid);
}

static inline void kbusTeardownBar2InstBlkAtBottomOfFb_566dba(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    NV_ASSERT_PRECOMP(0);
    {
        return;
    }
}

void kbusTeardownBar2InstBlkAtBottomOfFb_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);

static inline void kbusTeardownBar2InstBlkAtBottomOfFb_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    pKernelBus->__kbusTeardownBar2InstBlkAtBottomOfFb__(pGpu, pKernelBus, gfid);
}

static inline NV_STATUS kbusSetupBar2PageTablesAtTopOfFb_22ba1e(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    NV_ASSERT_PRECOMP(0);
    {
        return NV_ERR_NOT_SUPPORTED;
    }
}

NV_STATUS kbusSetupBar2PageTablesAtTopOfFb_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid);

static inline NV_STATUS kbusSetupBar2PageTablesAtTopOfFb_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU32 gfid) {
    return pKernelBus->__kbusSetupBar2PageTablesAtTopOfFb__(pGpu, pKernelBus, gfid);
}

static inline NV_STATUS kbusCommitBar2PDEs_22ba1e(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    NV_ASSERT_PRECOMP(0);
    {
        return NV_ERR_NOT_SUPPORTED;
    }
}

NV_STATUS kbusCommitBar2PDEs_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NV_STATUS kbusCommitBar2PDEs_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusCommitBar2PDEs__(pGpu, pKernelBus);
}

kbusVerifyCoherentLink_56cd7a(struct OBJGPU * pGpu,struct KernelBus * pKernelBus)2430 static inline NV_STATUS kbusVerifyCoherentLink_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
2431     return NV_OK;
2432 }
2433 
2434 NV_STATUS kbusVerifyCoherentLink_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
2435 
kbusVerifyCoherentLink_DISPATCH(struct OBJGPU * pGpu,struct KernelBus * pKernelBus)2436 static inline NV_STATUS kbusVerifyCoherentLink_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
2437     return pKernelBus->__kbusVerifyCoherentLink__(pGpu, pKernelBus);
2438 }
2439 
2440 void kbusTeardownMailbox_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
2441 
2442 void kbusTeardownMailbox_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
2443 
kbusTeardownMailbox_DISPATCH(struct OBJGPU * pGpu,struct KernelBus * pKernelBus)2444 static inline void kbusTeardownMailbox_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
2445     pKernelBus->__kbusTeardownMailbox__(pGpu, pKernelBus);
2446 }
2447 
kbusBar1InstBlkVasUpdate_56cd7a(struct OBJGPU * pGpu,struct KernelBus * pKernelBus)2448 static inline NV_STATUS kbusBar1InstBlkVasUpdate_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
2449     return NV_OK;
2450 }
2451 
2452 NV_STATUS kbusBar1InstBlkVasUpdate_GM107(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
2453 
kbusBar1InstBlkVasUpdate_DISPATCH(struct OBJGPU * pGpu,struct KernelBus * pKernelBus)2454 static inline NV_STATUS kbusBar1InstBlkVasUpdate_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
2455     return pKernelBus->__kbusBar1InstBlkVasUpdate__(pGpu, pKernelBus);
2456 }
2457 
2458 NV_STATUS kbusFlushPcieForBar0Doorbell_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);
2459 
kbusFlushPcieForBar0Doorbell_56cd7a(struct OBJGPU * pGpu,struct KernelBus * pKernelBus)2460 static inline NV_STATUS kbusFlushPcieForBar0Doorbell_56cd7a(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
2461     return NV_OK;
2462 }
2463 
kbusFlushPcieForBar0Doorbell_DISPATCH(struct OBJGPU * pGpu,struct KernelBus * pKernelBus)2464 static inline NV_STATUS kbusFlushPcieForBar0Doorbell_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
2465     return pKernelBus->__kbusFlushPcieForBar0Doorbell__(pGpu, pKernelBus);
2466 }

NV_STATUS kbusCreateCoherentCpuMapping_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 numaOnlineMemorySize, NvBool bFlush);

static inline NV_STATUS kbusCreateCoherentCpuMapping_46f6a7(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 numaOnlineMemorySize, NvBool bFlush) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kbusCreateCoherentCpuMapping_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU64 numaOnlineMemorySize, NvBool bFlush) {
    return pKernelBus->__kbusCreateCoherentCpuMapping__(pGpu, pKernelBus, numaOnlineMemorySize, bFlush);
}

NvU8 *kbusMapCoherentCpuMapping_GV100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0);

static inline NvU8 *kbusMapCoherentCpuMapping_9e2234(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0) {
    return ((void *)0);
}

static inline NvU8 *kbusMapCoherentCpuMapping_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0) {
    return pKernelBus->__kbusMapCoherentCpuMapping__(pGpu, pKernelBus, arg0);
}

void kbusUnmapCoherentCpuMapping_GV100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0);

static inline void kbusUnmapCoherentCpuMapping_d44104(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0) {
    return;
}

static inline void kbusUnmapCoherentCpuMapping_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, PMEMORY_DESCRIPTOR arg0) {
    pKernelBus->__kbusUnmapCoherentCpuMapping__(pGpu, pKernelBus, arg0);
}

void kbusTeardownCoherentCpuMapping_GV100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool arg0);

static inline void kbusTeardownCoherentCpuMapping_d44104(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool arg0) {
    return;
}

static inline void kbusTeardownCoherentCpuMapping_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvBool arg0) {
    pKernelBus->__kbusTeardownCoherentCpuMapping__(pGpu, pKernelBus, arg0);
}

static inline NV_STATUS kbusBar1InstBlkBind_92bfc3(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    NV_ASSERT_PRECOMP(0);
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS kbusBar1InstBlkBind_TU102(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

NV_STATUS kbusBar1InstBlkBind_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NV_STATUS kbusBar1InstBlkBind_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusBar1InstBlkBind__(pGpu, pKernelBus);
}

NvU32 kbusGetEccCounts_GH100(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

static inline NvU32 kbusGetEccCounts_4a4dee(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return 0;
}

static inline NvU32 kbusGetEccCounts_DISPATCH(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    return pKernelBus->__kbusGetEccCounts__(pGpu, pKernelBus);
}

static inline NV_STATUS kbusStateInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelBus *pEngstate) {
    return pEngstate->__kbusStateInitUnlocked__(pGpu, pEngstate);
}

static inline void kbusInitMissing_DISPATCH(POBJGPU pGpu, struct KernelBus *pEngstate) {
    pEngstate->__kbusInitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS kbusStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelBus *pEngstate) {
    return pEngstate->__kbusStatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NvBool kbusIsPresent_DISPATCH(POBJGPU pGpu, struct KernelBus *pEngstate) {
    return pEngstate->__kbusIsPresent__(pGpu, pEngstate);
}

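//
// Inline accessors for KernelBus state flags: the getters below read, and the
// setter writes, boolean fields stored directly on the KernelBus object.
//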
static inline NvBool kbusIsBar1Force64KBMappingEnabled(struct KernelBus *pKernelBus) {
    return pKernelBus->bBar1Force64KBMapping;
}

static inline NvBool kbusIsBar1PhysicalModeEnabled(struct KernelBus *pKernelBus) {
    return pKernelBus->bBar1PhysicalModeEnabled;
}

static inline NvBool kbusIsBar2Initialized(struct KernelBus *pKernelBus) {
    return pKernelBus->bIsBar2Initialized;
}

static inline NvBool kbusIsBar2SysmemAccessEnabled(struct KernelBus *pKernelBus) {
    return pKernelBus->bBar2SysmemAccessEnabled;
}

static inline NvBool kbusIsBar2TestSkipped(struct KernelBus *pKernelBus) {
    return pKernelBus->bBar2TestSkipped;
}

static inline NvBool kbusIsPhysicalBar2InitPagetableEnabled(struct KernelBus *pKernelBus) {
    return pKernelBus->bUsePhysicalBar2InitPagetable;
}

static inline NvBool kbusIsFlaSupported(struct KernelBus *pKernelBus) {
    return pKernelBus->bFlaSupported;
}

static inline NvBool kbusIsFlaEnabled(struct KernelBus *pKernelBus) {
    return pKernelBus->bFlaEnabled;
}

static inline NvBool kbusIsFlaDummyPageEnabled(struct KernelBus *pKernelBus) {
    return pKernelBus->bFlaDummyPageEnabled;
}

static inline NvBool kbusIsBug2751296LimitBar2PtSize(struct KernelBus *pKernelBus) {
    return pKernelBus->bBug2751296LimitBar2PtSize;
}

static inline NvBool kbusIsReflectedMappingAccessAllowed(struct KernelBus *pKernelBus) {
    return pKernelBus->bAllowReflectedMappingAccess;
}

static inline NvBool kbusIsPreserveBar1ConsoleEnabled(struct KernelBus *pKernelBus) {
    return pKernelBus->bPreserveBar1ConsoleEnabled;
}

static inline NvBool kbusIsP2pInitialized(struct KernelBus *pKernelBus) {
    return pKernelBus->bP2pInitialized;
}

static inline NvBool kbusIsP2pMailboxClientAllocated(struct KernelBus *pKernelBus) {
    return pKernelBus->bP2pMailboxClientAllocated;
}

static inline NvBool kbusIsFbFlushDisabled(struct KernelBus *pKernelBus) {
    return pKernelBus->bFbFlushDisabled;
}

static inline NvBool kbusIsReadCpuPointerToFlushEnabled(struct KernelBus *pKernelBus) {
    return pKernelBus->bReadCpuPointerToFlush;
}

static inline NvBool kbusIsBarAccessBlocked(struct KernelBus *pKernelBus) {
    return pKernelBus->bBarAccessBlocked;
}

static inline void kbusSetFlaSupported(struct KernelBus *pKernelBus, NvBool bSupported) {
    pKernelBus->bFlaSupported = bSupported;
}

void kbusDestruct_IMPL(struct KernelBus *pKernelBus);

#define __nvoc_kbusDestruct(pKernelBus) kbusDestruct_IMPL(pKernelBus)
void kbusGetDeviceCaps_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU8 *pHostCaps, NvBool bCapsInitialized);

#ifdef __nvoc_kern_bus_h_disabled
static inline void kbusGetDeviceCaps(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvU8 *pHostCaps, NvBool bCapsInitialized) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
}
#else //__nvoc_kern_bus_h_disabled
#define kbusGetDeviceCaps(pGpu, pKernelBus, pHostCaps, bCapsInitialized) kbusGetDeviceCaps_IMPL(pGpu, pKernelBus, pHostCaps, bCapsInitialized)
#endif //__nvoc_kern_bus_h_disabled
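
//
// When the KernelBus engine is compiled out (__nvoc_kern_bus_h_disabled), the
// public kbus* wrappers such as kbusGetDeviceCaps() above become inline stubs
// that assert and return a failure/default value; otherwise they expand to the
// corresponding *_IMPL routines. The same pattern repeats for the wrappers
// below.
//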

void kbusDestroyMailbox_IMPL(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, NvU32 peerIdx);

#ifdef __nvoc_kern_bus_h_disabled
static inline void kbusDestroyMailbox(struct OBJGPU *pGpu0, struct KernelBus *pKernelBus0, struct OBJGPU *pGpu1, NvU32 peerIdx) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
}
#else //__nvoc_kern_bus_h_disabled
#define kbusDestroyMailbox(pGpu0, pKernelBus0, pGpu1, peerIdx) kbusDestroyMailbox_IMPL(pGpu0, pKernelBus0, pGpu1, peerIdx)
#endif //__nvoc_kern_bus_h_disabled

RmPhysAddr kbusSetupPeerBarAccess_IMPL(struct OBJGPU *pGpu0, struct OBJGPU *pGpu1, RmPhysAddr arg0, NvU64 arg1, PMEMORY_DESCRIPTOR *arg2);

#define kbusSetupPeerBarAccess(pGpu0, pGpu1, arg0, arg1, arg2) kbusSetupPeerBarAccess_IMPL(pGpu0, pGpu1, arg0, arg1, arg2)
NvBool kbusIsStaticBar1Enabled_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

#ifdef __nvoc_kern_bus_h_disabled
static inline NvBool kbusIsStaticBar1Enabled(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return NV_FALSE;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusIsStaticBar1Enabled(pGpu, pKernelBus) kbusIsStaticBar1Enabled_IMPL(pGpu, pKernelBus)
#endif //__nvoc_kern_bus_h_disabled

NV_STATUS kbusSendSysmembar_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

#ifdef __nvoc_kern_bus_h_disabled
static inline NV_STATUS kbusSendSysmembar(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusSendSysmembar(pGpu, pKernelBus) kbusSendSysmembar_IMPL(pGpu, pKernelBus)
#endif //__nvoc_kern_bus_h_disabled

NV_STATUS kbusSendBusInfo_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NV2080_CTRL_BUS_INFO *pBusInfo);

#ifdef __nvoc_kern_bus_h_disabled
static inline NV_STATUS kbusSendBusInfo(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NV2080_CTRL_BUS_INFO *pBusInfo) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusSendBusInfo(pGpu, pKernelBus, pBusInfo) kbusSendBusInfo_IMPL(pGpu, pKernelBus, pBusInfo)
#endif //__nvoc_kern_bus_h_disabled

NvU64 kbusGetPciBarSize_IMPL(struct KernelBus *pKernelBus, NvU32 index);

#ifdef __nvoc_kern_bus_h_disabled
static inline NvU64 kbusGetPciBarSize(struct KernelBus *pKernelBus, NvU32 index) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return 0;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusGetPciBarSize(pKernelBus, index) kbusGetPciBarSize_IMPL(pKernelBus, index)
#endif //__nvoc_kern_bus_h_disabled

RmPhysAddr kbusGetPciBarOffset_IMPL(struct KernelBus *pKernelBus, NvU32 index);

#ifdef __nvoc_kern_bus_h_disabled
static inline RmPhysAddr kbusGetPciBarOffset(struct KernelBus *pKernelBus, NvU32 index) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    RmPhysAddr ret;
    portMemSet(&ret, 0, sizeof(RmPhysAddr));
    return ret;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusGetPciBarOffset(pKernelBus, index) kbusGetPciBarOffset_IMPL(pKernelBus, index)
#endif //__nvoc_kern_bus_h_disabled

NV_STATUS kbusIsGpuP2pAlive_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

#ifdef __nvoc_kern_bus_h_disabled
static inline NV_STATUS kbusIsGpuP2pAlive(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusIsGpuP2pAlive(pGpu, pKernelBus) kbusIsGpuP2pAlive_IMPL(pGpu, pKernelBus)
#endif //__nvoc_kern_bus_h_disabled

NV_STATUS kbusUpdateRusdStatistics_IMPL(struct OBJGPU *pGpu);

#define kbusUpdateRusdStatistics(pGpu) kbusUpdateRusdStatistics_IMPL(pGpu)
void kbusDetermineBar1Force64KBMapping_IMPL(struct KernelBus *pKernelBus);

#ifdef __nvoc_kern_bus_h_disabled
static inline void kbusDetermineBar1Force64KBMapping(struct KernelBus *pKernelBus) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
}
#else //__nvoc_kern_bus_h_disabled
#define kbusDetermineBar1Force64KBMapping(pKernelBus) kbusDetermineBar1Force64KBMapping_IMPL(pKernelBus)
#endif //__nvoc_kern_bus_h_disabled

void kbusDetermineBar1ApertureLength_IMPL(struct KernelBus *pKernelBus, NvU32 gfid);

#ifdef __nvoc_kern_bus_h_disabled
static inline void kbusDetermineBar1ApertureLength(struct KernelBus *pKernelBus, NvU32 gfid) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
}
#else //__nvoc_kern_bus_h_disabled
#define kbusDetermineBar1ApertureLength(pKernelBus, gfid) kbusDetermineBar1ApertureLength_IMPL(pKernelBus, gfid)
#endif //__nvoc_kern_bus_h_disabled

NV_STATUS kbusMapFbApertureByHandle_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvHandle hClient, NvHandle hMemory, NvU64 offset, NvU64 size, NvU64 *pBar1Va, struct Device *pDevice);

#ifdef __nvoc_kern_bus_h_disabled
static inline NV_STATUS kbusMapFbApertureByHandle(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvHandle hClient, NvHandle hMemory, NvU64 offset, NvU64 size, NvU64 *pBar1Va, struct Device *pDevice) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusMapFbApertureByHandle(pGpu, pKernelBus, hClient, hMemory, offset, size, pBar1Va, pDevice) kbusMapFbApertureByHandle_IMPL(pGpu, pKernelBus, hClient, hMemory, offset, size, pBar1Va, pDevice)
#endif //__nvoc_kern_bus_h_disabled

NV_STATUS kbusUnmapFbApertureByHandle_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvHandle hClient, NvHandle hMemory, NvU64 bar1Va);

#ifdef __nvoc_kern_bus_h_disabled
static inline NV_STATUS kbusUnmapFbApertureByHandle(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, NvHandle hClient, NvHandle hMemory, NvU64 bar1Va) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusUnmapFbApertureByHandle(pGpu, pKernelBus, hClient, hMemory, bar1Va) kbusUnmapFbApertureByHandle_IMPL(pGpu, pKernelBus, hClient, hMemory, bar1Va)
#endif //__nvoc_kern_bus_h_disabled

NV_STATUS kbusGetBar1VARangeForDevice_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct Device *pDevice, struct NV_RANGE *arg0);

#ifdef __nvoc_kern_bus_h_disabled
static inline NV_STATUS kbusGetBar1VARangeForDevice(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, struct Device *pDevice, struct NV_RANGE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusGetBar1VARangeForDevice(pGpu, pKernelBus, pDevice, arg0) kbusGetBar1VARangeForDevice_IMPL(pGpu, pKernelBus, pDevice, arg0)
#endif //__nvoc_kern_bus_h_disabled

NvU32 kbusGetFlushAperture_IMPL(struct KernelBus *pKernelBus, NV_ADDRESS_SPACE addrSpace);

#ifdef __nvoc_kern_bus_h_disabled
static inline NvU32 kbusGetFlushAperture(struct KernelBus *pKernelBus, NV_ADDRESS_SPACE addrSpace) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return 0;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusGetFlushAperture(pKernelBus, addrSpace) kbusGetFlushAperture_IMPL(pKernelBus, addrSpace)
#endif //__nvoc_kern_bus_h_disabled

NvU8 *kbusCpuOffsetInBar2WindowGet_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc);

#ifdef __nvoc_kern_bus_h_disabled
static inline NvU8 *kbusCpuOffsetInBar2WindowGet(struct OBJGPU *pGpu, struct KernelBus *pKernelBus, MEMORY_DESCRIPTOR *pMemDesc) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return NULL;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusCpuOffsetInBar2WindowGet(pGpu, pKernelBus, pMemDesc) kbusCpuOffsetInBar2WindowGet_IMPL(pGpu, pKernelBus, pMemDesc)
#endif //__nvoc_kern_bus_h_disabled

NvU64 kbusGetVfBar0SizeBytes_IMPL(struct OBJGPU *pGpu, struct KernelBus *pKernelBus);

#ifdef __nvoc_kern_bus_h_disabled
static inline NvU64 kbusGetVfBar0SizeBytes(struct OBJGPU *pGpu, struct KernelBus *pKernelBus) {
    NV_ASSERT_FAILED_PRECOMP("KernelBus was disabled!");
    return 0;
}
#else //__nvoc_kern_bus_h_disabled
#define kbusGetVfBar0SizeBytes(pGpu, pKernelBus) kbusGetVfBar0SizeBytes_IMPL(pGpu, pKernelBus)
#endif //__nvoc_kern_bus_h_disabled

#undef PRIVATE_FIELD


#define kbusMapRmAperture_HAL(pGpu, pMemDesc) memdescMapInternal(pGpu, pMemDesc, 0)

#define kbusUnmapRmApertureWithFlags_HAL(pGpu, pMemDesc, pCpuPtr, flags) (memdescUnmapInternal(pGpu, pMemDesc, flags), ((void) (*(pCpuPtr) = NULL)))

#define kbusUnmapRmAperture_HAL(pGpu, pMemDesc, pCpuPtr, bFlush) \
    kbusUnmapRmApertureWithFlags_HAL(pGpu, pMemDesc, pCpuPtr,    \
                                     (bFlush) ? TRANSFER_FLAGS_NONE : TRANSFER_FLAGS_DEFER_FLUSH)
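
//
// Illustrative usage of the RM aperture helpers above (a minimal sketch, not
// taken from this file): map a memory descriptor, touch it through the CPU
// pointer, then unmap with an immediate flush. The surrounding variables
// (pGpu, pMemDesc) are assumed to exist in the caller.
//
//     NvU8 *pCpu = kbusMapRmAperture_HAL(pGpu, pMemDesc);
//     if (pCpu != NULL)
//     {
//         portMemSet(pCpu, 0, memdescGetSize(pMemDesc));
//         // bFlush = NV_TRUE flushes immediately; the macro also NULLs pCpu.
//         kbusUnmapRmAperture_HAL(pGpu, pMemDesc, &pCpu, NV_TRUE);
//     }
//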

//
// For SHH/GH180, BAR0 PRAMIN and CPU-visible BAR1/2 should be disabled when C2C is being used.
// For P9+GV100, BAR0 PRAMIN and CPU-visible BAR1/2 should never be disabled.
//
#define KBUS_BAR0_PRAMIN_DISABLED(pGpu)                             \
    ( pGpu->getProperty(pGpu, PDB_PROP_GPU_COHERENT_CPU_MAPPING) && \
      gpuIsSelfHosted(pGpu) )
#define KBUS_CPU_VISIBLE_BAR12_DISABLED(pGpu)                         \
    ( ( pGpu->getProperty(pGpu, PDB_PROP_GPU_COHERENT_CPU_MAPPING) && \
        gpuIsSelfHosted(pGpu) ) ||                                    \
      pGpu->getProperty(pGpu, PDB_PROP_GPU_BAR1_BAR2_DISABLED) )
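
//
// Illustrative guard (a minimal sketch, not from this file): skip a BAR0
// PRAMIN window access on configurations where the macro above reports it as
// disabled. The access itself is left as a placeholder.
//
//     if (!KBUS_BAR0_PRAMIN_DISABLED(pGpu))
//     {
//         // ... access the BAR0 PRAMIN window here ...
//     }
//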

#endif // KERN_BUS_H

#ifdef __cplusplus
} // extern "C"
#endif

#endif // _G_KERN_BUS_NVOC_H_