1 #ifndef _G_KERNEL_GSP_NVOC_H_
2 #define _G_KERNEL_GSP_NVOC_H_
3 #include "nvoc/runtime.h"
4 
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8 
9 /*
10  * SPDX-FileCopyrightText: Copyright (c) 2017-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11  * SPDX-License-Identifier: MIT
12  *
13  * Permission is hereby granted, free of charge, to any person obtaining a
14  * copy of this software and associated documentation files (the "Software"),
15  * to deal in the Software without restriction, including without limitation
16  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17  * and/or sell copies of the Software, and to permit persons to whom the
18  * Software is furnished to do so, subject to the following conditions:
19  *
20  * The above copyright notice and this permission notice shall be included in
21  * all copies or substantial portions of the Software.
22  *
23  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29  * DEALINGS IN THE SOFTWARE.
30  */
31 
32 #include "g_kernel_gsp_nvoc.h"
33 
34 #ifndef KERNEL_GSP_H
35 #define KERNEL_GSP_H
36 
37 /*!
38  * This file provides definitions for all KernelGsp data structures
39  * and interfaces.  KernelGsp is responsible for initiating the boot
 * of RM on the GSP core (GSP-RM) and facilitates communication between
 * Kernel RM and GSP-RM.
42  */
43 
44 #include "core/core.h"
45 #include "core/bin_data.h"
46 #include "gpu/eng_state.h"
47 #include "gpu/intr/intr_service.h"
48 #include "gpu/falcon/kernel_falcon.h"
49 #include "gpu/gsp/gsp_static_config.h"
50 #include "gpu/gsp/gsp_init_args.h"
51 #include "gpu/gsp/gsp_fw_heap.h"
52 #include "nv-firmware.h"
53 #include "nv_sriov_defines.h"
54 #include "rmRiscvUcode.h"
55 
56 #include "libos_init_args.h"
57 #include "gsp_fw_wpr_meta.h"
58 #include "gsp_fw_sr_meta.h"
59 #include "liblogdecode.h"
60 
61 /*!
62  * Forward declarations
63  */
64 typedef struct SimAccessBuffer SimAccessBuffer;
65 typedef struct GSP_FMC_BOOT_PARAMS GSP_FMC_BOOT_PARAMS;
66 
67 /*!
 * Structure describing the VBIOS image used for early FRTS.
69  */
70 typedef struct KernelGspVbiosImg
71 {
72     NvU8 *pImage;
73     NvU32 biosSize;
74     NvU32 expansionRomOffset;
75 } KernelGspVbiosImg;
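
/*
 * Example (illustrative sketch; assumes expansionRomOffset is an offset
 * relative to pImage and lies within biosSize):
 *
 *   NvU8 *pExpansionRom = pVbiosImg->pImage + pVbiosImg->expansionRomOffset;
 */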
76 
77 /*!
78  * Variant of KernelGspFlcnUcode representing a non-Boot-from-HS ucode that
79  * loads directly without the generic falcon bootloader.
80  */
81 typedef struct KernelGspFlcnUcodeBootDirect
82 {
83     NvU8 *pImage;
84     NvU32 size;
85 
86     NvU32 imemSize;
87     NvU32 imemNsSize;
88     NvU32 imemNsPa;
89     NvU32 imemSecSize;
90     NvU32 imemSecPa;
91 
92     NvU32 dataOffset;
93     NvU32 dmemSize;
94     NvU32 dmemPa;
95 } KernelGspFlcnUcodeBootDirect;
96 
97 /*!
98  * Variant of KernelGspFlcnUcode representing a non-Boot-from-HS ucode that
99  * loads via the generic falcon bootloader.
100  */
101 typedef struct KernelGspFlcnUcodeBootWithLoader
102 {
103     MEMORY_DESCRIPTOR *pCodeMemDesc;
104     MEMORY_DESCRIPTOR *pDataMemDesc;
105 
106     NvU32 codeOffset;
107     NvU32 imemSize;
108     NvU32 imemNsSize;
109     NvU32 imemNsPa;
110     NvU32 imemSecSize;
111     NvU32 imemSecPa;
112     NvU32 codeEntry;
113 
114     NvU32 dataOffset;
115     NvU32 dmemSize;
116     NvU32 dmemPa;
117 
118     // Extra fields used for falcon ucodes from VBIOS
119     NvU32 interfaceOffset;
120 } KernelGspFlcnUcodeBootWithLoader;
121 
122 /*!
123  * Variant of KernelGspFlcnUcode representing a Boot-from-HS ucode.
124  */
125 typedef struct KernelGspFlcnUcodeBootFromHs
126 {
127     MEMORY_DESCRIPTOR *pUcodeMemDesc;
128     NvU32 size;
129 
130     NvU32 codeOffset;
131     NvU32 imemSize;
132     NvU32 imemPa;
133     NvU32 imemVa;
134 
135     NvU32 dataOffset;
136     NvU32 dmemSize;
137     NvU32 dmemPa;
138     NvU32 dmemVa;
139 
140     NvU32 hsSigDmemAddr;
141     NvU32 ucodeId;
142     NvU32 engineIdMask;
143 
144     // Extra fields used for falcon ucodes from VBIOS
145     NvU32 *pSignatures;
    NvU32 signaturesTotalSize;  // size of the buffer pointed to by pSignatures
147     NvU32 sigSize;  // size of one signature
148     NvU32 sigCount;
149 
150     NvU32 vbiosSigVersions;
151     NvU32 interfaceOffset;
152 } KernelGspFlcnUcodeBootFromHs;
153 
154 /*!
155  * Type of KernelGspFlcnUcode. Used as tag in tagged union KernelGspFlcnUcode.
156  * Affects how the ucode is loaded/booted.
157  */
158 typedef enum KernelGspFlcnUcodeBootType
159 {
160     KGSP_FLCN_UCODE_BOOT_DIRECT,
161     KGSP_FLCN_UCODE_BOOT_WITH_LOADER,
162     KGSP_FLCN_UCODE_BOOT_FROM_HS
163 } KernelGspFlcnUcodeBootType;
164 
165 /*!
166  * Tagged union of falcon ucode variants used by early FRTS and GSP-RM boot.
167  */
168 typedef struct KernelGspFlcnUcode
169 {
170     KernelGspFlcnUcodeBootType bootType;
171     union
172     {
173         KernelGspFlcnUcodeBootDirect ucodeBootDirect;
174         KernelGspFlcnUcodeBootWithLoader ucodeBootWithLoader;
175         KernelGspFlcnUcodeBootFromHs ucodeBootFromHs;
176     };
177 } KernelGspFlcnUcode;
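
/*
 * Example (illustrative sketch, not part of the generated interface): callers
 * are expected to switch on bootType to select the matching union member.
 * The helper name below is hypothetical.
 *
 *   static NvU32 kgspFlcnUcodeGetImemSize(const KernelGspFlcnUcode *pUcode)
 *   {
 *       switch (pUcode->bootType)
 *       {
 *           case KGSP_FLCN_UCODE_BOOT_DIRECT:
 *               return pUcode->ucodeBootDirect.imemSize;
 *           case KGSP_FLCN_UCODE_BOOT_WITH_LOADER:
 *               return pUcode->ucodeBootWithLoader.imemSize;
 *           case KGSP_FLCN_UCODE_BOOT_FROM_HS:
 *               return pUcode->ucodeBootFromHs.imemSize;
 *           default:
 *               return 0;
 *       }
 *   }
 */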
178 
179 /*!
 * GSP-RM runs extremely slowly in emulated/simulated RISCV environments, so
 * timeouts are scaled by this factor (X) on those platforms.
182  */
183 #define GSP_SCALE_TIMEOUT_EMU_SIM  2500
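
/*
 * Example (illustrative sketch; the variable names are hypothetical): a
 * timeout tuned for silicon is multiplied by this factor when running on
 * emulation/simulation.
 *
 *   NvU32 timeoutUs = baseTimeoutUs;
 *   if (bIsEmulationOrSimulation)
 *       timeoutUs *= GSP_SCALE_TIMEOUT_EMU_SIM;
 */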
184 
185 /*!
 * Size of the LIBOS init arguments packet.
187  */
188 #define LIBOS_INIT_ARGUMENTS_SIZE       0x1000
189 
190 /*!
191  * Structure for passing GSP-RM firmware data
192  */
193 typedef struct GSP_FIRMWARE
194 {
195     const void *pBuf;           // buffer holding the firmware (ucode)
196     NvU32       size;           // size of the firmware
197     const void *pImageData;     // points to the GSP FW image start inside the pBuf buffer
198     NvU64       imageSize;      // GSP FW image size inside the pBuf buffer
199     const void *pSignatureData; // points to the GSP FW signature start inside the pBuf buffer
200     NvU64       signatureSize;  // GSP FW signature size inside the pBuf buffer
201     const void *pLogElf;        // firmware logging section and symbol information to decode logs
    NvU32       logElfSize;     // size of the GSP log ELF binary
203 } GSP_FIRMWARE;
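
/*
 * Example (illustrative sketch, hypothetical helper): pImageData and
 * pSignatureData are expected to point inside pBuf, so a basic layout
 * consistency check can be written as:
 *
 *   static NvBool gspFirmwareLayoutIsSane(const GSP_FIRMWARE *pFw)
 *   {
 *       const NvU8 *pStart = (const NvU8 *)pFw->pBuf;
 *       const NvU8 *pEnd   = pStart + pFw->size;
 *       const NvU8 *pImg   = (const NvU8 *)pFw->pImageData;
 *       const NvU8 *pSig   = (const NvU8 *)pFw->pSignatureData;
 *
 *       return (pImg >= pStart) && ((pImg + pFw->imageSize)     <= pEnd) &&
 *              (pSig >= pStart) && ((pSig + pFw->signatureSize) <= pEnd);
 *   }
 */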
204 
205 /*!
206  * Known ELF section names (or name prefixes) of gsp_*.bin or gsp_log_*.bin.
207  */
208 #define GSP_VERSION_SECTION_NAME           ".fwversion"
209 #define GSP_IMAGE_SECTION_NAME             ".fwimage"
210 #define GSP_LOGGING_SECTION_NAME           ".fwlogging"
211 #define GSP_SIGNATURE_SECTION_NAME_PREFIX  ".fwsignature_"
212 #define GSP_CC_SIGNATURE_SECTION_NAME_PREFIX  ".fwsignature_cc_"
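
/*
 * Example (illustrative sketch using standard C string functions rather than
 * the RM portability layer): signature section names are chip-specific, so
 * the prefixes above are matched against ELF section names with a prefix
 * compare.
 *
 *   NvBool bIsSignatureSection =
 *       (strncmp(pSectionName, GSP_SIGNATURE_SECTION_NAME_PREFIX,
 *                sizeof(GSP_SIGNATURE_SECTION_NAME_PREFIX) - 1) == 0);
 */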
213 
214 /*!
215  * Index into libosLogDecode array.
216  */
217 enum
218 {
219     LOGIDX_INIT,
220     LOGIDX_INTR,
221     LOGIDX_RM,
222     LOGIDX_SIZE
223 };
224 
225 /*!
226  * LIBOS task logging.
227  */
228 typedef struct
229 {
230     /* Memory for task logging */
231     MEMORY_DESCRIPTOR                  *pTaskLogDescriptor;
232     NvU64                              *pTaskLogBuffer;
233     NvP64                               pTaskLogMappingPriv;
234     NvU64                               id8;
235 } RM_LIBOS_LOG_MEM;
236 
237 /*!
238  * KernelGsp object definition
239  */
240 #ifdef NVOC_KERNEL_GSP_H_PRIVATE_ACCESS_ALLOWED
241 #define PRIVATE_FIELD(x) x
242 #else
243 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
244 #endif
245 struct MESSAGE_QUEUE_COLLECTION;
246 
247 
248 struct KernelGsp {
249     const struct NVOC_RTTI *__nvoc_rtti;
250     struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
251     struct IntrService __nvoc_base_IntrService;
252     struct KernelFalcon __nvoc_base_KernelFalcon;
253     struct Object *__nvoc_pbase_Object;
254     struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
255     struct IntrService *__nvoc_pbase_IntrService;
256     struct CrashCatEngine *__nvoc_pbase_CrashCatEngine;
257     struct KernelCrashCatEngine *__nvoc_pbase_KernelCrashCatEngine;
258     struct KernelFalcon *__nvoc_pbase_KernelFalcon;
259     struct KernelGsp *__nvoc_pbase_KernelGsp;
260     NV_STATUS (*__kgspConstructEngine__)(struct OBJGPU *, struct KernelGsp *, ENGDESCRIPTOR);
261     void (*__kgspRegisterIntrService__)(struct OBJGPU *, struct KernelGsp *, IntrServiceRecord *);
262     NvU32 (*__kgspServiceInterrupt__)(struct OBJGPU *, struct KernelGsp *, IntrServiceServiceInterruptArguments *);
263     void (*__kgspConfigureFalcon__)(struct OBJGPU *, struct KernelGsp *);
264     NvBool (*__kgspIsDebugModeEnabled__)(struct OBJGPU *, struct KernelGsp *);
265     NV_STATUS (*__kgspAllocBootArgs__)(struct OBJGPU *, struct KernelGsp *);
266     void (*__kgspFreeBootArgs__)(struct OBJGPU *, struct KernelGsp *);
267     NV_STATUS (*__kgspBootstrapRiscvOSEarly__)(struct OBJGPU *, struct KernelGsp *, GSP_FIRMWARE *);
268     void (*__kgspGetGspRmBootUcodeStorage__)(struct OBJGPU *, struct KernelGsp *, BINDATA_STORAGE **, BINDATA_STORAGE **);
269     const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmBoot__)(struct KernelGsp *);
270     const BINDATA_ARCHIVE *(*__kgspGetBinArchiveConcatenatedFMCDesc__)(struct KernelGsp *);
271     const BINDATA_ARCHIVE *(*__kgspGetBinArchiveConcatenatedFMC__)(struct KernelGsp *);
272     const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmFmcGfwDebugSigned__)(struct KernelGsp *);
273     const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmFmcGfwProdSigned__)(struct KernelGsp *);
274     const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmCcFmcGfwProdSigned__)(struct KernelGsp *);
275     NV_STATUS (*__kgspCalculateFbLayout__)(struct OBJGPU *, struct KernelGsp *, GSP_FIRMWARE *);
276     NvU32 (*__kgspGetNonWprHeapSize__)(struct OBJGPU *, struct KernelGsp *);
277     NV_STATUS (*__kgspExecuteSequencerCommand__)(struct OBJGPU *, struct KernelGsp *, NvU32, NvU32 *, NvU32);
278     NvU32 (*__kgspReadUcodeFuseVersion__)(struct OBJGPU *, struct KernelGsp *, NvU32);
279     NV_STATUS (*__kgspResetHw__)(struct OBJGPU *, struct KernelGsp *);
280     NvBool (*__kgspIsWpr2Up__)(struct OBJGPU *, struct KernelGsp *);
281     NvU32 (*__kgspGetFrtsSize__)(struct OBJGPU *, struct KernelGsp *);
282     NvU64 (*__kgspGetPrescrubbedTopFbSize__)(struct OBJGPU *, struct KernelGsp *);
283     NV_STATUS (*__kgspExtractVbiosFromRom__)(struct OBJGPU *, struct KernelGsp *, KernelGspVbiosImg **);
284     NV_STATUS (*__kgspExecuteFwsecFrts__)(struct OBJGPU *, struct KernelGsp *, KernelGspFlcnUcode *, const NvU64);
285     NV_STATUS (*__kgspExecuteFwsecSb__)(struct OBJGPU *, struct KernelGsp *, KernelGspFlcnUcode *);
286     NV_STATUS (*__kgspExecuteScrubberIfNeeded__)(struct OBJGPU *, struct KernelGsp *);
287     NV_STATUS (*__kgspExecuteBooterLoad__)(struct OBJGPU *, struct KernelGsp *, const NvU64);
288     NV_STATUS (*__kgspExecuteBooterUnloadIfNeeded__)(struct OBJGPU *, struct KernelGsp *, const NvU64);
289     NV_STATUS (*__kgspExecuteHsFalcon__)(struct OBJGPU *, struct KernelGsp *, KernelGspFlcnUcode *, struct KernelFalcon *, NvU32 *, NvU32 *);
290     NV_STATUS (*__kgspWaitForGfwBootOk__)(struct OBJGPU *, struct KernelGsp *);
291     const BINDATA_ARCHIVE *(*__kgspGetBinArchiveBooterLoadUcode__)(struct KernelGsp *);
292     const BINDATA_ARCHIVE *(*__kgspGetBinArchiveBooterUnloadUcode__)(struct KernelGsp *);
293     NvU64 (*__kgspGetMinWprHeapSizeMB__)(struct OBJGPU *, struct KernelGsp *);
294     NvU64 (*__kgspGetMaxWprHeapSizeMB__)(struct OBJGPU *, struct KernelGsp *);
295     NV_STATUS (*__kgspInitVgpuPartitionLogging__)(struct OBJGPU *, struct KernelGsp *, NvU32, NvU64, NvU64, NvU64, NvU64);
296     NV_STATUS (*__kgspFreeVgpuPartitionLogging__)(struct OBJGPU *, struct KernelGsp *, NvU32);
297     const char *(*__kgspGetSignatureSectionNamePrefix__)(struct OBJGPU *, struct KernelGsp *);
298     NV_STATUS (*__kgspSetupGspFmcArgs__)(struct OBJGPU *, struct KernelGsp *, GSP_FIRMWARE *);
299     NvBool (*__kgspConfigured__)(struct KernelGsp *);
300     NvU32 (*__kgspPriRead__)(struct KernelGsp *, NvU32);
301     void (*__kgspRegWrite__)(struct OBJGPU *, struct KernelGsp *, NvU32, NvU32);
302     NvU32 (*__kgspMaskDmemAddr__)(struct OBJGPU *, struct KernelGsp *, NvU32);
303     void (*__kgspStateDestroy__)(POBJGPU, struct KernelGsp *);
304     void (*__kgspVprintf__)(struct KernelGsp *, NvBool, const char *, va_list);
305     NvBool (*__kgspClearInterrupt__)(struct OBJGPU *, struct KernelGsp *, IntrServiceClearInterruptArguments *);
306     void (*__kgspPriWrite__)(struct KernelGsp *, NvU32, NvU32);
307     void *(*__kgspMapBufferDescriptor__)(struct KernelGsp *, CrashCatBufferDescriptor *);
308     void (*__kgspSyncBufferDescriptor__)(struct KernelGsp *, CrashCatBufferDescriptor *, NvU32, NvU32);
309     NvU32 (*__kgspRegRead__)(struct OBJGPU *, struct KernelGsp *, NvU32);
310     NvBool (*__kgspIsPresent__)(POBJGPU, struct KernelGsp *);
311     void (*__kgspReadEmem__)(struct KernelGsp *, NvU64, NvU64, void *);
312     NV_STATUS (*__kgspStateLoad__)(POBJGPU, struct KernelGsp *, NvU32);
313     const NvU32 *(*__kgspGetScratchOffsets__)(struct KernelGsp *, NV_CRASHCAT_SCRATCH_GROUP_ID);
314     void (*__kgspUnload__)(struct KernelGsp *);
315     NV_STATUS (*__kgspStateUnload__)(POBJGPU, struct KernelGsp *, NvU32);
316     NV_STATUS (*__kgspServiceNotificationInterrupt__)(struct OBJGPU *, struct KernelGsp *, IntrServiceServiceNotificationInterruptArguments *);
317     NvU32 (*__kgspGetWFL0Offset__)(struct KernelGsp *);
318     NV_STATUS (*__kgspStateInitLocked__)(POBJGPU, struct KernelGsp *);
319     NV_STATUS (*__kgspStatePreLoad__)(POBJGPU, struct KernelGsp *, NvU32);
320     NV_STATUS (*__kgspStatePostUnload__)(POBJGPU, struct KernelGsp *, NvU32);
321     NV_STATUS (*__kgspStatePreUnload__)(POBJGPU, struct KernelGsp *, NvU32);
322     NV_STATUS (*__kgspStateInitUnlocked__)(POBJGPU, struct KernelGsp *);
323     void (*__kgspInitMissing__)(POBJGPU, struct KernelGsp *);
324     NV_STATUS (*__kgspStatePreInitLocked__)(POBJGPU, struct KernelGsp *);
325     NV_STATUS (*__kgspStatePreInitUnlocked__)(POBJGPU, struct KernelGsp *);
326     NV_STATUS (*__kgspStatePostLoad__)(POBJGPU, struct KernelGsp *, NvU32);
327     void (*__kgspUnmapBufferDescriptor__)(struct KernelGsp *, CrashCatBufferDescriptor *);
328     void (*__kgspReadDmem__)(struct KernelGsp *, NvU32, NvU32, void *);
329     struct MESSAGE_QUEUE_COLLECTION *pMQCollection;
330     struct OBJRPC *pRpc;
331     struct OBJRPC *pLocklessRpc;
332     char vbiosVersionStr[16];
333     KernelGspFlcnUcode *pFwsecUcode;
334     KernelGspFlcnUcode *pScrubberUcode;
335     KernelGspFlcnUcode *pBooterLoadUcode;
336     KernelGspFlcnUcode *pBooterUnloadUcode;
337     MEMORY_DESCRIPTOR *pWprMetaDescriptor;
338     GspFwWprMeta *pWprMeta;
339     NvP64 pWprMetaMappingPriv;
340     MEMORY_DESCRIPTOR *pSRMetaDescriptor;
341     MEMORY_DESCRIPTOR *pSRRadix3Descriptor;
342     MEMORY_DESCRIPTOR *pGspFmcArgumentsDescriptor;
343     GSP_FMC_BOOT_PARAMS *pGspFmcArgumentsCached;
344     NvP64 pGspFmcArgumentsMappingPriv;
345     MEMORY_DESCRIPTOR *pLibosInitArgumentsDescriptor;
346     LibosMemoryRegionInitArgument *pLibosInitArgumentsCached;
347     NvP64 pLibosInitArgumentsMappingPriv;
348     MEMORY_DESCRIPTOR *pGspArgumentsDescriptor;
349     GSP_ARGUMENTS_CACHED *pGspArgumentsCached;
350     NvP64 pGspArgumentsMappingPriv;
351     MEMORY_DESCRIPTOR *pGspRmBootUcodeMemdesc;
352     NvP64 pGspRmBootUcodeMemdescPriv;
353     NvU32 gspRmBootUcodeSize;
354     NvU8 *pGspRmBootUcodeImage;
355     RM_RISCV_UCODE_DESC *pGspRmBootUcodeDesc;
356     MEMORY_DESCRIPTOR *pGspUCodeRadix3Descriptor;
357     MEMORY_DESCRIPTOR *pSignatureMemdesc;
358     LIBOS_LOG_DECODE logDecode;
359     LIBOS_LOG_DECODE logDecodeVgpuPartition[32];
360     RM_LIBOS_LOG_MEM rmLibosLogMem[3];
361     RM_LIBOS_LOG_MEM gspPluginInitTaskLogMem[32];
362     RM_LIBOS_LOG_MEM gspPluginVgpuTaskLogMem[32];
363     NvBool bHasVgpuLogs;
364     void *pLogElf;
365     NvU64 logElfDataSize;
366     PORT_MUTEX *pNvlogFlushMtx;
367     NvBool bLibosLogsPollingEnabled;
368     NvBool bInInit;
369     NvBool bInLockdown;
370     NvBool bPollingForRpcResponse;
371     NvBool bFatalError;
372     MEMORY_DESCRIPTOR *pMemDesc_simAccessBuf;
373     SimAccessBuffer *pSimAccessBuf;
374     NvP64 pSimAccessBufPriv;
375     MEMORY_DESCRIPTOR *pProfilerSamplesMD;
376     void *pProfilerSamplesMDPriv;
377     void *pProfilerSamples;
378     GspStaticConfigInfo gspStaticInfo;
379     NvBool bIsTaskIsrQueueRequired;
380     NvBool bPartitionedFmc;
381     NvBool bScrubberUcodeSupported;
382     NvU32 fwHeapParamBaseSize;
383     NvU32 fwHeapParamOsCarveoutSize;
384 };
385 
386 #ifndef __NVOC_CLASS_KernelGsp_TYPEDEF__
387 #define __NVOC_CLASS_KernelGsp_TYPEDEF__
388 typedef struct KernelGsp KernelGsp;
389 #endif /* __NVOC_CLASS_KernelGsp_TYPEDEF__ */
390 
391 #ifndef __nvoc_class_id_KernelGsp
392 #define __nvoc_class_id_KernelGsp 0x311d4e
393 #endif /* __nvoc_class_id_KernelGsp */
394 
395 extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelGsp;
396 
397 #define __staticCast_KernelGsp(pThis) \
398     ((pThis)->__nvoc_pbase_KernelGsp)
399 
400 #ifdef __nvoc_kernel_gsp_h_disabled
401 #define __dynamicCast_KernelGsp(pThis) ((KernelGsp*)NULL)
402 #else //__nvoc_kernel_gsp_h_disabled
403 #define __dynamicCast_KernelGsp(pThis) \
404     ((KernelGsp*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelGsp)))
405 #endif //__nvoc_kernel_gsp_h_disabled
406 
407 #define PDB_PROP_KGSP_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
408 #define PDB_PROP_KGSP_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
409 
410 NV_STATUS __nvoc_objCreateDynamic_KernelGsp(KernelGsp**, Dynamic*, NvU32, va_list);
411 
412 NV_STATUS __nvoc_objCreate_KernelGsp(KernelGsp**, Dynamic*, NvU32);
413 #define __objCreate_KernelGsp(ppNewObj, pParent, createFlags) \
414     __nvoc_objCreate_KernelGsp((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
415 
416 #define kgspConstructEngine(pGpu, pKernelGsp, arg0) kgspConstructEngine_DISPATCH(pGpu, pKernelGsp, arg0)
417 #define kgspRegisterIntrService(pGpu, pKernelGsp, pRecords) kgspRegisterIntrService_DISPATCH(pGpu, pKernelGsp, pRecords)
418 #define kgspServiceInterrupt(pGpu, pKernelGsp, pParams) kgspServiceInterrupt_DISPATCH(pGpu, pKernelGsp, pParams)
419 #define kgspConfigureFalcon(pGpu, pKernelGsp) kgspConfigureFalcon_DISPATCH(pGpu, pKernelGsp)
420 #define kgspConfigureFalcon_HAL(pGpu, pKernelGsp) kgspConfigureFalcon_DISPATCH(pGpu, pKernelGsp)
421 #define kgspIsDebugModeEnabled(pGpu, pKernelGsp) kgspIsDebugModeEnabled_DISPATCH(pGpu, pKernelGsp)
422 #define kgspIsDebugModeEnabled_HAL(pGpu, pKernelGsp) kgspIsDebugModeEnabled_DISPATCH(pGpu, pKernelGsp)
423 #define kgspAllocBootArgs(pGpu, pKernelGsp) kgspAllocBootArgs_DISPATCH(pGpu, pKernelGsp)
424 #define kgspAllocBootArgs_HAL(pGpu, pKernelGsp) kgspAllocBootArgs_DISPATCH(pGpu, pKernelGsp)
425 #define kgspFreeBootArgs(pGpu, pKernelGsp) kgspFreeBootArgs_DISPATCH(pGpu, pKernelGsp)
426 #define kgspFreeBootArgs_HAL(pGpu, pKernelGsp) kgspFreeBootArgs_DISPATCH(pGpu, pKernelGsp)
427 #define kgspBootstrapRiscvOSEarly(pGpu, pKernelGsp, pGspFw) kgspBootstrapRiscvOSEarly_DISPATCH(pGpu, pKernelGsp, pGspFw)
428 #define kgspBootstrapRiscvOSEarly_HAL(pGpu, pKernelGsp, pGspFw) kgspBootstrapRiscvOSEarly_DISPATCH(pGpu, pKernelGsp, pGspFw)
429 #define kgspGetGspRmBootUcodeStorage(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc) kgspGetGspRmBootUcodeStorage_DISPATCH(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc)
430 #define kgspGetGspRmBootUcodeStorage_HAL(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc) kgspGetGspRmBootUcodeStorage_DISPATCH(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc)
431 #define kgspGetBinArchiveGspRmBoot(pKernelGsp) kgspGetBinArchiveGspRmBoot_DISPATCH(pKernelGsp)
432 #define kgspGetBinArchiveGspRmBoot_HAL(pKernelGsp) kgspGetBinArchiveGspRmBoot_DISPATCH(pKernelGsp)
433 #define kgspGetBinArchiveConcatenatedFMCDesc(pKernelGsp) kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(pKernelGsp)
434 #define kgspGetBinArchiveConcatenatedFMCDesc_HAL(pKernelGsp) kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(pKernelGsp)
435 #define kgspGetBinArchiveConcatenatedFMC(pKernelGsp) kgspGetBinArchiveConcatenatedFMC_DISPATCH(pKernelGsp)
436 #define kgspGetBinArchiveConcatenatedFMC_HAL(pKernelGsp) kgspGetBinArchiveConcatenatedFMC_DISPATCH(pKernelGsp)
437 #define kgspGetBinArchiveGspRmFmcGfwDebugSigned(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(pKernelGsp)
438 #define kgspGetBinArchiveGspRmFmcGfwDebugSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(pKernelGsp)
439 #define kgspGetBinArchiveGspRmFmcGfwProdSigned(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(pKernelGsp)
440 #define kgspGetBinArchiveGspRmFmcGfwProdSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(pKernelGsp)
441 #define kgspGetBinArchiveGspRmCcFmcGfwProdSigned(pKernelGsp) kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(pKernelGsp)
442 #define kgspGetBinArchiveGspRmCcFmcGfwProdSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(pKernelGsp)
443 #define kgspCalculateFbLayout(pGpu, pKernelGsp, pGspFw) kgspCalculateFbLayout_DISPATCH(pGpu, pKernelGsp, pGspFw)
444 #define kgspCalculateFbLayout_HAL(pGpu, pKernelGsp, pGspFw) kgspCalculateFbLayout_DISPATCH(pGpu, pKernelGsp, pGspFw)
445 #define kgspGetNonWprHeapSize(pGpu, pKernelGsp) kgspGetNonWprHeapSize_DISPATCH(pGpu, pKernelGsp)
446 #define kgspGetNonWprHeapSize_HAL(pGpu, pKernelGsp) kgspGetNonWprHeapSize_DISPATCH(pGpu, pKernelGsp)
447 #define kgspExecuteSequencerCommand(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize) kgspExecuteSequencerCommand_DISPATCH(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize)
448 #define kgspExecuteSequencerCommand_HAL(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize) kgspExecuteSequencerCommand_DISPATCH(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize)
449 #define kgspReadUcodeFuseVersion(pGpu, pKernelGsp, ucodeId) kgspReadUcodeFuseVersion_DISPATCH(pGpu, pKernelGsp, ucodeId)
450 #define kgspReadUcodeFuseVersion_HAL(pGpu, pKernelGsp, ucodeId) kgspReadUcodeFuseVersion_DISPATCH(pGpu, pKernelGsp, ucodeId)
451 #define kgspResetHw(pGpu, pKernelGsp) kgspResetHw_DISPATCH(pGpu, pKernelGsp)
452 #define kgspResetHw_HAL(pGpu, pKernelGsp) kgspResetHw_DISPATCH(pGpu, pKernelGsp)
453 #define kgspIsWpr2Up(pGpu, pKernelGsp) kgspIsWpr2Up_DISPATCH(pGpu, pKernelGsp)
454 #define kgspIsWpr2Up_HAL(pGpu, pKernelGsp) kgspIsWpr2Up_DISPATCH(pGpu, pKernelGsp)
455 #define kgspGetFrtsSize(pGpu, pKernelGsp) kgspGetFrtsSize_DISPATCH(pGpu, pKernelGsp)
456 #define kgspGetFrtsSize_HAL(pGpu, pKernelGsp) kgspGetFrtsSize_DISPATCH(pGpu, pKernelGsp)
457 #define kgspGetPrescrubbedTopFbSize(pGpu, pKernelGsp) kgspGetPrescrubbedTopFbSize_DISPATCH(pGpu, pKernelGsp)
458 #define kgspGetPrescrubbedTopFbSize_HAL(pGpu, pKernelGsp) kgspGetPrescrubbedTopFbSize_DISPATCH(pGpu, pKernelGsp)
459 #define kgspExtractVbiosFromRom(pGpu, pKernelGsp, ppVbiosImg) kgspExtractVbiosFromRom_DISPATCH(pGpu, pKernelGsp, ppVbiosImg)
460 #define kgspExtractVbiosFromRom_HAL(pGpu, pKernelGsp, ppVbiosImg) kgspExtractVbiosFromRom_DISPATCH(pGpu, pKernelGsp, ppVbiosImg)
461 #define kgspExecuteFwsecFrts(pGpu, pKernelGsp, pFwsecUcode, frtsOffset) kgspExecuteFwsecFrts_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, frtsOffset)
462 #define kgspExecuteFwsecFrts_HAL(pGpu, pKernelGsp, pFwsecUcode, frtsOffset) kgspExecuteFwsecFrts_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, frtsOffset)
463 #define kgspExecuteFwsecSb(pGpu, pKernelGsp, pFwsecUcode) kgspExecuteFwsecSb_DISPATCH(pGpu, pKernelGsp, pFwsecUcode)
464 #define kgspExecuteFwsecSb_HAL(pGpu, pKernelGsp, pFwsecUcode) kgspExecuteFwsecSb_DISPATCH(pGpu, pKernelGsp, pFwsecUcode)
465 #define kgspExecuteScrubberIfNeeded(pGpu, pKernelGsp) kgspExecuteScrubberIfNeeded_DISPATCH(pGpu, pKernelGsp)
466 #define kgspExecuteScrubberIfNeeded_HAL(pGpu, pKernelGsp) kgspExecuteScrubberIfNeeded_DISPATCH(pGpu, pKernelGsp)
467 #define kgspExecuteBooterLoad(pGpu, pKernelGsp, sysmemAddrOfData) kgspExecuteBooterLoad_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfData)
468 #define kgspExecuteBooterLoad_HAL(pGpu, pKernelGsp, sysmemAddrOfData) kgspExecuteBooterLoad_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfData)
469 #define kgspExecuteBooterUnloadIfNeeded(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData) kgspExecuteBooterUnloadIfNeeded_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData)
470 #define kgspExecuteBooterUnloadIfNeeded_HAL(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData) kgspExecuteBooterUnloadIfNeeded_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData)
471 #define kgspExecuteHsFalcon(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1) kgspExecuteHsFalcon_DISPATCH(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1)
472 #define kgspExecuteHsFalcon_HAL(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1) kgspExecuteHsFalcon_DISPATCH(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1)
473 #define kgspWaitForGfwBootOk(pGpu, pKernelGsp) kgspWaitForGfwBootOk_DISPATCH(pGpu, pKernelGsp)
474 #define kgspWaitForGfwBootOk_HAL(pGpu, pKernelGsp) kgspWaitForGfwBootOk_DISPATCH(pGpu, pKernelGsp)
475 #define kgspGetBinArchiveBooterLoadUcode(pKernelGsp) kgspGetBinArchiveBooterLoadUcode_DISPATCH(pKernelGsp)
476 #define kgspGetBinArchiveBooterLoadUcode_HAL(pKernelGsp) kgspGetBinArchiveBooterLoadUcode_DISPATCH(pKernelGsp)
477 #define kgspGetBinArchiveBooterUnloadUcode(pKernelGsp) kgspGetBinArchiveBooterUnloadUcode_DISPATCH(pKernelGsp)
478 #define kgspGetBinArchiveBooterUnloadUcode_HAL(pKernelGsp) kgspGetBinArchiveBooterUnloadUcode_DISPATCH(pKernelGsp)
479 #define kgspGetMinWprHeapSizeMB(pGpu, pKernelGsp) kgspGetMinWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
480 #define kgspGetMinWprHeapSizeMB_HAL(pGpu, pKernelGsp) kgspGetMinWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
481 #define kgspGetMaxWprHeapSizeMB(pGpu, pKernelGsp) kgspGetMaxWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
482 #define kgspGetMaxWprHeapSizeMB_HAL(pGpu, pKernelGsp) kgspGetMaxWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
483 #define kgspInitVgpuPartitionLogging(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize) kgspInitVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize)
484 #define kgspInitVgpuPartitionLogging_HAL(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize) kgspInitVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize)
485 #define kgspFreeVgpuPartitionLogging(pGpu, pKernelGsp, gfid) kgspFreeVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid)
486 #define kgspFreeVgpuPartitionLogging_HAL(pGpu, pKernelGsp, gfid) kgspFreeVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid)
487 #define kgspGetSignatureSectionNamePrefix(pGpu, pKernelGsp) kgspGetSignatureSectionNamePrefix_DISPATCH(pGpu, pKernelGsp)
488 #define kgspGetSignatureSectionNamePrefix_HAL(pGpu, pKernelGsp) kgspGetSignatureSectionNamePrefix_DISPATCH(pGpu, pKernelGsp)
489 #define kgspSetupGspFmcArgs(pGpu, pKernelGsp, pGspFw) kgspSetupGspFmcArgs_DISPATCH(pGpu, pKernelGsp, pGspFw)
490 #define kgspSetupGspFmcArgs_HAL(pGpu, pKernelGsp, pGspFw) kgspSetupGspFmcArgs_DISPATCH(pGpu, pKernelGsp, pGspFw)
491 #define kgspConfigured(arg0) kgspConfigured_DISPATCH(arg0)
492 #define kgspPriRead(arg0, offset) kgspPriRead_DISPATCH(arg0, offset)
493 #define kgspRegWrite(pGpu, pKernelFlcn, offset, data) kgspRegWrite_DISPATCH(pGpu, pKernelFlcn, offset, data)
494 #define kgspMaskDmemAddr(pGpu, pKernelFlcn, addr) kgspMaskDmemAddr_DISPATCH(pGpu, pKernelFlcn, addr)
495 #define kgspStateDestroy(pGpu, pEngstate) kgspStateDestroy_DISPATCH(pGpu, pEngstate)
496 #define kgspVprintf(arg0, bReportStart, fmt, args) kgspVprintf_DISPATCH(arg0, bReportStart, fmt, args)
497 #define kgspClearInterrupt(pGpu, pIntrService, pParams) kgspClearInterrupt_DISPATCH(pGpu, pIntrService, pParams)
498 #define kgspPriWrite(arg0, offset, data) kgspPriWrite_DISPATCH(arg0, offset, data)
499 #define kgspMapBufferDescriptor(arg0, pBufDesc) kgspMapBufferDescriptor_DISPATCH(arg0, pBufDesc)
500 #define kgspSyncBufferDescriptor(arg0, pBufDesc, offset, size) kgspSyncBufferDescriptor_DISPATCH(arg0, pBufDesc, offset, size)
501 #define kgspRegRead(pGpu, pKernelFlcn, offset) kgspRegRead_DISPATCH(pGpu, pKernelFlcn, offset)
502 #define kgspIsPresent(pGpu, pEngstate) kgspIsPresent_DISPATCH(pGpu, pEngstate)
503 #define kgspReadEmem(arg0, offset, size, pBuf) kgspReadEmem_DISPATCH(arg0, offset, size, pBuf)
504 #define kgspStateLoad(pGpu, pEngstate, arg0) kgspStateLoad_DISPATCH(pGpu, pEngstate, arg0)
505 #define kgspGetScratchOffsets(arg0, scratchGroupId) kgspGetScratchOffsets_DISPATCH(arg0, scratchGroupId)
506 #define kgspUnload(arg0) kgspUnload_DISPATCH(arg0)
507 #define kgspStateUnload(pGpu, pEngstate, arg0) kgspStateUnload_DISPATCH(pGpu, pEngstate, arg0)
508 #define kgspServiceNotificationInterrupt(pGpu, pIntrService, pParams) kgspServiceNotificationInterrupt_DISPATCH(pGpu, pIntrService, pParams)
509 #define kgspGetWFL0Offset(arg0) kgspGetWFL0Offset_DISPATCH(arg0)
510 #define kgspStateInitLocked(pGpu, pEngstate) kgspStateInitLocked_DISPATCH(pGpu, pEngstate)
511 #define kgspStatePreLoad(pGpu, pEngstate, arg0) kgspStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
512 #define kgspStatePostUnload(pGpu, pEngstate, arg0) kgspStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
513 #define kgspStatePreUnload(pGpu, pEngstate, arg0) kgspStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
514 #define kgspStateInitUnlocked(pGpu, pEngstate) kgspStateInitUnlocked_DISPATCH(pGpu, pEngstate)
515 #define kgspInitMissing(pGpu, pEngstate) kgspInitMissing_DISPATCH(pGpu, pEngstate)
516 #define kgspStatePreInitLocked(pGpu, pEngstate) kgspStatePreInitLocked_DISPATCH(pGpu, pEngstate)
517 #define kgspStatePreInitUnlocked(pGpu, pEngstate) kgspStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
518 #define kgspStatePostLoad(pGpu, pEngstate, arg0) kgspStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
519 #define kgspUnmapBufferDescriptor(arg0, pBufDesc) kgspUnmapBufferDescriptor_DISPATCH(arg0, pBufDesc)
520 #define kgspReadDmem(arg0, offset, size, pBuf) kgspReadDmem_DISPATCH(arg0, offset, size, pBuf)
521 void kgspProgramLibosBootArgsAddr_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
522 
523 
524 #ifdef __nvoc_kernel_gsp_h_disabled
525 static inline void kgspProgramLibosBootArgsAddr(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
526     NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
527 }
528 #else //__nvoc_kernel_gsp_h_disabled
529 #define kgspProgramLibosBootArgsAddr(pGpu, pKernelGsp) kgspProgramLibosBootArgsAddr_TU102(pGpu, pKernelGsp)
530 #endif //__nvoc_kernel_gsp_h_disabled
531 
532 #define kgspProgramLibosBootArgsAddr_HAL(pGpu, pKernelGsp) kgspProgramLibosBootArgsAddr(pGpu, pKernelGsp)
533 
534 NV_STATUS kgspSetCmdQueueHead_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 queueIdx, NvU32 value);
535 
536 
537 #ifdef __nvoc_kernel_gsp_h_disabled
538 static inline NV_STATUS kgspSetCmdQueueHead(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 queueIdx, NvU32 value) {
539     NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
540     return NV_ERR_NOT_SUPPORTED;
541 }
542 #else //__nvoc_kernel_gsp_h_disabled
543 #define kgspSetCmdQueueHead(pGpu, pKernelGsp, queueIdx, value) kgspSetCmdQueueHead_TU102(pGpu, pKernelGsp, queueIdx, value)
544 #endif //__nvoc_kernel_gsp_h_disabled
545 
546 #define kgspSetCmdQueueHead_HAL(pGpu, pKernelGsp, queueIdx, value) kgspSetCmdQueueHead(pGpu, pKernelGsp, queueIdx, value)
547 
548 NvBool kgspHealthCheck_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
549 
550 
551 #ifdef __nvoc_kernel_gsp_h_disabled
552 static inline NvBool kgspHealthCheck(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
553     NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
554     return NV_FALSE;
555 }
556 #else //__nvoc_kernel_gsp_h_disabled
557 #define kgspHealthCheck(pGpu, pKernelGsp) kgspHealthCheck_TU102(pGpu, pKernelGsp)
558 #endif //__nvoc_kernel_gsp_h_disabled
559 
560 #define kgspHealthCheck_HAL(pGpu, pKernelGsp) kgspHealthCheck(pGpu, pKernelGsp)
561 
562 NvU32 kgspService_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
563 
564 
565 #ifdef __nvoc_kernel_gsp_h_disabled
566 static inline NvU32 kgspService(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
567     NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
568     return 0;
569 }
570 #else //__nvoc_kernel_gsp_h_disabled
571 #define kgspService(pGpu, pKernelGsp) kgspService_TU102(pGpu, pKernelGsp)
572 #endif //__nvoc_kernel_gsp_h_disabled
573 
574 #define kgspService_HAL(pGpu, pKernelGsp) kgspService(pGpu, pKernelGsp)
575 
576 NV_STATUS kgspWaitForProcessorSuspend_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
577 
578 
579 #ifdef __nvoc_kernel_gsp_h_disabled
580 static inline NV_STATUS kgspWaitForProcessorSuspend(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
581     NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
582     return NV_ERR_NOT_SUPPORTED;
583 }
584 #else //__nvoc_kernel_gsp_h_disabled
585 #define kgspWaitForProcessorSuspend(pGpu, pKernelGsp) kgspWaitForProcessorSuspend_TU102(pGpu, pKernelGsp)
586 #endif //__nvoc_kernel_gsp_h_disabled
587 
588 #define kgspWaitForProcessorSuspend_HAL(pGpu, pKernelGsp) kgspWaitForProcessorSuspend(pGpu, pKernelGsp)
589 
590 NV_STATUS kgspSavePowerMgmtState_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
591 
592 
593 #ifdef __nvoc_kernel_gsp_h_disabled
594 static inline NV_STATUS kgspSavePowerMgmtState(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
595     NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
596     return NV_ERR_NOT_SUPPORTED;
597 }
598 #else //__nvoc_kernel_gsp_h_disabled
599 #define kgspSavePowerMgmtState(pGpu, pKernelGsp) kgspSavePowerMgmtState_TU102(pGpu, pKernelGsp)
600 #endif //__nvoc_kernel_gsp_h_disabled
601 
602 #define kgspSavePowerMgmtState_HAL(pGpu, pKernelGsp) kgspSavePowerMgmtState(pGpu, pKernelGsp)
603 
604 NV_STATUS kgspRestorePowerMgmtState_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
605 
606 
607 #ifdef __nvoc_kernel_gsp_h_disabled
608 static inline NV_STATUS kgspRestorePowerMgmtState(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
609     NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
610     return NV_ERR_NOT_SUPPORTED;
611 }
612 #else //__nvoc_kernel_gsp_h_disabled
613 #define kgspRestorePowerMgmtState(pGpu, pKernelGsp) kgspRestorePowerMgmtState_TU102(pGpu, pKernelGsp)
614 #endif //__nvoc_kernel_gsp_h_disabled
615 
616 #define kgspRestorePowerMgmtState_HAL(pGpu, pKernelGsp) kgspRestorePowerMgmtState(pGpu, pKernelGsp)
617 
618 void kgspFreeSuspendResumeData_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
619 
620 
621 #ifdef __nvoc_kernel_gsp_h_disabled
622 static inline void kgspFreeSuspendResumeData(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
623     NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
624 }
625 #else //__nvoc_kernel_gsp_h_disabled
626 #define kgspFreeSuspendResumeData(pGpu, pKernelGsp) kgspFreeSuspendResumeData_TU102(pGpu, pKernelGsp)
627 #endif //__nvoc_kernel_gsp_h_disabled
628 
629 #define kgspFreeSuspendResumeData_HAL(pGpu, pKernelGsp) kgspFreeSuspendResumeData(pGpu, pKernelGsp)
630 
631 NV_STATUS kgspConstructEngine_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, ENGDESCRIPTOR arg0);
632 
633 static inline NV_STATUS kgspConstructEngine_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, ENGDESCRIPTOR arg0) {
634     return pKernelGsp->__kgspConstructEngine__(pGpu, pKernelGsp, arg0);
635 }
636 
637 void kgspRegisterIntrService_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceRecord pRecords[167]);
638 
639 static inline void kgspRegisterIntrService_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceRecord pRecords[167]) {
640     pKernelGsp->__kgspRegisterIntrService__(pGpu, pKernelGsp, pRecords);
641 }
642 
643 NvU32 kgspServiceInterrupt_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceServiceInterruptArguments *pParams);
644 
645 static inline NvU32 kgspServiceInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceServiceInterruptArguments *pParams) {
646     return pKernelGsp->__kgspServiceInterrupt__(pGpu, pKernelGsp, pParams);
647 }
648 
649 void kgspConfigureFalcon_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
650 
651 void kgspConfigureFalcon_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
652 
653 static inline void kgspConfigureFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
654     pKernelGsp->__kgspConfigureFalcon__(pGpu, pKernelGsp);
655 }
656 
657 NvBool kgspIsDebugModeEnabled_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
658 
659 NvBool kgspIsDebugModeEnabled_GA100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
660 
661 static inline NvBool kgspIsDebugModeEnabled_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
662     return pKernelGsp->__kgspIsDebugModeEnabled__(pGpu, pKernelGsp);
663 }
664 
665 NV_STATUS kgspAllocBootArgs_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
666 
667 NV_STATUS kgspAllocBootArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
668 
669 static inline NV_STATUS kgspAllocBootArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
670     return pKernelGsp->__kgspAllocBootArgs__(pGpu, pKernelGsp);
671 }
672 
673 void kgspFreeBootArgs_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
674 
675 void kgspFreeBootArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
676 
677 static inline void kgspFreeBootArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
678     pKernelGsp->__kgspFreeBootArgs__(pGpu, pKernelGsp);
679 }
680 
681 NV_STATUS kgspBootstrapRiscvOSEarly_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
682 
683 NV_STATUS kgspBootstrapRiscvOSEarly_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
684 
685 NV_STATUS kgspBootstrapRiscvOSEarly_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
686 
687 static inline NV_STATUS kgspBootstrapRiscvOSEarly_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
688     return pKernelGsp->__kgspBootstrapRiscvOSEarly__(pGpu, pKernelGsp, pGspFw);
689 }
690 
691 void kgspGetGspRmBootUcodeStorage_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc);
692 
693 void kgspGetGspRmBootUcodeStorage_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc);
694 
695 void kgspGetGspRmBootUcodeStorage_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc);
696 
697 static inline void kgspGetGspRmBootUcodeStorage_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc) {
698     pKernelGsp->__kgspGetGspRmBootUcodeStorage__(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc);
699 }
700 
701 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_TU102(struct KernelGsp *pKernelGsp);
702 
703 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GA100(struct KernelGsp *pKernelGsp);
704 
705 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GA102(struct KernelGsp *pKernelGsp);
706 
707 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GH100(struct KernelGsp *pKernelGsp);
708 
709 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_AD102(struct KernelGsp *pKernelGsp);
710 
711 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_DISPATCH(struct KernelGsp *pKernelGsp) {
712     return pKernelGsp->__kgspGetBinArchiveGspRmBoot__(pKernelGsp);
713 }
714 
715 const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_GH100(struct KernelGsp *pKernelGsp);
716 
717 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_80f438(struct KernelGsp *pKernelGsp) {
718     NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
719 }
720 
721 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(struct KernelGsp *pKernelGsp) {
722     return pKernelGsp->__kgspGetBinArchiveConcatenatedFMCDesc__(pKernelGsp);
723 }
724 
725 const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_GH100(struct KernelGsp *pKernelGsp);
726 
727 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_80f438(struct KernelGsp *pKernelGsp) {
728     NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
729 }
730 
731 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_DISPATCH(struct KernelGsp *pKernelGsp) {
732     return pKernelGsp->__kgspGetBinArchiveConcatenatedFMC__(pKernelGsp);
733 }
734 
735 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_GH100(struct KernelGsp *pKernelGsp);
736 
737 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_80f438(struct KernelGsp *pKernelGsp) {
738     NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
739 }
740 
741 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(struct KernelGsp *pKernelGsp) {
742     return pKernelGsp->__kgspGetBinArchiveGspRmFmcGfwDebugSigned__(pKernelGsp);
743 }
744 
745 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_GH100(struct KernelGsp *pKernelGsp);
746 
747 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_80f438(struct KernelGsp *pKernelGsp) {
748     NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
749 }
750 
751 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(struct KernelGsp *pKernelGsp) {
752     return pKernelGsp->__kgspGetBinArchiveGspRmFmcGfwProdSigned__(pKernelGsp);
753 }
754 
755 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_GH100(struct KernelGsp *pKernelGsp);
756 
757 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_80f438(struct KernelGsp *pKernelGsp) {
758     NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
759 }
760 
761 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(struct KernelGsp *pKernelGsp) {
762     return pKernelGsp->__kgspGetBinArchiveGspRmCcFmcGfwProdSigned__(pKernelGsp);
763 }
764 
765 NV_STATUS kgspCalculateFbLayout_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
766 
767 NV_STATUS kgspCalculateFbLayout_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
768 
769 static inline NV_STATUS kgspCalculateFbLayout_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
770     return pKernelGsp->__kgspCalculateFbLayout__(pGpu, pKernelGsp, pGspFw);
771 }
772 
773 static inline NvU32 kgspGetNonWprHeapSize_ed6b8b(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
774     return 1048576;
775 }
776 
777 static inline NvU32 kgspGetNonWprHeapSize_d505ea(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
778     return 2097152;
779 }
780 
781 static inline NvU32 kgspGetNonWprHeapSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
782     return pKernelGsp->__kgspGetNonWprHeapSize__(pGpu, pKernelGsp);
783 }
784 
785 NV_STATUS kgspExecuteSequencerCommand_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize);
786 
787 NV_STATUS kgspExecuteSequencerCommand_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize);
788 
789 static inline NV_STATUS kgspExecuteSequencerCommand_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize) {
790     return pKernelGsp->__kgspExecuteSequencerCommand__(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize);
791 }
792 
793 static inline NvU32 kgspReadUcodeFuseVersion_b2b553(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId) {
794     return 0;
795 }
796 
797 NvU32 kgspReadUcodeFuseVersion_GA100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId);
798 
799 static inline NvU32 kgspReadUcodeFuseVersion_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId) {
800     return pKernelGsp->__kgspReadUcodeFuseVersion__(pGpu, pKernelGsp, ucodeId);
801 }
802 
803 NV_STATUS kgspResetHw_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
804 
805 NV_STATUS kgspResetHw_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
806 
807 static inline NV_STATUS kgspResetHw_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
808     return pKernelGsp->__kgspResetHw__(pGpu, pKernelGsp);
809 }
810 
811 NvBool kgspIsWpr2Up_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
812 
813 NvBool kgspIsWpr2Up_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
814 
815 static inline NvBool kgspIsWpr2Up_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
816     return pKernelGsp->__kgspIsWpr2Up__(pGpu, pKernelGsp);
817 }
818 
819 NvU32 kgspGetFrtsSize_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
820 
821 static inline NvU32 kgspGetFrtsSize_4a4dee(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
822     return 0;
823 }
824 
825 static inline NvU32 kgspGetFrtsSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
826     return pKernelGsp->__kgspGetFrtsSize__(pGpu, pKernelGsp);
827 }
828 
829 static inline NvU64 kgspGetPrescrubbedTopFbSize_e1e623(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
830     return 256 * 1024 * 1024;
831 }
832 
833 static inline NvU64 kgspGetPrescrubbedTopFbSize_604eb7(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
834     return (+18446744073709551615ULL);
835 }
836 
837 static inline NvU64 kgspGetPrescrubbedTopFbSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
838     return pKernelGsp->__kgspGetPrescrubbedTopFbSize__(pGpu, pKernelGsp);
839 }
840 
841 NV_STATUS kgspExtractVbiosFromRom_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg);
842 
843 static inline NV_STATUS kgspExtractVbiosFromRom_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg) {
844     return NV_ERR_NOT_SUPPORTED;
845 }
846 
847 static inline NV_STATUS kgspExtractVbiosFromRom_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg) {
848     return pKernelGsp->__kgspExtractVbiosFromRom__(pGpu, pKernelGsp, ppVbiosImg);
849 }
850 
851 NV_STATUS kgspExecuteFwsecFrts_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset);
852 
853 static inline NV_STATUS kgspExecuteFwsecFrts_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset) {
854     NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
855 }
856 
857 static inline NV_STATUS kgspExecuteFwsecFrts_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset) {
858     return pKernelGsp->__kgspExecuteFwsecFrts__(pGpu, pKernelGsp, pFwsecUcode, frtsOffset);
859 }
860 
861 NV_STATUS kgspExecuteFwsecSb_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode);
862 
863 static inline NV_STATUS kgspExecuteFwsecSb_ac1694(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode) {
864     return NV_OK;
865 }
866 
867 static inline NV_STATUS kgspExecuteFwsecSb_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode) {
868     return pKernelGsp->__kgspExecuteFwsecSb__(pGpu, pKernelGsp, pFwsecUcode);
869 }
870 
871 NV_STATUS kgspExecuteScrubberIfNeeded_AD102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
872 
873 static inline NV_STATUS kgspExecuteScrubberIfNeeded_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
874     NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
875 }
876 
877 static inline NV_STATUS kgspExecuteScrubberIfNeeded_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
878     return pKernelGsp->__kgspExecuteScrubberIfNeeded__(pGpu, pKernelGsp);
879 }
880 
881 NV_STATUS kgspExecuteBooterLoad_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData);
882 
883 static inline NV_STATUS kgspExecuteBooterLoad_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData) {
884     NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
885 }
886 
887 static inline NV_STATUS kgspExecuteBooterLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData) {
888     return pKernelGsp->__kgspExecuteBooterLoad__(pGpu, pKernelGsp, sysmemAddrOfData);
889 }
890 
891 NV_STATUS kgspExecuteBooterUnloadIfNeeded_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData);
892 
893 static inline NV_STATUS kgspExecuteBooterUnloadIfNeeded_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData) {
894     NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
895 }
896 
897 static inline NV_STATUS kgspExecuteBooterUnloadIfNeeded_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData) {
898     return pKernelGsp->__kgspExecuteBooterUnloadIfNeeded__(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData);
899 }
900 
901 NV_STATUS kgspExecuteHsFalcon_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1);
902 
903 NV_STATUS kgspExecuteHsFalcon_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1);
904 
905 static inline NV_STATUS kgspExecuteHsFalcon_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1) {
906     NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
907 }
908 
909 static inline NV_STATUS kgspExecuteHsFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1) {
910     return pKernelGsp->__kgspExecuteHsFalcon__(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1);
911 }
912 
913 NV_STATUS kgspWaitForGfwBootOk_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
914 
915 NV_STATUS kgspWaitForGfwBootOk_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
916 
917 static inline NV_STATUS kgspWaitForGfwBootOk_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
918     return pKernelGsp->__kgspWaitForGfwBootOk__(pGpu, pKernelGsp);
919 }
920 
921 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_TU102(struct KernelGsp *pKernelGsp);
922 
923 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_TU116(struct KernelGsp *pKernelGsp);
924 
925 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_GA100(struct KernelGsp *pKernelGsp);
926 
927 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_GA102(struct KernelGsp *pKernelGsp);
928 
929 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_AD102(struct KernelGsp *pKernelGsp);
930 
931 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_80f438(struct KernelGsp *pKernelGsp) {
932     NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
933 }
934 
935 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_DISPATCH(struct KernelGsp *pKernelGsp) {
936     return pKernelGsp->__kgspGetBinArchiveBooterLoadUcode__(pKernelGsp);
937 }
938 
939 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_TU102(struct KernelGsp *pKernelGsp);
940 
941 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_TU116(struct KernelGsp *pKernelGsp);
942 
943 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_GA100(struct KernelGsp *pKernelGsp);
944 
945 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_GA102(struct KernelGsp *pKernelGsp);
946 
947 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_AD102(struct KernelGsp *pKernelGsp);
948 
949 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_80f438(struct KernelGsp *pKernelGsp) {
950     NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
951 }
952 
953 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_DISPATCH(struct KernelGsp *pKernelGsp) {
954     return pKernelGsp->__kgspGetBinArchiveBooterUnloadUcode__(pKernelGsp);
955 }
956 
957 static inline NvU64 kgspGetMinWprHeapSizeMB_7185bf(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
958     return (64U);
959 }
960 
961 static inline NvU64 kgspGetMinWprHeapSizeMB_907c84(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
962     return pGpu->bVgpuGspPluginOffloadEnabled ? (549U) : (84U);
963 }
964 
965 static inline NvU64 kgspGetMinWprHeapSizeMB_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
966     return pKernelGsp->__kgspGetMinWprHeapSizeMB__(pGpu, pKernelGsp);
967 }
968 
969 static inline NvU64 kgspGetMaxWprHeapSizeMB_ad4e6a(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
970     return (256U);
971 }
972 
973 static inline NvU64 kgspGetMaxWprHeapSizeMB_5839e2(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
974     return pGpu->bVgpuGspPluginOffloadEnabled ? (1024U) : (276U);
975 }
976 
977 static inline NvU64 kgspGetMaxWprHeapSizeMB_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
978     return pKernelGsp->__kgspGetMaxWprHeapSizeMB__(pGpu, pKernelGsp);
979 }
980 
981 static inline NV_STATUS kgspInitVgpuPartitionLogging_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize) {
982     return NV_ERR_NOT_SUPPORTED;
983 }
984 
985 NV_STATUS kgspInitVgpuPartitionLogging_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize);
986 
987 static inline NV_STATUS kgspInitVgpuPartitionLogging_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize) {
988     return pKernelGsp->__kgspInitVgpuPartitionLogging__(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize);
989 }
990 
991 static inline NV_STATUS kgspFreeVgpuPartitionLogging_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid) {
992     return NV_ERR_NOT_SUPPORTED;
993 }
994 
995 NV_STATUS kgspFreeVgpuPartitionLogging_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid);
996 
997 static inline NV_STATUS kgspFreeVgpuPartitionLogging_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid) {
998     return pKernelGsp->__kgspFreeVgpuPartitionLogging__(pGpu, pKernelGsp, gfid);
999 }

const char *kgspGetSignatureSectionNamePrefix_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

static inline const char *kgspGetSignatureSectionNamePrefix_789efb(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return ".fwsignature_";
}

static inline const char *kgspGetSignatureSectionNamePrefix_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetSignatureSectionNamePrefix__(pGpu, pKernelGsp);
}

NV_STATUS kgspSetupGspFmcArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);

static inline NV_STATUS kgspSetupGspFmcArgs_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

static inline NV_STATUS kgspSetupGspFmcArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    return pKernelGsp->__kgspSetupGspFmcArgs__(pGpu, pKernelGsp, pGspFw);
}

static inline NvBool kgspConfigured_DISPATCH(struct KernelGsp *arg0) {
    return arg0->__kgspConfigured__(arg0);
}

static inline NvU32 kgspPriRead_DISPATCH(struct KernelGsp *arg0, NvU32 offset) {
    return arg0->__kgspPriRead__(arg0, offset);
}

static inline void kgspRegWrite_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelFlcn, NvU32 offset, NvU32 data) {
    pKernelFlcn->__kgspRegWrite__(pGpu, pKernelFlcn, offset, data);
}

static inline NvU32 kgspMaskDmemAddr_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelFlcn, NvU32 addr) {
    return pKernelFlcn->__kgspMaskDmemAddr__(pGpu, pKernelFlcn, addr);
}

static inline void kgspStateDestroy_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
    pEngstate->__kgspStateDestroy__(pGpu, pEngstate);
}

static inline void kgspVprintf_DISPATCH(struct KernelGsp *arg0, NvBool bReportStart, const char *fmt, va_list args) {
    arg0->__kgspVprintf__(arg0, bReportStart, fmt, args);
}

static inline NvBool kgspClearInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pIntrService, IntrServiceClearInterruptArguments *pParams) {
    return pIntrService->__kgspClearInterrupt__(pGpu, pIntrService, pParams);
}

static inline void kgspPriWrite_DISPATCH(struct KernelGsp *arg0, NvU32 offset, NvU32 data) {
    arg0->__kgspPriWrite__(arg0, offset, data);
}

static inline void *kgspMapBufferDescriptor_DISPATCH(struct KernelGsp *arg0, CrashCatBufferDescriptor *pBufDesc) {
    return arg0->__kgspMapBufferDescriptor__(arg0, pBufDesc);
}

static inline void kgspSyncBufferDescriptor_DISPATCH(struct KernelGsp *arg0, CrashCatBufferDescriptor *pBufDesc, NvU32 offset, NvU32 size) {
    arg0->__kgspSyncBufferDescriptor__(arg0, pBufDesc, offset, size);
}

static inline NvU32 kgspRegRead_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelFlcn, NvU32 offset) {
    return pKernelFlcn->__kgspRegRead__(pGpu, pKernelFlcn, offset);
}

static inline NvBool kgspIsPresent_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
    return pEngstate->__kgspIsPresent__(pGpu, pEngstate);
}

static inline void kgspReadEmem_DISPATCH(struct KernelGsp *arg0, NvU64 offset, NvU64 size, void *pBuf) {
    arg0->__kgspReadEmem__(arg0, offset, size, pBuf);
}

static inline NV_STATUS kgspStateLoad_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
    return pEngstate->__kgspStateLoad__(pGpu, pEngstate, arg0);
}

static inline const NvU32 *kgspGetScratchOffsets_DISPATCH(struct KernelGsp *arg0, NV_CRASHCAT_SCRATCH_GROUP_ID scratchGroupId) {
    return arg0->__kgspGetScratchOffsets__(arg0, scratchGroupId);
}

static inline void kgspUnload_DISPATCH(struct KernelGsp *arg0) {
    arg0->__kgspUnload__(arg0);
}

static inline NV_STATUS kgspStateUnload_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
    return pEngstate->__kgspStateUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS kgspServiceNotificationInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pIntrService, IntrServiceServiceNotificationInterruptArguments *pParams) {
    return pIntrService->__kgspServiceNotificationInterrupt__(pGpu, pIntrService, pParams);
}

static inline NvU32 kgspGetWFL0Offset_DISPATCH(struct KernelGsp *arg0) {
    return arg0->__kgspGetWFL0Offset__(arg0);
}

static inline NV_STATUS kgspStateInitLocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
    return pEngstate->__kgspStateInitLocked__(pGpu, pEngstate);
}

static inline NV_STATUS kgspStatePreLoad_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
    return pEngstate->__kgspStatePreLoad__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS kgspStatePostUnload_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
    return pEngstate->__kgspStatePostUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS kgspStatePreUnload_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
    return pEngstate->__kgspStatePreUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS kgspStateInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
    return pEngstate->__kgspStateInitUnlocked__(pGpu, pEngstate);
}

static inline void kgspInitMissing_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
    pEngstate->__kgspInitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS kgspStatePreInitLocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
    return pEngstate->__kgspStatePreInitLocked__(pGpu, pEngstate);
}

static inline NV_STATUS kgspStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
    return pEngstate->__kgspStatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS kgspStatePostLoad_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
    return pEngstate->__kgspStatePostLoad__(pGpu, pEngstate, arg0);
}

static inline void kgspUnmapBufferDescriptor_DISPATCH(struct KernelGsp *arg0, CrashCatBufferDescriptor *pBufDesc) {
    arg0->__kgspUnmapBufferDescriptor__(arg0, pBufDesc);
}

static inline void kgspReadDmem_DISPATCH(struct KernelGsp *arg0, NvU32 offset, NvU32 size, void *pBuf) {
    arg0->__kgspReadDmem__(arg0, offset, size, pBuf);
}

void kgspDestruct_IMPL(struct KernelGsp *pKernelGsp);

#define __nvoc_kgspDestruct(pKernelGsp) kgspDestruct_IMPL(pKernelGsp)
void kgspPopulateGspRmInitArgs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_SR_INIT_ARGUMENTS *pGspSrInitArgs);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline void kgspPopulateGspRmInitArgs(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_SR_INIT_ARGUMENTS *pGspSrInitArgs) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspPopulateGspRmInitArgs(pGpu, pKernelGsp, pGspSrInitArgs) kgspPopulateGspRmInitArgs_IMPL(pGpu, pKernelGsp, pGspSrInitArgs)
#endif //__nvoc_kernel_gsp_h_disabled
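
/*
 * The block above shows the pattern used for the remaining non-virtual kgsp*
 * entry points in this header: when KernelGsp is compiled out
 * (__nvoc_kernel_gsp_h_disabled), the unsuffixed wrapper is a static inline
 * stub that asserts and, where a status is returned, yields
 * NV_ERR_NOT_SUPPORTED; otherwise the wrapper is a macro expanding directly
 * to the *_IMPL function. Either way, callers use the unsuffixed name, e.g.
 * (with srInitArgs as an illustrative GSP_SR_INIT_ARGUMENTS local):
 *
 *     kgspPopulateGspRmInitArgs(pGpu, pKernelGsp, &srInitArgs);
 */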

NV_STATUS kgspInitRm_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspInitRm(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspInitRm(pGpu, pKernelGsp, pGspFw) kgspInitRm_IMPL(pGpu, pKernelGsp, pGspFw)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspCreateRadix3_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, MEMORY_DESCRIPTOR **ppMemdescRadix3, MEMORY_DESCRIPTOR *pMemdescData, const void *pData, NvU64 sizeOfData);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspCreateRadix3(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, MEMORY_DESCRIPTOR **ppMemdescRadix3, MEMORY_DESCRIPTOR *pMemdescData, const void *pData, NvU64 sizeOfData) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspCreateRadix3(pGpu, pKernelGsp, ppMemdescRadix3, pMemdescData, pData, sizeOfData) kgspCreateRadix3_IMPL(pGpu, pKernelGsp, ppMemdescRadix3, pMemdescData, pData, sizeOfData)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspUnloadRm_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspUnloadRm(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspUnloadRm(pGpu, pKernelGsp) kgspUnloadRm_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspPrepareBootBinaryImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspPrepareBootBinaryImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspPrepareBootBinaryImage(pGpu, pKernelGsp) kgspPrepareBootBinaryImage_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

NvU64 kgspGetFwHeapSize_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU64 posteriorFbSize);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NvU64 kgspGetFwHeapSize(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU64 posteriorFbSize) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return 0;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspGetFwHeapSize(pGpu, pKernelGsp, posteriorFbSize) kgspGetFwHeapSize_IMPL(pGpu, pKernelGsp, posteriorFbSize)
#endif //__nvoc_kernel_gsp_h_disabled

void kgspSetupLibosInitArgs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline void kgspSetupLibosInitArgs(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspSetupLibosInitArgs(pGpu, pKernelGsp) kgspSetupLibosInitArgs_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

void kgspRpcRecvEvents_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline void kgspRpcRecvEvents(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspRpcRecvEvents(pGpu, pKernelGsp) kgspRpcRecvEvents_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspWaitForRmInitDone_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspWaitForRmInitDone(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspWaitForRmInitDone(pGpu, pKernelGsp) kgspWaitForRmInitDone_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspStartLogPolling_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspStartLogPolling(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspStartLogPolling(pGpu, pKernelGsp) kgspStartLogPolling_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

void kgspDumpGspLogs_IMPL(struct KernelGsp *pKernelGsp, NvBool arg0);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline void kgspDumpGspLogs(struct KernelGsp *pKernelGsp, NvBool arg0) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspDumpGspLogs(pKernelGsp, arg0) kgspDumpGspLogs_IMPL(pKernelGsp, arg0)
#endif //__nvoc_kernel_gsp_h_disabled

void kgspDumpGspLogsUnlocked_IMPL(struct KernelGsp *pKernelGsp, NvBool arg0);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline void kgspDumpGspLogsUnlocked(struct KernelGsp *pKernelGsp, NvBool arg0) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspDumpGspLogsUnlocked(pKernelGsp, arg0) kgspDumpGspLogsUnlocked_IMPL(pKernelGsp, arg0)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspExecuteSequencerBuffer_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, void *pRunCpuSeqParams);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspExecuteSequencerBuffer(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, void *pRunCpuSeqParams) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspExecuteSequencerBuffer(pGpu, pKernelGsp, pRunCpuSeqParams) kgspExecuteSequencerBuffer_IMPL(pGpu, pKernelGsp, pRunCpuSeqParams)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspParseFwsecUcodeFromVbiosImg_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const KernelGspVbiosImg *const pVbiosImg, KernelGspFlcnUcode **ppFwsecUcode, NvU64 *pVbiosVersionCombined);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspParseFwsecUcodeFromVbiosImg(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const KernelGspVbiosImg *const pVbiosImg, KernelGspFlcnUcode **ppFwsecUcode, NvU64 *pVbiosVersionCombined) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspParseFwsecUcodeFromVbiosImg(pGpu, pKernelGsp, pVbiosImg, ppFwsecUcode, pVbiosVersionCombined) kgspParseFwsecUcodeFromVbiosImg_IMPL(pGpu, pKernelGsp, pVbiosImg, ppFwsecUcode, pVbiosVersionCombined)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspAllocateScrubberUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppScrubberUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspAllocateScrubberUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppScrubberUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspAllocateScrubberUcodeImage(pGpu, pKernelGsp, ppScrubberUcode) kgspAllocateScrubberUcodeImage_IMPL(pGpu, pKernelGsp, ppScrubberUcode)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspAllocateBooterLoadUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterLoadUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspAllocateBooterLoadUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterLoadUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspAllocateBooterLoadUcodeImage(pGpu, pKernelGsp, ppBooterLoadUcode) kgspAllocateBooterLoadUcodeImage_IMPL(pGpu, pKernelGsp, ppBooterLoadUcode)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspAllocateBooterUnloadUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterUnloadUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspAllocateBooterUnloadUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterUnloadUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspAllocateBooterUnloadUcodeImage(pGpu, pKernelGsp, ppBooterUnloadUcode) kgspAllocateBooterUnloadUcodeImage_IMPL(pGpu, pKernelGsp, ppBooterUnloadUcode)
#endif //__nvoc_kernel_gsp_h_disabled

#undef PRIVATE_FIELD


NV_STATUS rpcRmApiControl_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hObject,
                              NvU32 cmd, void *pParamStructPtr, NvU32 paramsSize);
NV_STATUS rpcRmApiAlloc_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hParent,
                            NvHandle hObject, NvU32 hClass, void *pAllocParams, NvU32 allocParamsSize);
NV_STATUS rpcRmApiDupObject_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hParent, NvHandle *phObject,
                                NvHandle hClientSrc, NvHandle hObjectSrc, NvU32 flags);
NV_STATUS rpcRmApiFree_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hObject);
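
/*
 * The rpcRmApi*_GSP routines above are the GSP-backed implementations of the
 * basic RM API operations (control, alloc, dup-object, free): when RM runs
 * with GSP offload they forward the corresponding requests to GSP-RM over
 * RPC. They are normally reached through an RM_API function-pointer table
 * rather than called directly; a minimal direct-call sketch (with
 * hypothetical handle values) would look like:
 *
 *     NV_STATUS status = rpcRmApiFree_GSP(pRmApi, hClient, hObject);
 */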

/* Free a KernelGspVbiosImg structure */
void kgspFreeVbiosImg(KernelGspVbiosImg *pVbiosImg);
/* Free a KernelGspFlcnUcode structure */
void kgspFreeFlcnUcode(KernelGspFlcnUcode *pFlcnUcode);
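
/*
 * Illustrative usage of the helpers above (a sketch only; the VBIOS image
 * extraction step and error handling policy are omitted): FWSEC ucode parsed
 * out of a VBIOS image is released with kgspFreeFlcnUcode, and the image
 * itself with kgspFreeVbiosImg.
 *
 *     KernelGspVbiosImg *pVbiosImg = ...;  // obtained elsewhere
 *     KernelGspFlcnUcode *pFwsecUcode = NULL;
 *     NvU64 vbiosVersionCombined = 0;
 *
 *     NV_STATUS status = kgspParseFwsecUcodeFromVbiosImg(pGpu, pKernelGsp,
 *         pVbiosImg, &pFwsecUcode, &vbiosVersionCombined);
 *     if (status == NV_OK)
 *     {
 *         // ... use pFwsecUcode ...
 *         kgspFreeFlcnUcode(pFwsecUcode);
 *     }
 *     kgspFreeVbiosImg(pVbiosImg);
 */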

#endif // KERNEL_GSP_H

#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_KERNEL_GSP_NVOC_H_