1 #ifndef _G_KERNEL_GSP_NVOC_H_
2 #define _G_KERNEL_GSP_NVOC_H_
3 #include "nvoc/runtime.h"
4
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8
9 /*
10 * SPDX-FileCopyrightText: Copyright (c) 2017-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11 * SPDX-License-Identifier: MIT
12 *
13 * Permission is hereby granted, free of charge, to any person obtaining a
14 * copy of this software and associated documentation files (the "Software"),
15 * to deal in the Software without restriction, including without limitation
16 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17 * and/or sell copies of the Software, and to permit persons to whom the
18 * Software is furnished to do so, subject to the following conditions:
19 *
20 * The above copyright notice and this permission notice shall be included in
21 * all copies or substantial portions of the Software.
22 *
23 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29 * DEALINGS IN THE SOFTWARE.
30 */
31
32 #include "g_kernel_gsp_nvoc.h"
33
34 #ifndef KERNEL_GSP_H
35 #define KERNEL_GSP_H
36
37 /*!
38 * This file provides definitions for all KernelGsp data structures
39 * and interfaces. KernelGsp is responsible for initiating the boot
40 * of RM on the GSP core (GSP-RM) and helps facilitate communication
41 * between Kernel RM and GSP-RM.
42 */
43
44 #include "core/core.h"
45 #include "core/bin_data.h"
46 #include "gpu/eng_state.h"
47 #include "gpu/intr/intr_service.h"
48 #include "gpu/falcon/kernel_falcon.h"
49 #include "gpu/gsp/gsp_static_config.h"
50 #include "gpu/gsp/gsp_init_args.h"
51 #include "gpu/gsp/gsp_fw_heap.h"
52 #include "nv-firmware.h"
53 #include "nv_sriov_defines.h"
54 #include "rmRiscvUcode.h"
55
56 #include "libos_init_args.h"
57 #include "gsp_fw_wpr_meta.h"
58 #include "gsp_fw_sr_meta.h"
59 #include "liblogdecode.h"
60
61 /*!
62 * Forward declarations
63 */
64 typedef struct SimAccessBuffer SimAccessBuffer;
65 typedef struct GSP_FMC_BOOT_PARAMS GSP_FMC_BOOT_PARAMS;
66
67 /*!
68 * Structure for VBIOS image for early FRTS.
69 */
70 typedef struct KernelGspVbiosImg
71 {
72 NvU8 *pImage;
73 NvU32 biosSize;
74 NvU32 expansionRomOffset;
75 } KernelGspVbiosImg;
76
77 /*!
78 * Variant of KernelGspFlcnUcode representing a non-Boot-from-HS ucode that
79 * loads directly without the generic falcon bootloader.
80 */
81 typedef struct KernelGspFlcnUcodeBootDirect
82 {
83 NvU8 *pImage;
84 NvU32 size;
85
86 NvU32 imemSize;
87 NvU32 imemNsSize;
88 NvU32 imemNsPa;
89 NvU32 imemSecSize;
90 NvU32 imemSecPa;
91
92 NvU32 dataOffset;
93 NvU32 dmemSize;
94 NvU32 dmemPa;
95 } KernelGspFlcnUcodeBootDirect;
96
97 /*!
98 * Variant of KernelGspFlcnUcode representing a non-Boot-from-HS ucode that
99 * loads via the generic falcon bootloader.
100 */
101 typedef struct KernelGspFlcnUcodeBootWithLoader
102 {
103 MEMORY_DESCRIPTOR *pCodeMemDesc;
104 MEMORY_DESCRIPTOR *pDataMemDesc;
105
106 NvU32 codeOffset;
107 NvU32 imemSize;
108 NvU32 imemNsSize;
109 NvU32 imemNsPa;
110 NvU32 imemSecSize;
111 NvU32 imemSecPa;
112 NvU32 codeEntry;
113
114 NvU32 dataOffset;
115 NvU32 dmemSize;
116 NvU32 dmemPa;
117
118 // Extra fields used for falcon ucodes from VBIOS
119 NvU32 interfaceOffset;
120 } KernelGspFlcnUcodeBootWithLoader;
121
122 /*!
123 * Variant of KernelGspFlcnUcode representing a Boot-from-HS ucode.
124 */
125 typedef struct KernelGspFlcnUcodeBootFromHs
126 {
127 MEMORY_DESCRIPTOR *pUcodeMemDesc;
128 NvU32 size;
129
130 NvU32 codeOffset;
131 NvU32 imemSize;
132 NvU32 imemPa;
133 NvU32 imemVa;
134
135 NvU32 dataOffset;
136 NvU32 dmemSize;
137 NvU32 dmemPa;
138 NvU32 dmemVa;
139
140 NvU32 hsSigDmemAddr;
141 NvU32 ucodeId;
142 NvU32 engineIdMask;
143
144 // Extra fields used for falcon ucodes from VBIOS
145 NvU32 *pSignatures;
146 NvU32 signaturesTotalSize; // size of buffer pointed by pSignatures
147 NvU32 sigSize; // size of one signature
148 NvU32 sigCount;
149
150 NvU32 vbiosSigVersions;
151 NvU32 interfaceOffset;
152 } KernelGspFlcnUcodeBootFromHs;
153
154 /*!
155 * Type of KernelGspFlcnUcode. Used as tag in tagged union KernelGspFlcnUcode.
156 * Affects how the ucode is loaded/booted.
157 */
158 typedef enum KernelGspFlcnUcodeBootType
159 {
160 KGSP_FLCN_UCODE_BOOT_DIRECT,
161 KGSP_FLCN_UCODE_BOOT_WITH_LOADER,
162 KGSP_FLCN_UCODE_BOOT_FROM_HS
163 } KernelGspFlcnUcodeBootType;
164
165 /*!
166 * RPC processing trigger
167 */
168 typedef enum KernelGspRpcEventHandlerContext
169 {
170 KGSP_RPC_EVENT_HANDLER_CONTEXT_POLL, // called after issuing an RPC
171 KGSP_RPC_EVENT_HANDLER_CONTEXT_POLL_BOOTUP, // called from kgspWaitForRmInitDone
172 KGSP_RPC_EVENT_HANDLER_CONTEXT_INTERRUPT // called in bottom-half interrupt path
173 } KernelGspRpcEventHandlerContext;
174
175 /*!
176 * Tagged union of falcon ucode variants used by early FRTS and GSP-RM boot.
177 */
178 typedef struct KernelGspFlcnUcode
179 {
180 KernelGspFlcnUcodeBootType bootType;
181 union
182 {
183 KernelGspFlcnUcodeBootDirect ucodeBootDirect;
184 KernelGspFlcnUcodeBootWithLoader ucodeBootWithLoader;
185 KernelGspFlcnUcodeBootFromHs ucodeBootFromHs;
186 };
187 } KernelGspFlcnUcode;
188
189 /*!
190 * Structure for used for executing a FWSEC command
191 */
192 typedef struct KernelGspPreparedFwsecCmd
193 {
194 KernelGspFlcnUcode *pFwsecUcode;
195 NvU32 cmd;
196 NvU64 frtsOffset;
197 } KernelGspPreparedFwsecCmd;
198
199 /*!
200 * GSP-RM source when running in Emulated/Simulated RISCV environment is
201 * extremely slow, so we need a factor (X) to scale timeouts by.
202 */
203 #define GSP_SCALE_TIMEOUT_EMU_SIM 2500
204
205 /*!
206 * Size of libos init arguments packet.
207 */
208 #define LIBOS_INIT_ARGUMENTS_SIZE 0x1000
209
210 /*!
211 * Structure for passing GSP-RM firmware data
212 */
213 typedef struct GSP_FIRMWARE
214 {
215 const void *pBuf; // buffer holding the firmware (ucode)
216 NvU32 size; // size of the firmware
217 const void *pImageData; // points to the GSP FW image start inside the pBuf buffer
218 NvU64 imageSize; // GSP FW image size inside the pBuf buffer
219 const void *pSignatureData; // points to the GSP FW signature start inside the pBuf buffer
220 NvU64 signatureSize; // GSP FW signature size inside the pBuf buffer
221 const void *pLogElf; // firmware logging section and symbol information to decode logs
222 NvU32 logElfSize; // size of the gsp log elf binary
223 } GSP_FIRMWARE;
224
225 /*!
226 * Known ELF section names (or name prefixes) of gsp_*.bin or gsp_log_*.bin.
227 */
228 #define GSP_VERSION_SECTION_NAME ".fwversion"
229 #define GSP_IMAGE_SECTION_NAME ".fwimage"
230 #define GSP_LOGGING_SECTION_NAME ".fwlogging"
231 #define GSP_SIGNATURE_SECTION_NAME_PREFIX ".fwsignature_"
232 #define GSP_CC_SIGNATURE_SECTION_NAME_PREFIX ".fwsignature_cc_"
233
234 /*!
235 * GSP Notify op infra. Used by UVM in HCC mode.
236 */
237 #define GSP_NOTIFY_OP_RESERVED_OPCODE 0
238 // Request fault buffer flush.
239 #define GSP_NOTIFY_OP_FLUSH_REPLAYABLE_FAULT_BUFFER_OPCODE 1
240 #define GSP_NOTIFY_OP_FLUSH_REPLAYABLE_FAULT_BUFFER_VALID_ARGC 1
241 #define GSP_NOTIFY_OP_FLUSH_REPLAYABLE_FAULT_BUFFER_FLUSH_MODE_ARGIDX 0
242 // Fault on prefetch toggle.
243 #define GSP_NOTIFY_OP_TOGGLE_FAULT_ON_PREFETCH_OPCODE 2
244 #define GSP_NOTIFY_OP_TOGGLE_FAULT_ON_PREFETCH_VALID_ARGC 1
245 #define GSP_NOTIFY_OP_TOGGLE_FAULT_ON_PREFETCH_EN_ARGIDX 0
246 // Always keep this as the last defined value
247 #define GSP_NOTIFY_OP_OPCODE_MAX 3
248 #define GSP_NOTIFY_OP_NO_ARGUMENTS 0
249 #define GSP_NOTIFY_OP_MAX_ARGUMENT_COUNT 1
/*!
 * Shared surface for notify-op communication between Kernel RM and GSP.
 * Access is serialized via atomic swap on inUse (see field comment).
 */
typedef struct NotifyOpSharedSurface
{
    NvU32 inUse;               // 0 - signals free, 1 - signals busy
                               // An atomic swap is issued in a loop over this field from the
                               // KernelRM side to synchronize access to the shared notify op resource.
                               // Once the operation finishes the exiting thread flips the value back to 0.
    NvU32 seqNum;              // Read by KernelRM; Written by GSP. Provides synchronization so the
                               // requester knows when the operation is finished by GSP.
    NvU32 opCode;              // Written by KernelRM; Read by GSP. Specifies the operation to be performed.
    NvU32 status;              // Read by KernelRM; Written by GSP. Specifies the status of the operation.
                               // Becomes valid for the current operation after seqNum is incremented.
    NvU32 argc;                // Written by KernelRM; Read by GSP. Specifies the number of arguments.
    NvU32 args[GSP_NOTIFY_OP_MAX_ARGUMENT_COUNT]; // Written by KernelRM; Read by GSP. Contains a list of NvU32 args used
                                                  // by the operation.
} NotifyOpSharedSurface;
265
266 /*!
267 * Index into libosLogDecode array.
268 */
269 enum
270 {
271 LOGIDX_INIT,
272 LOGIDX_INTR,
273 LOGIDX_RM,
274 LOGIDX_SIZE
275 };
276
277 /*!
278 * LIBOS task logging.
279 */
280 typedef struct
281 {
282 /* Memory for task logging */
283 MEMORY_DESCRIPTOR *pTaskLogDescriptor;
284 NvU64 *pTaskLogBuffer;
285 NvP64 pTaskLogMappingPriv;
286 NvU64 id8;
287 } RM_LIBOS_LOG_MEM;
288
289 /*!
290 * KernelGsp object definition
291 */
292
293 // Private field names are wrapped in PRIVATE_FIELD, which does nothing for
294 // the matching C source file, but causes diagnostics to be issued if another
295 // source file references the field.
296 #ifdef NVOC_KERNEL_GSP_H_PRIVATE_ACCESS_ALLOWED
297 #define PRIVATE_FIELD(x) x
298 #else
299 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
300 #endif
301
struct MESSAGE_QUEUE_COLLECTION;


// NVOC-generated object layout for KernelGsp. Do not hand-reorder fields:
// the layout is ABI between generated sources. The struct consists of
// (1) RTTI + embedded base classes, (2) pre-computed base-cast pointers,
// (3) the per-object virtual function table, (4) data members.
struct KernelGsp {
    // RTTI and embedded base classes (OBJENGSTATE, IntrService, KernelFalcon)
    const struct NVOC_RTTI *__nvoc_rtti;
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct IntrService __nvoc_base_IntrService;
    struct KernelFalcon __nvoc_base_KernelFalcon;
    // Pre-computed pointers to each ancestor, filled in at object construction
    struct Object *__nvoc_pbase_Object;
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
    struct IntrService *__nvoc_pbase_IntrService;
    struct CrashCatEngine *__nvoc_pbase_CrashCatEngine;
    struct KernelCrashCatEngine *__nvoc_pbase_KernelCrashCatEngine;
    struct KernelFalcon *__nvoc_pbase_KernelFalcon;
    struct KernelGsp *__nvoc_pbase_KernelGsp;
    // Per-object virtual function table; entries are invoked through the
    // kgsp*_DISPATCH macros defined later in this header.
    NV_STATUS (*__kgspConstructEngine__)(struct OBJGPU *, struct KernelGsp *, ENGDESCRIPTOR);
    void (*__kgspRegisterIntrService__)(struct OBJGPU *, struct KernelGsp *, IntrServiceRecord *);
    NvU32 (*__kgspServiceInterrupt__)(struct OBJGPU *, struct KernelGsp *, IntrServiceServiceInterruptArguments *);
    void (*__kgspConfigureFalcon__)(struct OBJGPU *, struct KernelGsp *);
    NvBool (*__kgspIsDebugModeEnabled__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspAllocBootArgs__)(struct OBJGPU *, struct KernelGsp *);
    void (*__kgspFreeBootArgs__)(struct OBJGPU *, struct KernelGsp *);
    void (*__kgspProgramLibosBootArgsAddr__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspSetCmdQueueHead__)(struct OBJGPU *, struct KernelGsp *, NvU32, NvU32);
    NV_STATUS (*__kgspPrepareForBootstrap__)(struct OBJGPU *, struct KernelGsp *, GSP_FIRMWARE *);
    NV_STATUS (*__kgspBootstrap__)(struct OBJGPU *, struct KernelGsp *, GSP_FIRMWARE *);
    void (*__kgspGetGspRmBootUcodeStorage__)(struct OBJGPU *, struct KernelGsp *, BINDATA_STORAGE **, BINDATA_STORAGE **);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmBoot__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveConcatenatedFMCDesc__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveConcatenatedFMC__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmFmcGfwDebugSigned__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmFmcGfwProdSigned__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmCcFmcGfwProdSigned__)(struct KernelGsp *);
    NV_STATUS (*__kgspCalculateFbLayout__)(struct OBJGPU *, struct KernelGsp *, GSP_FIRMWARE *);
    NvU32 (*__kgspGetNonWprHeapSize__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspExecuteSequencerCommand__)(struct OBJGPU *, struct KernelGsp *, NvU32, NvU32 *, NvU32);
    NvU32 (*__kgspReadUcodeFuseVersion__)(struct OBJGPU *, struct KernelGsp *, NvU32);
    NV_STATUS (*__kgspResetHw__)(struct OBJGPU *, struct KernelGsp *);
    NvBool (*__kgspHealthCheck__)(struct OBJGPU *, struct KernelGsp *);
    NvU32 (*__kgspService__)(struct OBJGPU *, struct KernelGsp *);
    NvBool (*__kgspIsWpr2Up__)(struct OBJGPU *, struct KernelGsp *);
    NvU32 (*__kgspGetFrtsSize__)(struct OBJGPU *, struct KernelGsp *);
    NvU64 (*__kgspGetPrescrubbedTopFbSize__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspExtractVbiosFromRom__)(struct OBJGPU *, struct KernelGsp *, KernelGspVbiosImg **);
    NV_STATUS (*__kgspPrepareForFwsecFrts__)(struct OBJGPU *, struct KernelGsp *, KernelGspFlcnUcode *, const NvU64, KernelGspPreparedFwsecCmd *);
    NV_STATUS (*__kgspPrepareForFwsecSb__)(struct OBJGPU *, struct KernelGsp *, KernelGspFlcnUcode *, KernelGspPreparedFwsecCmd *);
    NV_STATUS (*__kgspExecuteFwsec__)(struct OBJGPU *, struct KernelGsp *, KernelGspPreparedFwsecCmd *);
    NV_STATUS (*__kgspExecuteScrubberIfNeeded__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspExecuteBooterLoad__)(struct OBJGPU *, struct KernelGsp *, const NvU64);
    NV_STATUS (*__kgspExecuteBooterUnloadIfNeeded__)(struct OBJGPU *, struct KernelGsp *, const NvU64);
    NV_STATUS (*__kgspExecuteHsFalcon__)(struct OBJGPU *, struct KernelGsp *, KernelGspFlcnUcode *, struct KernelFalcon *, NvU32 *, NvU32 *);
    NV_STATUS (*__kgspWaitForProcessorSuspend__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspSavePowerMgmtState__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspRestorePowerMgmtState__)(struct OBJGPU *, struct KernelGsp *);
    void (*__kgspFreeSuspendResumeData__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspWaitForGfwBootOk__)(struct OBJGPU *, struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveBooterLoadUcode__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveBooterUnloadUcode__)(struct KernelGsp *);
    NvU64 (*__kgspGetMinWprHeapSizeMB__)(struct OBJGPU *, struct KernelGsp *);
    NvU64 (*__kgspGetMaxWprHeapSizeMB__)(struct OBJGPU *, struct KernelGsp *);
    NvU32 (*__kgspGetFwHeapParamOsCarveoutSize__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspInitVgpuPartitionLogging__)(struct OBJGPU *, struct KernelGsp *, NvU32, NvU64, NvU64, NvU64, NvU64);
    NV_STATUS (*__kgspFreeVgpuPartitionLogging__)(struct OBJGPU *, struct KernelGsp *, NvU32);
    const char *(*__kgspGetSignatureSectionNamePrefix__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspSetupGspFmcArgs__)(struct OBJGPU *, struct KernelGsp *, GSP_FIRMWARE *);
    void (*__kgspReadEmem__)(struct KernelGsp *, NvU64, NvU64, void *);
    NV_STATUS (*__kgspIssueNotifyOp__)(struct OBJGPU *, struct KernelGsp *, NvU32, NvU32 *, NvU32);
    NV_STATUS (*__kgspCheckGspRmCcCleanup__)(struct OBJGPU *, struct KernelGsp *);
    NvBool (*__kgspConfigured__)(struct KernelGsp *);
    NvU32 (*__kgspPriRead__)(struct KernelGsp *, NvU32);
    void (*__kgspRegWrite__)(struct OBJGPU *, struct KernelGsp *, NvU32, NvU32);
    NvU32 (*__kgspMaskDmemAddr__)(struct OBJGPU *, struct KernelGsp *, NvU32);
    void (*__kgspStateDestroy__)(POBJGPU, struct KernelGsp *);
    void (*__kgspVprintf__)(struct KernelGsp *, NvBool, const char *, va_list);
    NvBool (*__kgspClearInterrupt__)(struct OBJGPU *, struct KernelGsp *, IntrServiceClearInterruptArguments *);
    void (*__kgspPriWrite__)(struct KernelGsp *, NvU32, NvU32);
    void *(*__kgspMapBufferDescriptor__)(struct KernelGsp *, CrashCatBufferDescriptor *);
    void (*__kgspSyncBufferDescriptor__)(struct KernelGsp *, CrashCatBufferDescriptor *, NvU32, NvU32);
    NvU32 (*__kgspRegRead__)(struct OBJGPU *, struct KernelGsp *, NvU32);
    NvBool (*__kgspIsPresent__)(POBJGPU, struct KernelGsp *);
    NV_STATUS (*__kgspStateLoad__)(POBJGPU, struct KernelGsp *, NvU32);
    const NvU32 *(*__kgspGetScratchOffsets__)(struct KernelGsp *, NV_CRASHCAT_SCRATCH_GROUP_ID);
    void (*__kgspUnload__)(struct KernelGsp *);
    NV_STATUS (*__kgspStateUnload__)(POBJGPU, struct KernelGsp *, NvU32);
    NV_STATUS (*__kgspServiceNotificationInterrupt__)(struct OBJGPU *, struct KernelGsp *, IntrServiceServiceNotificationInterruptArguments *);
    NvU32 (*__kgspGetWFL0Offset__)(struct KernelGsp *);
    NV_STATUS (*__kgspStateInitLocked__)(POBJGPU, struct KernelGsp *);
    NV_STATUS (*__kgspStatePreLoad__)(POBJGPU, struct KernelGsp *, NvU32);
    NV_STATUS (*__kgspStatePostUnload__)(POBJGPU, struct KernelGsp *, NvU32);
    NV_STATUS (*__kgspStatePreUnload__)(POBJGPU, struct KernelGsp *, NvU32);
    NV_STATUS (*__kgspStateInitUnlocked__)(POBJGPU, struct KernelGsp *);
    void (*__kgspInitMissing__)(POBJGPU, struct KernelGsp *);
    NV_STATUS (*__kgspStatePreInitLocked__)(POBJGPU, struct KernelGsp *);
    NV_STATUS (*__kgspStatePreInitUnlocked__)(POBJGPU, struct KernelGsp *);
    NV_STATUS (*__kgspStatePostLoad__)(POBJGPU, struct KernelGsp *, NvU32);
    void (*__kgspUnmapBufferDescriptor__)(struct KernelGsp *, CrashCatBufferDescriptor *);
    void (*__kgspReadDmem__)(struct KernelGsp *, NvU32, NvU32, void *);
    // Data members.
    // Message-queue / RPC channels to GSP-RM
    struct MESSAGE_QUEUE_COLLECTION *pMQCollection;
    struct OBJRPC *pRpc;
    struct OBJRPC *pLocklessRpc;
    char vbiosVersionStr[16];
    // Boot ucodes (FWSEC, scrubber, booter load/unload)
    KernelGspFlcnUcode *pFwsecUcode;
    KernelGspFlcnUcode *pScrubberUcode;
    KernelGspFlcnUcode *pBooterLoadUcode;
    KernelGspFlcnUcode *pBooterUnloadUcode;
    // WPR metadata shared with GSP-RM (memdesc + cached CPU mapping)
    MEMORY_DESCRIPTOR *pWprMetaDescriptor;
    GspFwWprMeta *pWprMeta;
    NvP64 pWprMetaMappingPriv;
    KernelGspPreparedFwsecCmd *pPreparedFwsecCmd;
    // Suspend/resume metadata
    MEMORY_DESCRIPTOR *pSRMetaDescriptor;
    MEMORY_DESCRIPTOR *pSRRadix3Descriptor;
    // GSP FMC boot arguments (memdesc + cached CPU mapping)
    MEMORY_DESCRIPTOR *pGspFmcArgumentsDescriptor;
    GSP_FMC_BOOT_PARAMS *pGspFmcArgumentsCached;
    NvP64 pGspFmcArgumentsMappingPriv;
    // libos init arguments (memdesc + cached CPU mapping)
    MEMORY_DESCRIPTOR *pLibosInitArgumentsDescriptor;
    LibosMemoryRegionInitArgument *pLibosInitArgumentsCached;
    NvP64 pLibosInitArgumentsMappingPriv;
    // GSP-RM arguments (memdesc + cached CPU mapping)
    MEMORY_DESCRIPTOR *pGspArgumentsDescriptor;
    GSP_ARGUMENTS_CACHED *pGspArgumentsCached;
    NvP64 pGspArgumentsMappingPriv;
    // GSP-RM boot ucode image and descriptor
    MEMORY_DESCRIPTOR *pGspRmBootUcodeMemdesc;
    NvP64 pGspRmBootUcodeMemdescPriv;
    NvU32 gspRmBootUcodeSize;
    NvU8 *pGspRmBootUcodeImage;
    RM_RISCV_UCODE_DESC *pGspRmBootUcodeDesc;
    MEMORY_DESCRIPTOR *pGspUCodeRadix3Descriptor;
    MEMORY_DESCRIPTOR *pSignatureMemdesc;
    // GSP / vGPU-partition log decoding state (array sizes: 3 matches
    // LOGIDX_SIZE; 32 is presumably the max vGPU partitions — confirm)
    LIBOS_LOG_DECODE logDecode;
    LIBOS_LOG_DECODE logDecodeVgpuPartition[32];
    RM_LIBOS_LOG_MEM rmLibosLogMem[3];
    RM_LIBOS_LOG_MEM gspPluginInitTaskLogMem[32];
    RM_LIBOS_LOG_MEM gspPluginVgpuTaskLogMem[32];
    NvBool bHasVgpuLogs;
    void *pLogElf;
    NvU64 logElfDataSize;
    PORT_MUTEX *pNvlogFlushMtx;
    NvBool bLibosLogsPollingEnabled;
    // Boot / lifecycle state flags
    NvU8 bootAttempts;
    NvBool bInInit;
    NvBool bInLockdown;
    NvBool bPollingForRpcResponse;
    NvBool bFatalError;
    // Simulation access buffer
    MEMORY_DESCRIPTOR *pMemDesc_simAccessBuf;
    SimAccessBuffer *pSimAccessBuf;
    NvP64 pSimAccessBufPriv;
    // Notify-op shared surface (see NotifyOpSharedSurface)
    MEMORY_DESCRIPTOR *pNotifyOpSurfMemDesc;
    NotifyOpSharedSurface *pNotifyOpSurf;
    NvP64 pNotifyOpSurfPriv;
    // Profiler sample buffer
    MEMORY_DESCRIPTOR *pProfilerSamplesMD;
    void *pProfilerSamplesMDPriv;
    void *pProfilerSamples;
    // Static configuration reported by GSP-RM
    GspStaticConfigInfo gspStaticInfo;
    NvBool bIsTaskIsrQueueRequired;
    NvBool bPartitionedFmc;
    NvBool bScrubberUcodeSupported;
    NvU32 fwHeapParamBaseSize;
    NvBool bBootGspRmWithBoostClocks;
    NvU8 ememPort;
};
461
// NVOC class boilerplate: typedef, class id, class definition symbol, and
// cast/creation helpers for KernelGsp.
#ifndef __NVOC_CLASS_KernelGsp_TYPEDEF__
#define __NVOC_CLASS_KernelGsp_TYPEDEF__
typedef struct KernelGsp KernelGsp;
#endif /* __NVOC_CLASS_KernelGsp_TYPEDEF__ */

#ifndef __nvoc_class_id_KernelGsp
#define __nvoc_class_id_KernelGsp 0x311d4e
#endif /* __nvoc_class_id_KernelGsp */

extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelGsp;

// Upcast via the pre-computed base pointer; always safe on a valid object.
#define __staticCast_KernelGsp(pThis) \
    ((pThis)->__nvoc_pbase_KernelGsp)

// Downcast: returns NULL when the class is compiled out, otherwise performs
// an RTTI-checked dynamic cast.
#ifdef __nvoc_kernel_gsp_h_disabled
#define __dynamicCast_KernelGsp(pThis) ((KernelGsp*)NULL)
#else //__nvoc_kernel_gsp_h_disabled
#define __dynamicCast_KernelGsp(pThis) \
    ((KernelGsp*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelGsp)))
#endif //__nvoc_kernel_gsp_h_disabled

// PDB property indirection through the OBJENGSTATE base.
#define PDB_PROP_KGSP_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KGSP_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING

NV_STATUS __nvoc_objCreateDynamic_KernelGsp(KernelGsp**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelGsp(KernelGsp**, Dynamic*, NvU32);
#define __objCreate_KernelGsp(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelGsp((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
491
// Dispatch macros: each kgspXxx(...) invocation routes through the matching
// __kgspXxx__ function pointer in the KernelGsp vtable. The _HAL variants
// expand to the same dispatch (HAL selection happens when the vtable is
// populated, not at the call site).
#define kgspConstructEngine(pGpu, pKernelGsp, arg0) kgspConstructEngine_DISPATCH(pGpu, pKernelGsp, arg0)
#define kgspRegisterIntrService(pGpu, pKernelGsp, pRecords) kgspRegisterIntrService_DISPATCH(pGpu, pKernelGsp, pRecords)
#define kgspServiceInterrupt(pGpu, pKernelGsp, pParams) kgspServiceInterrupt_DISPATCH(pGpu, pKernelGsp, pParams)
#define kgspConfigureFalcon(pGpu, pKernelGsp) kgspConfigureFalcon_DISPATCH(pGpu, pKernelGsp)
#define kgspConfigureFalcon_HAL(pGpu, pKernelGsp) kgspConfigureFalcon_DISPATCH(pGpu, pKernelGsp)
#define kgspIsDebugModeEnabled(pGpu, pKernelGsp) kgspIsDebugModeEnabled_DISPATCH(pGpu, pKernelGsp)
#define kgspIsDebugModeEnabled_HAL(pGpu, pKernelGsp) kgspIsDebugModeEnabled_DISPATCH(pGpu, pKernelGsp)
#define kgspAllocBootArgs(pGpu, pKernelGsp) kgspAllocBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspAllocBootArgs_HAL(pGpu, pKernelGsp) kgspAllocBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspFreeBootArgs(pGpu, pKernelGsp) kgspFreeBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspFreeBootArgs_HAL(pGpu, pKernelGsp) kgspFreeBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspProgramLibosBootArgsAddr(pGpu, pKernelGsp) kgspProgramLibosBootArgsAddr_DISPATCH(pGpu, pKernelGsp)
#define kgspProgramLibosBootArgsAddr_HAL(pGpu, pKernelGsp) kgspProgramLibosBootArgsAddr_DISPATCH(pGpu, pKernelGsp)
#define kgspSetCmdQueueHead(pGpu, pKernelGsp, queueIdx, value) kgspSetCmdQueueHead_DISPATCH(pGpu, pKernelGsp, queueIdx, value)
#define kgspSetCmdQueueHead_HAL(pGpu, pKernelGsp, queueIdx, value) kgspSetCmdQueueHead_DISPATCH(pGpu, pKernelGsp, queueIdx, value)
#define kgspPrepareForBootstrap(pGpu, pKernelGsp, pGspFw) kgspPrepareForBootstrap_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspPrepareForBootstrap_HAL(pGpu, pKernelGsp, pGspFw) kgspPrepareForBootstrap_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspBootstrap(pGpu, pKernelGsp, pGspFw) kgspBootstrap_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspBootstrap_HAL(pGpu, pKernelGsp, pGspFw) kgspBootstrap_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspGetGspRmBootUcodeStorage(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc) kgspGetGspRmBootUcodeStorage_DISPATCH(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc)
#define kgspGetGspRmBootUcodeStorage_HAL(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc) kgspGetGspRmBootUcodeStorage_DISPATCH(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc)
#define kgspGetBinArchiveGspRmBoot(pKernelGsp) kgspGetBinArchiveGspRmBoot_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmBoot_HAL(pKernelGsp) kgspGetBinArchiveGspRmBoot_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMCDesc(pKernelGsp) kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMCDesc_HAL(pKernelGsp) kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMC(pKernelGsp) kgspGetBinArchiveConcatenatedFMC_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMC_HAL(pKernelGsp) kgspGetBinArchiveConcatenatedFMC_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwDebugSigned(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwDebugSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwProdSigned(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwProdSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmCcFmcGfwProdSigned(pKernelGsp) kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmCcFmcGfwProdSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspCalculateFbLayout(pGpu, pKernelGsp, pGspFw) kgspCalculateFbLayout_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspCalculateFbLayout_HAL(pGpu, pKernelGsp, pGspFw) kgspCalculateFbLayout_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspGetNonWprHeapSize(pGpu, pKernelGsp) kgspGetNonWprHeapSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetNonWprHeapSize_HAL(pGpu, pKernelGsp) kgspGetNonWprHeapSize_DISPATCH(pGpu, pKernelGsp)
#define kgspExecuteSequencerCommand(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize) kgspExecuteSequencerCommand_DISPATCH(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize)
#define kgspExecuteSequencerCommand_HAL(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize) kgspExecuteSequencerCommand_DISPATCH(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize)
#define kgspReadUcodeFuseVersion(pGpu, pKernelGsp, ucodeId) kgspReadUcodeFuseVersion_DISPATCH(pGpu, pKernelGsp, ucodeId)
#define kgspReadUcodeFuseVersion_HAL(pGpu, pKernelGsp, ucodeId) kgspReadUcodeFuseVersion_DISPATCH(pGpu, pKernelGsp, ucodeId)
#define kgspResetHw(pGpu, pKernelGsp) kgspResetHw_DISPATCH(pGpu, pKernelGsp)
#define kgspResetHw_HAL(pGpu, pKernelGsp) kgspResetHw_DISPATCH(pGpu, pKernelGsp)
#define kgspHealthCheck(pGpu, pKernelGsp) kgspHealthCheck_DISPATCH(pGpu, pKernelGsp)
#define kgspHealthCheck_HAL(pGpu, pKernelGsp) kgspHealthCheck_DISPATCH(pGpu, pKernelGsp)
#define kgspService(pGpu, pKernelGsp) kgspService_DISPATCH(pGpu, pKernelGsp)
#define kgspService_HAL(pGpu, pKernelGsp) kgspService_DISPATCH(pGpu, pKernelGsp)
#define kgspIsWpr2Up(pGpu, pKernelGsp) kgspIsWpr2Up_DISPATCH(pGpu, pKernelGsp)
#define kgspIsWpr2Up_HAL(pGpu, pKernelGsp) kgspIsWpr2Up_DISPATCH(pGpu, pKernelGsp)
#define kgspGetFrtsSize(pGpu, pKernelGsp) kgspGetFrtsSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetFrtsSize_HAL(pGpu, pKernelGsp) kgspGetFrtsSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetPrescrubbedTopFbSize(pGpu, pKernelGsp) kgspGetPrescrubbedTopFbSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetPrescrubbedTopFbSize_HAL(pGpu, pKernelGsp) kgspGetPrescrubbedTopFbSize_DISPATCH(pGpu, pKernelGsp)
#define kgspExtractVbiosFromRom(pGpu, pKernelGsp, ppVbiosImg) kgspExtractVbiosFromRom_DISPATCH(pGpu, pKernelGsp, ppVbiosImg)
#define kgspExtractVbiosFromRom_HAL(pGpu, pKernelGsp, ppVbiosImg) kgspExtractVbiosFromRom_DISPATCH(pGpu, pKernelGsp, ppVbiosImg)
#define kgspPrepareForFwsecFrts(pGpu, pKernelGsp, pFwsecUcode, frtsOffset, preparedCmd) kgspPrepareForFwsecFrts_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, frtsOffset, preparedCmd)
#define kgspPrepareForFwsecFrts_HAL(pGpu, pKernelGsp, pFwsecUcode, frtsOffset, preparedCmd) kgspPrepareForFwsecFrts_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, frtsOffset, preparedCmd)
#define kgspPrepareForFwsecSb(pGpu, pKernelGsp, pFwsecUcode, preparedCmd) kgspPrepareForFwsecSb_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, preparedCmd)
#define kgspPrepareForFwsecSb_HAL(pGpu, pKernelGsp, pFwsecUcode, preparedCmd) kgspPrepareForFwsecSb_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, preparedCmd)
#define kgspExecuteFwsec(pGpu, pKernelGsp, preparedCmd) kgspExecuteFwsec_DISPATCH(pGpu, pKernelGsp, preparedCmd)
#define kgspExecuteFwsec_HAL(pGpu, pKernelGsp, preparedCmd) kgspExecuteFwsec_DISPATCH(pGpu, pKernelGsp, preparedCmd)
#define kgspExecuteScrubberIfNeeded(pGpu, pKernelGsp) kgspExecuteScrubberIfNeeded_DISPATCH(pGpu, pKernelGsp)
#define kgspExecuteScrubberIfNeeded_HAL(pGpu, pKernelGsp) kgspExecuteScrubberIfNeeded_DISPATCH(pGpu, pKernelGsp)
#define kgspExecuteBooterLoad(pGpu, pKernelGsp, sysmemAddrOfData) kgspExecuteBooterLoad_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfData)
#define kgspExecuteBooterLoad_HAL(pGpu, pKernelGsp, sysmemAddrOfData) kgspExecuteBooterLoad_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfData)
#define kgspExecuteBooterUnloadIfNeeded(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData) kgspExecuteBooterUnloadIfNeeded_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData)
#define kgspExecuteBooterUnloadIfNeeded_HAL(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData) kgspExecuteBooterUnloadIfNeeded_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData)
#define kgspExecuteHsFalcon(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1) kgspExecuteHsFalcon_DISPATCH(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1)
#define kgspExecuteHsFalcon_HAL(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1) kgspExecuteHsFalcon_DISPATCH(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1)
#define kgspWaitForProcessorSuspend(pGpu, pKernelGsp) kgspWaitForProcessorSuspend_DISPATCH(pGpu, pKernelGsp)
#define kgspWaitForProcessorSuspend_HAL(pGpu, pKernelGsp) kgspWaitForProcessorSuspend_DISPATCH(pGpu, pKernelGsp)
#define kgspSavePowerMgmtState(pGpu, pKernelGsp) kgspSavePowerMgmtState_DISPATCH(pGpu, pKernelGsp)
#define kgspSavePowerMgmtState_HAL(pGpu, pKernelGsp) kgspSavePowerMgmtState_DISPATCH(pGpu, pKernelGsp)
#define kgspRestorePowerMgmtState(pGpu, pKernelGsp) kgspRestorePowerMgmtState_DISPATCH(pGpu, pKernelGsp)
#define kgspRestorePowerMgmtState_HAL(pGpu, pKernelGsp) kgspRestorePowerMgmtState_DISPATCH(pGpu, pKernelGsp)
#define kgspFreeSuspendResumeData(pGpu, pKernelGsp) kgspFreeSuspendResumeData_DISPATCH(pGpu, pKernelGsp)
#define kgspFreeSuspendResumeData_HAL(pGpu, pKernelGsp) kgspFreeSuspendResumeData_DISPATCH(pGpu, pKernelGsp)
#define kgspWaitForGfwBootOk(pGpu, pKernelGsp) kgspWaitForGfwBootOk_DISPATCH(pGpu, pKernelGsp)
#define kgspWaitForGfwBootOk_HAL(pGpu, pKernelGsp) kgspWaitForGfwBootOk_DISPATCH(pGpu, pKernelGsp)
#define kgspGetBinArchiveBooterLoadUcode(pKernelGsp) kgspGetBinArchiveBooterLoadUcode_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveBooterLoadUcode_HAL(pKernelGsp) kgspGetBinArchiveBooterLoadUcode_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveBooterUnloadUcode(pKernelGsp) kgspGetBinArchiveBooterUnloadUcode_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveBooterUnloadUcode_HAL(pKernelGsp) kgspGetBinArchiveBooterUnloadUcode_DISPATCH(pKernelGsp)
#define kgspGetMinWprHeapSizeMB(pGpu, pKernelGsp) kgspGetMinWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
#define kgspGetMinWprHeapSizeMB_HAL(pGpu, pKernelGsp) kgspGetMinWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
#define kgspGetMaxWprHeapSizeMB(pGpu, pKernelGsp) kgspGetMaxWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
#define kgspGetMaxWprHeapSizeMB_HAL(pGpu, pKernelGsp) kgspGetMaxWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
#define kgspGetFwHeapParamOsCarveoutSize(pGpu, pKernelGsp) kgspGetFwHeapParamOsCarveoutSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetFwHeapParamOsCarveoutSize_HAL(pGpu, pKernelGsp) kgspGetFwHeapParamOsCarveoutSize_DISPATCH(pGpu, pKernelGsp)
#define kgspInitVgpuPartitionLogging(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize) kgspInitVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize)
#define kgspInitVgpuPartitionLogging_HAL(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize) kgspInitVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize)
#define kgspFreeVgpuPartitionLogging(pGpu, pKernelGsp, gfid) kgspFreeVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid)
#define kgspFreeVgpuPartitionLogging_HAL(pGpu, pKernelGsp, gfid) kgspFreeVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid)
#define kgspGetSignatureSectionNamePrefix(pGpu, pKernelGsp) kgspGetSignatureSectionNamePrefix_DISPATCH(pGpu, pKernelGsp)
#define kgspGetSignatureSectionNamePrefix_HAL(pGpu, pKernelGsp) kgspGetSignatureSectionNamePrefix_DISPATCH(pGpu, pKernelGsp)
#define kgspSetupGspFmcArgs(pGpu, pKernelGsp, pGspFw) kgspSetupGspFmcArgs_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspSetupGspFmcArgs_HAL(pGpu, pKernelGsp, pGspFw) kgspSetupGspFmcArgs_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspReadEmem(pKernelGsp, offset, size, pBuf) kgspReadEmem_DISPATCH(pKernelGsp, offset, size, pBuf)
#define kgspReadEmem_HAL(pKernelGsp, offset, size, pBuf) kgspReadEmem_DISPATCH(pKernelGsp, offset, size, pBuf)
#define kgspIssueNotifyOp(pGpu, pKernelGsp, opCode, pArgs, argc) kgspIssueNotifyOp_DISPATCH(pGpu, pKernelGsp, opCode, pArgs, argc)
#define kgspIssueNotifyOp_HAL(pGpu, pKernelGsp, opCode, pArgs, argc) kgspIssueNotifyOp_DISPATCH(pGpu, pKernelGsp, opCode, pArgs, argc)
#define kgspCheckGspRmCcCleanup(pGpu, pKernelGsp) kgspCheckGspRmCcCleanup_DISPATCH(pGpu, pKernelGsp)
#define kgspCheckGspRmCcCleanup_HAL(pGpu, pKernelGsp) kgspCheckGspRmCcCleanup_DISPATCH(pGpu, pKernelGsp)
#define kgspConfigured(arg0) kgspConfigured_DISPATCH(arg0)
#define kgspPriRead(arg0, offset) kgspPriRead_DISPATCH(arg0, offset)
#define kgspRegWrite(pGpu, pKernelFlcn, offset, data) kgspRegWrite_DISPATCH(pGpu, pKernelFlcn, offset, data)
598 #define kgspMaskDmemAddr(pGpu, pKernelFlcn, addr) kgspMaskDmemAddr_DISPATCH(pGpu, pKernelFlcn, addr)
599 #define kgspStateDestroy(pGpu, pEngstate) kgspStateDestroy_DISPATCH(pGpu, pEngstate)
600 #define kgspVprintf(arg0, bReportStart, fmt, args) kgspVprintf_DISPATCH(arg0, bReportStart, fmt, args)
601 #define kgspClearInterrupt(pGpu, pIntrService, pParams) kgspClearInterrupt_DISPATCH(pGpu, pIntrService, pParams)
602 #define kgspPriWrite(arg0, offset, data) kgspPriWrite_DISPATCH(arg0, offset, data)
603 #define kgspMapBufferDescriptor(arg0, pBufDesc) kgspMapBufferDescriptor_DISPATCH(arg0, pBufDesc)
604 #define kgspSyncBufferDescriptor(arg0, pBufDesc, offset, size) kgspSyncBufferDescriptor_DISPATCH(arg0, pBufDesc, offset, size)
605 #define kgspRegRead(pGpu, pKernelFlcn, offset) kgspRegRead_DISPATCH(pGpu, pKernelFlcn, offset)
606 #define kgspIsPresent(pGpu, pEngstate) kgspIsPresent_DISPATCH(pGpu, pEngstate)
607 #define kgspStateLoad(pGpu, pEngstate, arg0) kgspStateLoad_DISPATCH(pGpu, pEngstate, arg0)
608 #define kgspGetScratchOffsets(arg0, scratchGroupId) kgspGetScratchOffsets_DISPATCH(arg0, scratchGroupId)
609 #define kgspUnload(arg0) kgspUnload_DISPATCH(arg0)
610 #define kgspStateUnload(pGpu, pEngstate, arg0) kgspStateUnload_DISPATCH(pGpu, pEngstate, arg0)
611 #define kgspServiceNotificationInterrupt(pGpu, pIntrService, pParams) kgspServiceNotificationInterrupt_DISPATCH(pGpu, pIntrService, pParams)
612 #define kgspGetWFL0Offset(arg0) kgspGetWFL0Offset_DISPATCH(arg0)
613 #define kgspStateInitLocked(pGpu, pEngstate) kgspStateInitLocked_DISPATCH(pGpu, pEngstate)
614 #define kgspStatePreLoad(pGpu, pEngstate, arg0) kgspStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
615 #define kgspStatePostUnload(pGpu, pEngstate, arg0) kgspStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
616 #define kgspStatePreUnload(pGpu, pEngstate, arg0) kgspStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
617 #define kgspStateInitUnlocked(pGpu, pEngstate) kgspStateInitUnlocked_DISPATCH(pGpu, pEngstate)
618 #define kgspInitMissing(pGpu, pEngstate) kgspInitMissing_DISPATCH(pGpu, pEngstate)
619 #define kgspStatePreInitLocked(pGpu, pEngstate) kgspStatePreInitLocked_DISPATCH(pGpu, pEngstate)
620 #define kgspStatePreInitUnlocked(pGpu, pEngstate) kgspStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
621 #define kgspStatePostLoad(pGpu, pEngstate, arg0) kgspStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
622 #define kgspUnmapBufferDescriptor(arg0, pBufDesc) kgspUnmapBufferDescriptor_DISPATCH(arg0, pBufDesc)
623 #define kgspReadDmem(arg0, offset, size, pBuf) kgspReadDmem_DISPATCH(arg0, offset, size, pBuf)
624 NV_STATUS kgspConstructEngine_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, ENGDESCRIPTOR arg0);
625
kgspConstructEngine_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,ENGDESCRIPTOR arg0)626 static inline NV_STATUS kgspConstructEngine_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, ENGDESCRIPTOR arg0) {
627 return pKernelGsp->__kgspConstructEngine__(pGpu, pKernelGsp, arg0);
628 }
629
630 void kgspRegisterIntrService_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceRecord pRecords[171]);
631
kgspRegisterIntrService_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,IntrServiceRecord pRecords[171])632 static inline void kgspRegisterIntrService_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceRecord pRecords[171]) {
633 pKernelGsp->__kgspRegisterIntrService__(pGpu, pKernelGsp, pRecords);
634 }
635
636 NvU32 kgspServiceInterrupt_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceServiceInterruptArguments *pParams);
637
kgspServiceInterrupt_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,IntrServiceServiceInterruptArguments * pParams)638 static inline NvU32 kgspServiceInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceServiceInterruptArguments *pParams) {
639 return pKernelGsp->__kgspServiceInterrupt__(pGpu, pKernelGsp, pParams);
640 }
641
642 void kgspConfigureFalcon_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
643
644 void kgspConfigureFalcon_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
645
kgspConfigureFalcon_f2d351(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)646 static inline void kgspConfigureFalcon_f2d351(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
647 NV_ASSERT_PRECOMP(0);
648 }
649
kgspConfigureFalcon_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)650 static inline void kgspConfigureFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
651 pKernelGsp->__kgspConfigureFalcon__(pGpu, pKernelGsp);
652 }
653
654 NvBool kgspIsDebugModeEnabled_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
655
656 NvBool kgspIsDebugModeEnabled_GA100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
657
kgspIsDebugModeEnabled_108313(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)658 static inline NvBool kgspIsDebugModeEnabled_108313(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
659 NV_ASSERT_OR_RETURN_PRECOMP(0, ((NvBool)(0 != 0)));
660 }
661
kgspIsDebugModeEnabled_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)662 static inline NvBool kgspIsDebugModeEnabled_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
663 return pKernelGsp->__kgspIsDebugModeEnabled__(pGpu, pKernelGsp);
664 }
665
666 NV_STATUS kgspAllocBootArgs_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
667
668 NV_STATUS kgspAllocBootArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
669
kgspAllocBootArgs_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)670 static inline NV_STATUS kgspAllocBootArgs_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
671 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
672 }
673
kgspAllocBootArgs_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)674 static inline NV_STATUS kgspAllocBootArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
675 return pKernelGsp->__kgspAllocBootArgs__(pGpu, pKernelGsp);
676 }
677
678 void kgspFreeBootArgs_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
679
680 void kgspFreeBootArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
681
kgspFreeBootArgs_f2d351(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)682 static inline void kgspFreeBootArgs_f2d351(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
683 NV_ASSERT_PRECOMP(0);
684 }
685
kgspFreeBootArgs_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)686 static inline void kgspFreeBootArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
687 pKernelGsp->__kgspFreeBootArgs__(pGpu, pKernelGsp);
688 }
689
690 void kgspProgramLibosBootArgsAddr_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
691
kgspProgramLibosBootArgsAddr_f2d351(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)692 static inline void kgspProgramLibosBootArgsAddr_f2d351(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
693 NV_ASSERT_PRECOMP(0);
694 }
695
kgspProgramLibosBootArgsAddr_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)696 static inline void kgspProgramLibosBootArgsAddr_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
697 pKernelGsp->__kgspProgramLibosBootArgsAddr__(pGpu, pKernelGsp);
698 }
699
700 NV_STATUS kgspSetCmdQueueHead_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 queueIdx, NvU32 value);
701
kgspSetCmdQueueHead_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 queueIdx,NvU32 value)702 static inline NV_STATUS kgspSetCmdQueueHead_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 queueIdx, NvU32 value) {
703 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
704 }
705
kgspSetCmdQueueHead_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 queueIdx,NvU32 value)706 static inline NV_STATUS kgspSetCmdQueueHead_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 queueIdx, NvU32 value) {
707 return pKernelGsp->__kgspSetCmdQueueHead__(pGpu, pKernelGsp, queueIdx, value);
708 }
709
710 NV_STATUS kgspPrepareForBootstrap_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
711
712 NV_STATUS kgspPrepareForBootstrap_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
713
kgspPrepareForBootstrap_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_FIRMWARE * pGspFw)714 static inline NV_STATUS kgspPrepareForBootstrap_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
715 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
716 }
717
kgspPrepareForBootstrap_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_FIRMWARE * pGspFw)718 static inline NV_STATUS kgspPrepareForBootstrap_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
719 return pKernelGsp->__kgspPrepareForBootstrap__(pGpu, pKernelGsp, pGspFw);
720 }
721
722 NV_STATUS kgspBootstrap_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
723
724 NV_STATUS kgspBootstrap_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
725
kgspBootstrap_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_FIRMWARE * pGspFw)726 static inline NV_STATUS kgspBootstrap_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
727 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
728 }
729
kgspBootstrap_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_FIRMWARE * pGspFw)730 static inline NV_STATUS kgspBootstrap_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
731 return pKernelGsp->__kgspBootstrap__(pGpu, pKernelGsp, pGspFw);
732 }
733
734 void kgspGetGspRmBootUcodeStorage_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc);
735
736 void kgspGetGspRmBootUcodeStorage_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc);
737
738 void kgspGetGspRmBootUcodeStorage_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc);
739
kgspGetGspRmBootUcodeStorage_f2d351(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,BINDATA_STORAGE ** ppBinStorageImage,BINDATA_STORAGE ** ppBinStorageDesc)740 static inline void kgspGetGspRmBootUcodeStorage_f2d351(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc) {
741 NV_ASSERT_PRECOMP(0);
742 }
743
kgspGetGspRmBootUcodeStorage_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,BINDATA_STORAGE ** ppBinStorageImage,BINDATA_STORAGE ** ppBinStorageDesc)744 static inline void kgspGetGspRmBootUcodeStorage_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc) {
745 pKernelGsp->__kgspGetGspRmBootUcodeStorage__(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc);
746 }
747
748 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_TU102(struct KernelGsp *pKernelGsp);
749
750 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GA100(struct KernelGsp *pKernelGsp);
751
752 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GA102(struct KernelGsp *pKernelGsp);
753
754 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GH100(struct KernelGsp *pKernelGsp);
755
756 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_AD102(struct KernelGsp *pKernelGsp);
757
kgspGetBinArchiveGspRmBoot_80f438(struct KernelGsp * pKernelGsp)758 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_80f438(struct KernelGsp *pKernelGsp) {
759 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
760 }
761
kgspGetBinArchiveGspRmBoot_DISPATCH(struct KernelGsp * pKernelGsp)762 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_DISPATCH(struct KernelGsp *pKernelGsp) {
763 return pKernelGsp->__kgspGetBinArchiveGspRmBoot__(pKernelGsp);
764 }
765
766 const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_GH100(struct KernelGsp *pKernelGsp);
767
kgspGetBinArchiveConcatenatedFMCDesc_80f438(struct KernelGsp * pKernelGsp)768 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_80f438(struct KernelGsp *pKernelGsp) {
769 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
770 }
771
kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(struct KernelGsp * pKernelGsp)772 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(struct KernelGsp *pKernelGsp) {
773 return pKernelGsp->__kgspGetBinArchiveConcatenatedFMCDesc__(pKernelGsp);
774 }
775
776 const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_GH100(struct KernelGsp *pKernelGsp);
777
kgspGetBinArchiveConcatenatedFMC_80f438(struct KernelGsp * pKernelGsp)778 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_80f438(struct KernelGsp *pKernelGsp) {
779 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
780 }
781
kgspGetBinArchiveConcatenatedFMC_DISPATCH(struct KernelGsp * pKernelGsp)782 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_DISPATCH(struct KernelGsp *pKernelGsp) {
783 return pKernelGsp->__kgspGetBinArchiveConcatenatedFMC__(pKernelGsp);
784 }
785
786 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_GH100(struct KernelGsp *pKernelGsp);
787
kgspGetBinArchiveGspRmFmcGfwDebugSigned_80f438(struct KernelGsp * pKernelGsp)788 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_80f438(struct KernelGsp *pKernelGsp) {
789 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
790 }
791
kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(struct KernelGsp * pKernelGsp)792 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(struct KernelGsp *pKernelGsp) {
793 return pKernelGsp->__kgspGetBinArchiveGspRmFmcGfwDebugSigned__(pKernelGsp);
794 }
795
796 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_GH100(struct KernelGsp *pKernelGsp);
797
kgspGetBinArchiveGspRmFmcGfwProdSigned_80f438(struct KernelGsp * pKernelGsp)798 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_80f438(struct KernelGsp *pKernelGsp) {
799 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
800 }
801
kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(struct KernelGsp * pKernelGsp)802 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(struct KernelGsp *pKernelGsp) {
803 return pKernelGsp->__kgspGetBinArchiveGspRmFmcGfwProdSigned__(pKernelGsp);
804 }
805
806 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_GH100(struct KernelGsp *pKernelGsp);
807
kgspGetBinArchiveGspRmCcFmcGfwProdSigned_80f438(struct KernelGsp * pKernelGsp)808 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_80f438(struct KernelGsp *pKernelGsp) {
809 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
810 }
811
kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(struct KernelGsp * pKernelGsp)812 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(struct KernelGsp *pKernelGsp) {
813 return pKernelGsp->__kgspGetBinArchiveGspRmCcFmcGfwProdSigned__(pKernelGsp);
814 }
815
816 NV_STATUS kgspCalculateFbLayout_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
817
818 NV_STATUS kgspCalculateFbLayout_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
819
kgspCalculateFbLayout_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_FIRMWARE * pGspFw)820 static inline NV_STATUS kgspCalculateFbLayout_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
821 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
822 }
823
kgspCalculateFbLayout_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_FIRMWARE * pGspFw)824 static inline NV_STATUS kgspCalculateFbLayout_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
825 return pKernelGsp->__kgspCalculateFbLayout__(pGpu, pKernelGsp, pGspFw);
826 }
827
kgspGetNonWprHeapSize_ed6b8b(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)828 static inline NvU32 kgspGetNonWprHeapSize_ed6b8b(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
829 return 1048576;
830 }
831
kgspGetNonWprHeapSize_d505ea(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)832 static inline NvU32 kgspGetNonWprHeapSize_d505ea(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
833 return 2097152;
834 }
835
kgspGetNonWprHeapSize_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)836 static inline NvU32 kgspGetNonWprHeapSize_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
837 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
838 }
839
kgspGetNonWprHeapSize_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)840 static inline NvU32 kgspGetNonWprHeapSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
841 return pKernelGsp->__kgspGetNonWprHeapSize__(pGpu, pKernelGsp);
842 }
843
844 NV_STATUS kgspExecuteSequencerCommand_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize);
845
846 NV_STATUS kgspExecuteSequencerCommand_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize);
847
kgspExecuteSequencerCommand_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 opCode,NvU32 * pPayLoad,NvU32 payloadSize)848 static inline NV_STATUS kgspExecuteSequencerCommand_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize) {
849 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
850 }
851
kgspExecuteSequencerCommand_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 opCode,NvU32 * pPayLoad,NvU32 payloadSize)852 static inline NV_STATUS kgspExecuteSequencerCommand_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize) {
853 return pKernelGsp->__kgspExecuteSequencerCommand__(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize);
854 }
855
kgspReadUcodeFuseVersion_b2b553(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 ucodeId)856 static inline NvU32 kgspReadUcodeFuseVersion_b2b553(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId) {
857 return 0;
858 }
859
860 NvU32 kgspReadUcodeFuseVersion_GA100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId);
861
kgspReadUcodeFuseVersion_474d46(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 ucodeId)862 static inline NvU32 kgspReadUcodeFuseVersion_474d46(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId) {
863 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
864 }
865
kgspReadUcodeFuseVersion_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 ucodeId)866 static inline NvU32 kgspReadUcodeFuseVersion_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId) {
867 return pKernelGsp->__kgspReadUcodeFuseVersion__(pGpu, pKernelGsp, ucodeId);
868 }
869
870 NV_STATUS kgspResetHw_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
871
872 NV_STATUS kgspResetHw_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
873
kgspResetHw_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)874 static inline NV_STATUS kgspResetHw_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
875 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
876 }
877
kgspResetHw_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)878 static inline NV_STATUS kgspResetHw_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
879 return pKernelGsp->__kgspResetHw__(pGpu, pKernelGsp);
880 }
881
882 NvBool kgspHealthCheck_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
883
kgspHealthCheck_108313(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)884 static inline NvBool kgspHealthCheck_108313(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
885 NV_ASSERT_OR_RETURN_PRECOMP(0, ((NvBool)(0 != 0)));
886 }
887
kgspHealthCheck_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)888 static inline NvBool kgspHealthCheck_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
889 return pKernelGsp->__kgspHealthCheck__(pGpu, pKernelGsp);
890 }
891
892 NvU32 kgspService_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
893
kgspService_474d46(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)894 static inline NvU32 kgspService_474d46(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
895 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
896 }
897
kgspService_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)898 static inline NvU32 kgspService_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
899 return pKernelGsp->__kgspService__(pGpu, pKernelGsp);
900 }
901
902 NvBool kgspIsWpr2Up_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
903
904 NvBool kgspIsWpr2Up_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
905
kgspIsWpr2Up_108313(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)906 static inline NvBool kgspIsWpr2Up_108313(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
907 NV_ASSERT_OR_RETURN_PRECOMP(0, ((NvBool)(0 != 0)));
908 }
909
kgspIsWpr2Up_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)910 static inline NvBool kgspIsWpr2Up_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
911 return pKernelGsp->__kgspIsWpr2Up__(pGpu, pKernelGsp);
912 }
913
914 NvU32 kgspGetFrtsSize_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
915
kgspGetFrtsSize_4a4dee(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)916 static inline NvU32 kgspGetFrtsSize_4a4dee(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
917 return 0;
918 }
919
kgspGetFrtsSize_474d46(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)920 static inline NvU32 kgspGetFrtsSize_474d46(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
921 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
922 }
923
kgspGetFrtsSize_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)924 static inline NvU32 kgspGetFrtsSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
925 return pKernelGsp->__kgspGetFrtsSize__(pGpu, pKernelGsp);
926 }
927
kgspGetPrescrubbedTopFbSize_e1e623(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)928 static inline NvU64 kgspGetPrescrubbedTopFbSize_e1e623(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
929 return 256 * 1024 * 1024;
930 }
931
kgspGetPrescrubbedTopFbSize_604eb7(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)932 static inline NvU64 kgspGetPrescrubbedTopFbSize_604eb7(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
933 return (+18446744073709551615ULL);
934 }
935
kgspGetPrescrubbedTopFbSize_474d46(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)936 static inline NvU64 kgspGetPrescrubbedTopFbSize_474d46(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
937 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
938 }
939
kgspGetPrescrubbedTopFbSize_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)940 static inline NvU64 kgspGetPrescrubbedTopFbSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
941 return pKernelGsp->__kgspGetPrescrubbedTopFbSize__(pGpu, pKernelGsp);
942 }
943
944 NV_STATUS kgspExtractVbiosFromRom_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg);
945
kgspExtractVbiosFromRom_395e98(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspVbiosImg ** ppVbiosImg)946 static inline NV_STATUS kgspExtractVbiosFromRom_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg) {
947 return NV_ERR_NOT_SUPPORTED;
948 }
949
kgspExtractVbiosFromRom_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspVbiosImg ** ppVbiosImg)950 static inline NV_STATUS kgspExtractVbiosFromRom_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg) {
951 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
952 }
953
kgspExtractVbiosFromRom_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspVbiosImg ** ppVbiosImg)954 static inline NV_STATUS kgspExtractVbiosFromRom_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg) {
955 return pKernelGsp->__kgspExtractVbiosFromRom__(pGpu, pKernelGsp, ppVbiosImg);
956 }
957
958 NV_STATUS kgspPrepareForFwsecFrts_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset, KernelGspPreparedFwsecCmd *preparedCmd);
959
kgspPrepareForFwsecFrts_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspFlcnUcode * pFwsecUcode,const NvU64 frtsOffset,KernelGspPreparedFwsecCmd * preparedCmd)960 static inline NV_STATUS kgspPrepareForFwsecFrts_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset, KernelGspPreparedFwsecCmd *preparedCmd) {
961 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
962 }
963
kgspPrepareForFwsecFrts_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspFlcnUcode * pFwsecUcode,const NvU64 frtsOffset,KernelGspPreparedFwsecCmd * preparedCmd)964 static inline NV_STATUS kgspPrepareForFwsecFrts_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset, KernelGspPreparedFwsecCmd *preparedCmd) {
965 return pKernelGsp->__kgspPrepareForFwsecFrts__(pGpu, pKernelGsp, pFwsecUcode, frtsOffset, preparedCmd);
966 }
967
968 NV_STATUS kgspPrepareForFwsecSb_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, KernelGspPreparedFwsecCmd *preparedCmd);
969
kgspPrepareForFwsecSb_395e98(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspFlcnUcode * pFwsecUcode,KernelGspPreparedFwsecCmd * preparedCmd)970 static inline NV_STATUS kgspPrepareForFwsecSb_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, KernelGspPreparedFwsecCmd *preparedCmd) {
971 return NV_ERR_NOT_SUPPORTED;
972 }
973
kgspPrepareForFwsecSb_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspFlcnUcode * pFwsecUcode,KernelGspPreparedFwsecCmd * preparedCmd)974 static inline NV_STATUS kgspPrepareForFwsecSb_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, KernelGspPreparedFwsecCmd *preparedCmd) {
975 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
976 }
977
kgspPrepareForFwsecSb_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspFlcnUcode * pFwsecUcode,KernelGspPreparedFwsecCmd * preparedCmd)978 static inline NV_STATUS kgspPrepareForFwsecSb_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, KernelGspPreparedFwsecCmd *preparedCmd) {
979 return pKernelGsp->__kgspPrepareForFwsecSb__(pGpu, pKernelGsp, pFwsecUcode, preparedCmd);
980 }
981
982 NV_STATUS kgspExecuteFwsec_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspPreparedFwsecCmd *preparedCmd);
983
kgspExecuteFwsec_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspPreparedFwsecCmd * preparedCmd)984 static inline NV_STATUS kgspExecuteFwsec_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspPreparedFwsecCmd *preparedCmd) {
985 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
986 }
987
kgspExecuteFwsec_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspPreparedFwsecCmd * preparedCmd)988 static inline NV_STATUS kgspExecuteFwsec_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspPreparedFwsecCmd *preparedCmd) {
989 return pKernelGsp->__kgspExecuteFwsec__(pGpu, pKernelGsp, preparedCmd);
990 }
991
992 NV_STATUS kgspExecuteScrubberIfNeeded_AD102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
993
kgspExecuteScrubberIfNeeded_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)994 static inline NV_STATUS kgspExecuteScrubberIfNeeded_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
995 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
996 }
997
kgspExecuteScrubberIfNeeded_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)998 static inline NV_STATUS kgspExecuteScrubberIfNeeded_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
999 return pKernelGsp->__kgspExecuteScrubberIfNeeded__(pGpu, pKernelGsp);
1000 }
1001
1002 NV_STATUS kgspExecuteBooterLoad_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData);
1003
kgspExecuteBooterLoad_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,const NvU64 sysmemAddrOfData)1004 static inline NV_STATUS kgspExecuteBooterLoad_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData) {
1005 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1006 }
1007
kgspExecuteBooterLoad_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,const NvU64 sysmemAddrOfData)1008 static inline NV_STATUS kgspExecuteBooterLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData) {
1009 return pKernelGsp->__kgspExecuteBooterLoad__(pGpu, pKernelGsp, sysmemAddrOfData);
1010 }
1011
1012 NV_STATUS kgspExecuteBooterUnloadIfNeeded_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData);
1013
kgspExecuteBooterUnloadIfNeeded_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,const NvU64 sysmemAddrOfSuspendResumeData)1014 static inline NV_STATUS kgspExecuteBooterUnloadIfNeeded_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData) {
1015 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1016 }
1017
kgspExecuteBooterUnloadIfNeeded_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,const NvU64 sysmemAddrOfSuspendResumeData)1018 static inline NV_STATUS kgspExecuteBooterUnloadIfNeeded_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData) {
1019 return pKernelGsp->__kgspExecuteBooterUnloadIfNeeded__(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData);
1020 }
1021
1022 NV_STATUS kgspExecuteHsFalcon_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1);
1023
1024 NV_STATUS kgspExecuteHsFalcon_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1);
1025
kgspExecuteHsFalcon_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspFlcnUcode * pFlcnUcode,struct KernelFalcon * pKernelFlcn,NvU32 * pMailbox0,NvU32 * pMailbox1)1026 static inline NV_STATUS kgspExecuteHsFalcon_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1) {
1027 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1028 }
1029
kgspExecuteHsFalcon_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,KernelGspFlcnUcode * pFlcnUcode,struct KernelFalcon * pKernelFlcn,NvU32 * pMailbox0,NvU32 * pMailbox1)1030 static inline NV_STATUS kgspExecuteHsFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1) {
1031 return pKernelGsp->__kgspExecuteHsFalcon__(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1);
1032 }
1033
1034 NV_STATUS kgspWaitForProcessorSuspend_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1035
kgspWaitForProcessorSuspend_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1036 static inline NV_STATUS kgspWaitForProcessorSuspend_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1037 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1038 }
1039
kgspWaitForProcessorSuspend_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1040 static inline NV_STATUS kgspWaitForProcessorSuspend_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1041 return pKernelGsp->__kgspWaitForProcessorSuspend__(pGpu, pKernelGsp);
1042 }
1043
1044 NV_STATUS kgspSavePowerMgmtState_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1045
kgspSavePowerMgmtState_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1046 static inline NV_STATUS kgspSavePowerMgmtState_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1047 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1048 }
1049
kgspSavePowerMgmtState_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1050 static inline NV_STATUS kgspSavePowerMgmtState_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1051 return pKernelGsp->__kgspSavePowerMgmtState__(pGpu, pKernelGsp);
1052 }
1053
1054 NV_STATUS kgspRestorePowerMgmtState_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1055
kgspRestorePowerMgmtState_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1056 static inline NV_STATUS kgspRestorePowerMgmtState_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1057 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1058 }
1059
kgspRestorePowerMgmtState_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1060 static inline NV_STATUS kgspRestorePowerMgmtState_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1061 return pKernelGsp->__kgspRestorePowerMgmtState__(pGpu, pKernelGsp);
1062 }
1063
1064 void kgspFreeSuspendResumeData_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1065
kgspFreeSuspendResumeData_f2d351(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1066 static inline void kgspFreeSuspendResumeData_f2d351(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1067 NV_ASSERT_PRECOMP(0);
1068 }
1069
kgspFreeSuspendResumeData_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1070 static inline void kgspFreeSuspendResumeData_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1071 pKernelGsp->__kgspFreeSuspendResumeData__(pGpu, pKernelGsp);
1072 }
1073
1074 NV_STATUS kgspWaitForGfwBootOk_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1075
1076 NV_STATUS kgspWaitForGfwBootOk_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1077
kgspWaitForGfwBootOk_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1078 static inline NV_STATUS kgspWaitForGfwBootOk_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1079 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1080 }
1081
kgspWaitForGfwBootOk_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1082 static inline NV_STATUS kgspWaitForGfwBootOk_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1083 return pKernelGsp->__kgspWaitForGfwBootOk__(pGpu, pKernelGsp);
1084 }
1085
1086 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_TU102(struct KernelGsp *pKernelGsp);
1087
1088 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_TU116(struct KernelGsp *pKernelGsp);
1089
1090 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_GA100(struct KernelGsp *pKernelGsp);
1091
1092 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_GA102(struct KernelGsp *pKernelGsp);
1093
1094 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_AD102(struct KernelGsp *pKernelGsp);
1095
kgspGetBinArchiveBooterLoadUcode_80f438(struct KernelGsp * pKernelGsp)1096 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_80f438(struct KernelGsp *pKernelGsp) {
1097 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
1098 }
1099
kgspGetBinArchiveBooterLoadUcode_DISPATCH(struct KernelGsp * pKernelGsp)1100 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_DISPATCH(struct KernelGsp *pKernelGsp) {
1101 return pKernelGsp->__kgspGetBinArchiveBooterLoadUcode__(pKernelGsp);
1102 }
1103
1104 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_TU102(struct KernelGsp *pKernelGsp);
1105
1106 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_TU116(struct KernelGsp *pKernelGsp);
1107
1108 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_GA100(struct KernelGsp *pKernelGsp);
1109
1110 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_GA102(struct KernelGsp *pKernelGsp);
1111
1112 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_AD102(struct KernelGsp *pKernelGsp);
1113
kgspGetBinArchiveBooterUnloadUcode_80f438(struct KernelGsp * pKernelGsp)1114 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_80f438(struct KernelGsp *pKernelGsp) {
1115 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
1116 }
1117
kgspGetBinArchiveBooterUnloadUcode_DISPATCH(struct KernelGsp * pKernelGsp)1118 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_DISPATCH(struct KernelGsp *pKernelGsp) {
1119 return pKernelGsp->__kgspGetBinArchiveBooterUnloadUcode__(pKernelGsp);
1120 }
1121
kgspGetMinWprHeapSizeMB_7185bf(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1122 static inline NvU64 kgspGetMinWprHeapSizeMB_7185bf(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1123 return (64U);
1124 }
1125
kgspGetMinWprHeapSizeMB_cc88c3(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1126 static inline NvU64 kgspGetMinWprHeapSizeMB_cc88c3(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1127 return pGpu->bVgpuGspPluginOffloadEnabled ? (565U) : (86U);
1128 }
1129
kgspGetMinWprHeapSizeMB_b2b553(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1130 static inline NvU64 kgspGetMinWprHeapSizeMB_b2b553(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1131 return 0;
1132 }
1133
kgspGetMinWprHeapSizeMB_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1134 static inline NvU64 kgspGetMinWprHeapSizeMB_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1135 return pKernelGsp->__kgspGetMinWprHeapSizeMB__(pGpu, pKernelGsp);
1136 }
1137
kgspGetMaxWprHeapSizeMB_ad4e6a(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1138 static inline NvU64 kgspGetMaxWprHeapSizeMB_ad4e6a(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1139 return (256U);
1140 }
1141
kgspGetMaxWprHeapSizeMB_55728f(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1142 static inline NvU64 kgspGetMaxWprHeapSizeMB_55728f(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1143 return pGpu->bVgpuGspPluginOffloadEnabled ? (1040U) : (278U);
1144 }
1145
kgspGetMaxWprHeapSizeMB_b2b553(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1146 static inline NvU64 kgspGetMaxWprHeapSizeMB_b2b553(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1147 return 0;
1148 }
1149
kgspGetMaxWprHeapSizeMB_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1150 static inline NvU64 kgspGetMaxWprHeapSizeMB_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1151 return pKernelGsp->__kgspGetMaxWprHeapSizeMB__(pGpu, pKernelGsp);
1152 }
1153
kgspGetFwHeapParamOsCarveoutSize_397f70(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1154 static inline NvU32 kgspGetFwHeapParamOsCarveoutSize_397f70(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1155 return (0 << 20);
1156 }
1157
kgspGetFwHeapParamOsCarveoutSize_4b5307(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1158 static inline NvU32 kgspGetFwHeapParamOsCarveoutSize_4b5307(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1159 return pGpu->bVgpuGspPluginOffloadEnabled ? (36 << 20) : (22 << 20);
1160 }
1161
kgspGetFwHeapParamOsCarveoutSize_b2b553(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1162 static inline NvU32 kgspGetFwHeapParamOsCarveoutSize_b2b553(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1163 return 0;
1164 }
1165
kgspGetFwHeapParamOsCarveoutSize_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1166 static inline NvU32 kgspGetFwHeapParamOsCarveoutSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1167 return pKernelGsp->__kgspGetFwHeapParamOsCarveoutSize__(pGpu, pKernelGsp);
1168 }
1169
kgspInitVgpuPartitionLogging_395e98(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 gfid,NvU64 initTaskLogBUffOffset,NvU64 initTaskLogBUffSize,NvU64 vgpuTaskLogBUffOffset,NvU64 vgpuTaskLogBuffSize)1170 static inline NV_STATUS kgspInitVgpuPartitionLogging_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize) {
1171 return NV_ERR_NOT_SUPPORTED;
1172 }
1173
1174 NV_STATUS kgspInitVgpuPartitionLogging_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize);
1175
kgspInitVgpuPartitionLogging_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 gfid,NvU64 initTaskLogBUffOffset,NvU64 initTaskLogBUffSize,NvU64 vgpuTaskLogBUffOffset,NvU64 vgpuTaskLogBuffSize)1176 static inline NV_STATUS kgspInitVgpuPartitionLogging_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize) {
1177 return pKernelGsp->__kgspInitVgpuPartitionLogging__(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize);
1178 }
1179
kgspFreeVgpuPartitionLogging_395e98(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 gfid)1180 static inline NV_STATUS kgspFreeVgpuPartitionLogging_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid) {
1181 return NV_ERR_NOT_SUPPORTED;
1182 }
1183
1184 NV_STATUS kgspFreeVgpuPartitionLogging_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid);
1185
kgspFreeVgpuPartitionLogging_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 gfid)1186 static inline NV_STATUS kgspFreeVgpuPartitionLogging_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid) {
1187 return pKernelGsp->__kgspFreeVgpuPartitionLogging__(pGpu, pKernelGsp, gfid);
1188 }
1189
1190 const char *kgspGetSignatureSectionNamePrefix_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1191
kgspGetSignatureSectionNamePrefix_789efb(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1192 static inline const char *kgspGetSignatureSectionNamePrefix_789efb(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1193 return ".fwsignature_";
1194 }
1195
kgspGetSignatureSectionNamePrefix_80f438(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1196 static inline const char *kgspGetSignatureSectionNamePrefix_80f438(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1197 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
1198 }
1199
kgspGetSignatureSectionNamePrefix_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1200 static inline const char *kgspGetSignatureSectionNamePrefix_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1201 return pKernelGsp->__kgspGetSignatureSectionNamePrefix__(pGpu, pKernelGsp);
1202 }
1203
1204 NV_STATUS kgspSetupGspFmcArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
1205
kgspSetupGspFmcArgs_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_FIRMWARE * pGspFw)1206 static inline NV_STATUS kgspSetupGspFmcArgs_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
1207 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1208 }
1209
kgspSetupGspFmcArgs_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_FIRMWARE * pGspFw)1210 static inline NV_STATUS kgspSetupGspFmcArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
1211 return pKernelGsp->__kgspSetupGspFmcArgs__(pGpu, pKernelGsp, pGspFw);
1212 }
1213
1214 void kgspReadEmem_TU102(struct KernelGsp *pKernelGsp, NvU64 offset, NvU64 size, void *pBuf);
1215
kgspReadEmem_366c4c(struct KernelGsp * pKernelGsp,NvU64 offset,NvU64 size,void * pBuf)1216 static inline void kgspReadEmem_366c4c(struct KernelGsp *pKernelGsp, NvU64 offset, NvU64 size, void *pBuf) {
1217 NV_ASSERT(0);
1218 }
1219
kgspReadEmem_DISPATCH(struct KernelGsp * pKernelGsp,NvU64 offset,NvU64 size,void * pBuf)1220 static inline void kgspReadEmem_DISPATCH(struct KernelGsp *pKernelGsp, NvU64 offset, NvU64 size, void *pBuf) {
1221 pKernelGsp->__kgspReadEmem__(pKernelGsp, offset, size, pBuf);
1222 }
1223
1224 NV_STATUS kgspIssueNotifyOp_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pArgs, NvU32 argc);
1225
kgspIssueNotifyOp_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 opCode,NvU32 * pArgs,NvU32 argc)1226 static inline NV_STATUS kgspIssueNotifyOp_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pArgs, NvU32 argc) {
1227 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1228 }
1229
kgspIssueNotifyOp_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 opCode,NvU32 * pArgs,NvU32 argc)1230 static inline NV_STATUS kgspIssueNotifyOp_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pArgs, NvU32 argc) {
1231 return pKernelGsp->__kgspIssueNotifyOp__(pGpu, pKernelGsp, opCode, pArgs, argc);
1232 }
1233
1234 NV_STATUS kgspCheckGspRmCcCleanup_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1235
kgspCheckGspRmCcCleanup_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1236 static inline NV_STATUS kgspCheckGspRmCcCleanup_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1237 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1238 }
1239
kgspCheckGspRmCcCleanup_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1240 static inline NV_STATUS kgspCheckGspRmCcCleanup_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1241 return pKernelGsp->__kgspCheckGspRmCcCleanup__(pGpu, pKernelGsp);
1242 }
1243
kgspConfigured_DISPATCH(struct KernelGsp * arg0)1244 static inline NvBool kgspConfigured_DISPATCH(struct KernelGsp *arg0) {
1245 return arg0->__kgspConfigured__(arg0);
1246 }
1247
kgspPriRead_DISPATCH(struct KernelGsp * arg0,NvU32 offset)1248 static inline NvU32 kgspPriRead_DISPATCH(struct KernelGsp *arg0, NvU32 offset) {
1249 return arg0->__kgspPriRead__(arg0, offset);
1250 }
1251
kgspRegWrite_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelFlcn,NvU32 offset,NvU32 data)1252 static inline void kgspRegWrite_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelFlcn, NvU32 offset, NvU32 data) {
1253 pKernelFlcn->__kgspRegWrite__(pGpu, pKernelFlcn, offset, data);
1254 }
1255
kgspMaskDmemAddr_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelFlcn,NvU32 addr)1256 static inline NvU32 kgspMaskDmemAddr_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelFlcn, NvU32 addr) {
1257 return pKernelFlcn->__kgspMaskDmemAddr__(pGpu, pKernelFlcn, addr);
1258 }
1259
kgspStateDestroy_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate)1260 static inline void kgspStateDestroy_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
1261 pEngstate->__kgspStateDestroy__(pGpu, pEngstate);
1262 }
1263
kgspVprintf_DISPATCH(struct KernelGsp * arg0,NvBool bReportStart,const char * fmt,va_list args)1264 static inline void kgspVprintf_DISPATCH(struct KernelGsp *arg0, NvBool bReportStart, const char *fmt, va_list args) {
1265 arg0->__kgspVprintf__(arg0, bReportStart, fmt, args);
1266 }
1267
kgspClearInterrupt_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pIntrService,IntrServiceClearInterruptArguments * pParams)1268 static inline NvBool kgspClearInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pIntrService, IntrServiceClearInterruptArguments *pParams) {
1269 return pIntrService->__kgspClearInterrupt__(pGpu, pIntrService, pParams);
1270 }
1271
kgspPriWrite_DISPATCH(struct KernelGsp * arg0,NvU32 offset,NvU32 data)1272 static inline void kgspPriWrite_DISPATCH(struct KernelGsp *arg0, NvU32 offset, NvU32 data) {
1273 arg0->__kgspPriWrite__(arg0, offset, data);
1274 }
1275
kgspMapBufferDescriptor_DISPATCH(struct KernelGsp * arg0,CrashCatBufferDescriptor * pBufDesc)1276 static inline void *kgspMapBufferDescriptor_DISPATCH(struct KernelGsp *arg0, CrashCatBufferDescriptor *pBufDesc) {
1277 return arg0->__kgspMapBufferDescriptor__(arg0, pBufDesc);
1278 }
1279
kgspSyncBufferDescriptor_DISPATCH(struct KernelGsp * arg0,CrashCatBufferDescriptor * pBufDesc,NvU32 offset,NvU32 size)1280 static inline void kgspSyncBufferDescriptor_DISPATCH(struct KernelGsp *arg0, CrashCatBufferDescriptor *pBufDesc, NvU32 offset, NvU32 size) {
1281 arg0->__kgspSyncBufferDescriptor__(arg0, pBufDesc, offset, size);
1282 }
1283
kgspRegRead_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelFlcn,NvU32 offset)1284 static inline NvU32 kgspRegRead_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelFlcn, NvU32 offset) {
1285 return pKernelFlcn->__kgspRegRead__(pGpu, pKernelFlcn, offset);
1286 }
1287
kgspIsPresent_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate)1288 static inline NvBool kgspIsPresent_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
1289 return pEngstate->__kgspIsPresent__(pGpu, pEngstate);
1290 }
1291
kgspStateLoad_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate,NvU32 arg0)1292 static inline NV_STATUS kgspStateLoad_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
1293 return pEngstate->__kgspStateLoad__(pGpu, pEngstate, arg0);
1294 }
1295
kgspGetScratchOffsets_DISPATCH(struct KernelGsp * arg0,NV_CRASHCAT_SCRATCH_GROUP_ID scratchGroupId)1296 static inline const NvU32 *kgspGetScratchOffsets_DISPATCH(struct KernelGsp *arg0, NV_CRASHCAT_SCRATCH_GROUP_ID scratchGroupId) {
1297 return arg0->__kgspGetScratchOffsets__(arg0, scratchGroupId);
1298 }
1299
kgspUnload_DISPATCH(struct KernelGsp * arg0)1300 static inline void kgspUnload_DISPATCH(struct KernelGsp *arg0) {
1301 arg0->__kgspUnload__(arg0);
1302 }
1303
kgspStateUnload_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate,NvU32 arg0)1304 static inline NV_STATUS kgspStateUnload_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
1305 return pEngstate->__kgspStateUnload__(pGpu, pEngstate, arg0);
1306 }
1307
kgspServiceNotificationInterrupt_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pIntrService,IntrServiceServiceNotificationInterruptArguments * pParams)1308 static inline NV_STATUS kgspServiceNotificationInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pIntrService, IntrServiceServiceNotificationInterruptArguments *pParams) {
1309 return pIntrService->__kgspServiceNotificationInterrupt__(pGpu, pIntrService, pParams);
1310 }
1311
kgspGetWFL0Offset_DISPATCH(struct KernelGsp * arg0)1312 static inline NvU32 kgspGetWFL0Offset_DISPATCH(struct KernelGsp *arg0) {
1313 return arg0->__kgspGetWFL0Offset__(arg0);
1314 }
1315
kgspStateInitLocked_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate)1316 static inline NV_STATUS kgspStateInitLocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
1317 return pEngstate->__kgspStateInitLocked__(pGpu, pEngstate);
1318 }
1319
kgspStatePreLoad_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate,NvU32 arg0)1320 static inline NV_STATUS kgspStatePreLoad_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
1321 return pEngstate->__kgspStatePreLoad__(pGpu, pEngstate, arg0);
1322 }
1323
kgspStatePostUnload_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate,NvU32 arg0)1324 static inline NV_STATUS kgspStatePostUnload_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
1325 return pEngstate->__kgspStatePostUnload__(pGpu, pEngstate, arg0);
1326 }
1327
kgspStatePreUnload_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate,NvU32 arg0)1328 static inline NV_STATUS kgspStatePreUnload_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
1329 return pEngstate->__kgspStatePreUnload__(pGpu, pEngstate, arg0);
1330 }
1331
kgspStateInitUnlocked_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate)1332 static inline NV_STATUS kgspStateInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
1333 return pEngstate->__kgspStateInitUnlocked__(pGpu, pEngstate);
1334 }
1335
kgspInitMissing_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate)1336 static inline void kgspInitMissing_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
1337 pEngstate->__kgspInitMissing__(pGpu, pEngstate);
1338 }
1339
kgspStatePreInitLocked_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate)1340 static inline NV_STATUS kgspStatePreInitLocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
1341 return pEngstate->__kgspStatePreInitLocked__(pGpu, pEngstate);
1342 }
1343
kgspStatePreInitUnlocked_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate)1344 static inline NV_STATUS kgspStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) {
1345 return pEngstate->__kgspStatePreInitUnlocked__(pGpu, pEngstate);
1346 }
1347
kgspStatePostLoad_DISPATCH(POBJGPU pGpu,struct KernelGsp * pEngstate,NvU32 arg0)1348 static inline NV_STATUS kgspStatePostLoad_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) {
1349 return pEngstate->__kgspStatePostLoad__(pGpu, pEngstate, arg0);
1350 }
1351
kgspUnmapBufferDescriptor_DISPATCH(struct KernelGsp * arg0,CrashCatBufferDescriptor * pBufDesc)1352 static inline void kgspUnmapBufferDescriptor_DISPATCH(struct KernelGsp *arg0, CrashCatBufferDescriptor *pBufDesc) {
1353 arg0->__kgspUnmapBufferDescriptor__(arg0, pBufDesc);
1354 }
1355
kgspReadDmem_DISPATCH(struct KernelGsp * arg0,NvU32 offset,NvU32 size,void * pBuf)1356 static inline void kgspReadDmem_DISPATCH(struct KernelGsp *arg0, NvU32 offset, NvU32 size, void *pBuf) {
1357 arg0->__kgspReadDmem__(arg0, offset, size, pBuf);
1358 }
1359
1360 void kgspDestruct_IMPL(struct KernelGsp *pKernelGsp);
1361
1362 #define __nvoc_kgspDestruct(pKernelGsp) kgspDestruct_IMPL(pKernelGsp)
1363 void kgspPopulateGspRmInitArgs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_SR_INIT_ARGUMENTS *pGspSrInitArgs);
1364
1365 #ifdef __nvoc_kernel_gsp_h_disabled
kgspPopulateGspRmInitArgs(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_SR_INIT_ARGUMENTS * pGspSrInitArgs)1366 static inline void kgspPopulateGspRmInitArgs(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_SR_INIT_ARGUMENTS *pGspSrInitArgs) {
1367 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1368 }
1369 #else //__nvoc_kernel_gsp_h_disabled
1370 #define kgspPopulateGspRmInitArgs(pGpu, pKernelGsp, pGspSrInitArgs) kgspPopulateGspRmInitArgs_IMPL(pGpu, pKernelGsp, pGspSrInitArgs)
1371 #endif //__nvoc_kernel_gsp_h_disabled
1372
1373 NV_STATUS kgspInitRm_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);
1374
1375 #ifdef __nvoc_kernel_gsp_h_disabled
kgspInitRm(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_FIRMWARE * pGspFw)1376 static inline NV_STATUS kgspInitRm(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
1377 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1378 return NV_ERR_NOT_SUPPORTED;
1379 }
1380 #else //__nvoc_kernel_gsp_h_disabled
1381 #define kgspInitRm(pGpu, pKernelGsp, pGspFw) kgspInitRm_IMPL(pGpu, pKernelGsp, pGspFw)
1382 #endif //__nvoc_kernel_gsp_h_disabled
1383
1384 NV_STATUS kgspCreateRadix3_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, MEMORY_DESCRIPTOR **ppMemdescRadix3, MEMORY_DESCRIPTOR *pMemdescData, const void *pData, NvU64 sizeOfData);
1385
1386 #ifdef __nvoc_kernel_gsp_h_disabled
kgspCreateRadix3(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,MEMORY_DESCRIPTOR ** ppMemdescRadix3,MEMORY_DESCRIPTOR * pMemdescData,const void * pData,NvU64 sizeOfData)1387 static inline NV_STATUS kgspCreateRadix3(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, MEMORY_DESCRIPTOR **ppMemdescRadix3, MEMORY_DESCRIPTOR *pMemdescData, const void *pData, NvU64 sizeOfData) {
1388 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1389 return NV_ERR_NOT_SUPPORTED;
1390 }
1391 #else //__nvoc_kernel_gsp_h_disabled
1392 #define kgspCreateRadix3(pGpu, pKernelGsp, ppMemdescRadix3, pMemdescData, pData, sizeOfData) kgspCreateRadix3_IMPL(pGpu, pKernelGsp, ppMemdescRadix3, pMemdescData, pData, sizeOfData)
1393 #endif //__nvoc_kernel_gsp_h_disabled
1394
1395 NV_STATUS kgspUnloadRm_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1396
1397 #ifdef __nvoc_kernel_gsp_h_disabled
kgspUnloadRm(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1398 static inline NV_STATUS kgspUnloadRm(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1399 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1400 return NV_ERR_NOT_SUPPORTED;
1401 }
1402 #else //__nvoc_kernel_gsp_h_disabled
1403 #define kgspUnloadRm(pGpu, pKernelGsp) kgspUnloadRm_IMPL(pGpu, pKernelGsp)
1404 #endif //__nvoc_kernel_gsp_h_disabled
1405
1406 NV_STATUS kgspPrepareBootBinaryImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1407
1408 #ifdef __nvoc_kernel_gsp_h_disabled
kgspPrepareBootBinaryImage(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1409 static inline NV_STATUS kgspPrepareBootBinaryImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1410 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1411 return NV_ERR_NOT_SUPPORTED;
1412 }
1413 #else //__nvoc_kernel_gsp_h_disabled
1414 #define kgspPrepareBootBinaryImage(pGpu, pKernelGsp) kgspPrepareBootBinaryImage_IMPL(pGpu, pKernelGsp)
1415 #endif //__nvoc_kernel_gsp_h_disabled
1416
1417 NvU64 kgspGetFwHeapSize_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU64 posteriorFbSize);
1418
1419 #ifdef __nvoc_kernel_gsp_h_disabled
kgspGetFwHeapSize(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU64 posteriorFbSize)1420 static inline NvU64 kgspGetFwHeapSize(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU64 posteriorFbSize) {
1421 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1422 return 0;
1423 }
1424 #else //__nvoc_kernel_gsp_h_disabled
1425 #define kgspGetFwHeapSize(pGpu, pKernelGsp, posteriorFbSize) kgspGetFwHeapSize_IMPL(pGpu, pKernelGsp, posteriorFbSize)
1426 #endif //__nvoc_kernel_gsp_h_disabled
1427
1428 NvU64 kgspGetWprEndMargin_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1429
1430 #ifdef __nvoc_kernel_gsp_h_disabled
kgspGetWprEndMargin(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1431 static inline NvU64 kgspGetWprEndMargin(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1432 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1433 return 0;
1434 }
1435 #else //__nvoc_kernel_gsp_h_disabled
1436 #define kgspGetWprEndMargin(pGpu, pKernelGsp) kgspGetWprEndMargin_IMPL(pGpu, pKernelGsp)
1437 #endif //__nvoc_kernel_gsp_h_disabled
1438
1439 void kgspSetupLibosInitArgs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1440
1441 #ifdef __nvoc_kernel_gsp_h_disabled
kgspSetupLibosInitArgs(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1442 static inline void kgspSetupLibosInitArgs(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1443 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1444 }
1445 #else //__nvoc_kernel_gsp_h_disabled
1446 #define kgspSetupLibosInitArgs(pGpu, pKernelGsp) kgspSetupLibosInitArgs_IMPL(pGpu, pKernelGsp)
1447 #endif //__nvoc_kernel_gsp_h_disabled
1448
1449 NV_STATUS kgspQueueAsyncInitRpcs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1450
1451 #ifdef __nvoc_kernel_gsp_h_disabled
kgspQueueAsyncInitRpcs(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1452 static inline NV_STATUS kgspQueueAsyncInitRpcs(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1453 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1454 return NV_ERR_NOT_SUPPORTED;
1455 }
1456 #else //__nvoc_kernel_gsp_h_disabled
1457 #define kgspQueueAsyncInitRpcs(pGpu, pKernelGsp) kgspQueueAsyncInitRpcs_IMPL(pGpu, pKernelGsp)
1458 #endif //__nvoc_kernel_gsp_h_disabled
1459
1460 void kgspRpcRecvEvents_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1461
1462 #ifdef __nvoc_kernel_gsp_h_disabled
kgspRpcRecvEvents(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1463 static inline void kgspRpcRecvEvents(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1464 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1465 }
1466 #else //__nvoc_kernel_gsp_h_disabled
1467 #define kgspRpcRecvEvents(pGpu, pKernelGsp) kgspRpcRecvEvents_IMPL(pGpu, pKernelGsp)
1468 #endif //__nvoc_kernel_gsp_h_disabled
1469
1470 NV_STATUS kgspWaitForRmInitDone_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1471
1472 #ifdef __nvoc_kernel_gsp_h_disabled
kgspWaitForRmInitDone(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1473 static inline NV_STATUS kgspWaitForRmInitDone(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1474 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1475 return NV_ERR_NOT_SUPPORTED;
1476 }
1477 #else //__nvoc_kernel_gsp_h_disabled
1478 #define kgspWaitForRmInitDone(pGpu, pKernelGsp) kgspWaitForRmInitDone_IMPL(pGpu, pKernelGsp)
1479 #endif //__nvoc_kernel_gsp_h_disabled
1480
1481 NV_STATUS kgspStartLogPolling_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1482
1483 #ifdef __nvoc_kernel_gsp_h_disabled
kgspStartLogPolling(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1484 static inline NV_STATUS kgspStartLogPolling(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1485 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1486 return NV_ERR_NOT_SUPPORTED;
1487 }
1488 #else //__nvoc_kernel_gsp_h_disabled
1489 #define kgspStartLogPolling(pGpu, pKernelGsp) kgspStartLogPolling_IMPL(pGpu, pKernelGsp)
1490 #endif //__nvoc_kernel_gsp_h_disabled
1491
1492 void kgspDumpGspLogs_IMPL(struct KernelGsp *pKernelGsp, NvBool arg0);
1493
1494 #ifdef __nvoc_kernel_gsp_h_disabled
kgspDumpGspLogs(struct KernelGsp * pKernelGsp,NvBool arg0)1495 static inline void kgspDumpGspLogs(struct KernelGsp *pKernelGsp, NvBool arg0) {
1496 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
1497 }
1498 #else //__nvoc_kernel_gsp_h_disabled
1499 #define kgspDumpGspLogs(pKernelGsp, arg0) kgspDumpGspLogs_IMPL(pKernelGsp, arg0)
1500 #endif //__nvoc_kernel_gsp_h_disabled
1501
void kgspDumpGspLogsUnlocked_IMPL(struct KernelGsp *pKernelGsp, NvBool arg0);

#ifdef __nvoc_kernel_gsp_h_disabled
/*!
 * Stub used when the KernelGsp engine is compiled out: asserts at
 * precompile time and returns nothing (the real function is void).
 * NOTE(review): the "Unlocked" suffix suggests a no-lock variant of
 * kgspDumpGspLogs — verify the locking contract against kernel_gsp.c.
 */
static inline void kgspDumpGspLogsUnlocked(struct KernelGsp *pKernelGsp, NvBool arg0) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
// KernelGsp enabled: dispatch straight to the real implementation.
#define kgspDumpGspLogsUnlocked(pKernelGsp, arg0) kgspDumpGspLogsUnlocked_IMPL(pKernelGsp, arg0)
#endif //__nvoc_kernel_gsp_h_disabled
1511
NV_STATUS kgspExecuteSequencerBuffer_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, void *pRunCpuSeqParams);

#ifdef __nvoc_kernel_gsp_h_disabled
/*!
 * Stub used when the KernelGsp engine is compiled out: asserts at
 * precompile time and reports the call as unsupported.
 * NOTE(review): pRunCpuSeqParams is an untyped void* — the concrete
 * parameter struct is defined by the _IMPL; confirm before casting.
 */
static inline NV_STATUS kgspExecuteSequencerBuffer(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, void *pRunCpuSeqParams) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
// KernelGsp enabled: dispatch straight to the real implementation.
#define kgspExecuteSequencerBuffer(pGpu, pKernelGsp, pRunCpuSeqParams) kgspExecuteSequencerBuffer_IMPL(pGpu, pKernelGsp, pRunCpuSeqParams)
#endif //__nvoc_kernel_gsp_h_disabled
1522
NV_STATUS kgspParseFwsecUcodeFromVbiosImg_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const KernelGspVbiosImg *const pVbiosImg, KernelGspFlcnUcode **ppFwsecUcode, NvU64 *pVbiosVersionCombined);

#ifdef __nvoc_kernel_gsp_h_disabled
/*!
 * Stub used when the KernelGsp engine is compiled out: asserts at
 * precompile time and reports the call as unsupported.
 * (Per its signature, the _IMPL takes a read-only VBIOS image and yields a
 * FWSEC ucode object plus a combined VBIOS version; outputs presumably
 * caller-freed via kgspFreeFlcnUcode — confirm in kernel_gsp.c.)
 */
static inline NV_STATUS kgspParseFwsecUcodeFromVbiosImg(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const KernelGspVbiosImg *const pVbiosImg, KernelGspFlcnUcode **ppFwsecUcode, NvU64 *pVbiosVersionCombined) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
// KernelGsp enabled: dispatch straight to the real implementation.
#define kgspParseFwsecUcodeFromVbiosImg(pGpu, pKernelGsp, pVbiosImg, ppFwsecUcode, pVbiosVersionCombined) kgspParseFwsecUcodeFromVbiosImg_IMPL(pGpu, pKernelGsp, pVbiosImg, ppFwsecUcode, pVbiosVersionCombined)
#endif //__nvoc_kernel_gsp_h_disabled
1533
NV_STATUS kgspAllocateScrubberUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppScrubberUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
/*!
 * Stub used when the KernelGsp engine is compiled out: asserts at
 * precompile time and reports the call as unsupported.
 * (The _IMPL allocates the scrubber falcon-ucode image into *ppScrubberUcode;
 * presumably released with kgspFreeFlcnUcode — confirm in kernel_gsp.c.)
 */
static inline NV_STATUS kgspAllocateScrubberUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppScrubberUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
// KernelGsp enabled: dispatch straight to the real implementation.
#define kgspAllocateScrubberUcodeImage(pGpu, pKernelGsp, ppScrubberUcode) kgspAllocateScrubberUcodeImage_IMPL(pGpu, pKernelGsp, ppScrubberUcode)
#endif //__nvoc_kernel_gsp_h_disabled
1544
NV_STATUS kgspAllocateBooterLoadUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterLoadUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
/*!
 * Stub used when the KernelGsp engine is compiled out: asserts at
 * precompile time and reports the call as unsupported.
 * (The _IMPL allocates the Booter-load falcon-ucode image into
 * *ppBooterLoadUcode — implementation not visible in this header.)
 */
static inline NV_STATUS kgspAllocateBooterLoadUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterLoadUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
// KernelGsp enabled: dispatch straight to the real implementation.
#define kgspAllocateBooterLoadUcodeImage(pGpu, pKernelGsp, ppBooterLoadUcode) kgspAllocateBooterLoadUcodeImage_IMPL(pGpu, pKernelGsp, ppBooterLoadUcode)
#endif //__nvoc_kernel_gsp_h_disabled
1555
NV_STATUS kgspAllocateBooterUnloadUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterUnloadUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
/*!
 * Stub used when the KernelGsp engine is compiled out: asserts at
 * precompile time and reports the call as unsupported.
 * (The _IMPL allocates the Booter-unload falcon-ucode image into
 * *ppBooterUnloadUcode — implementation not visible in this header.)
 */
static inline NV_STATUS kgspAllocateBooterUnloadUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterUnloadUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
// KernelGsp enabled: dispatch straight to the real implementation.
#define kgspAllocateBooterUnloadUcodeImage(pGpu, pKernelGsp, ppBooterUnloadUcode) kgspAllocateBooterUnloadUcodeImage_IMPL(pGpu, pKernelGsp, ppBooterUnloadUcode)
#endif //__nvoc_kernel_gsp_h_disabled
1566
void kgspRcAndNotifyAllUserChannels_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 exceptType);

#ifdef __nvoc_kernel_gsp_h_disabled
/*!
 * Stub used when the KernelGsp engine is compiled out: asserts at
 * precompile time and returns nothing (the real function is void).
 * (Per its name, the _IMPL RC-resets and notifies all user channels with
 * the given exception type — confirm semantics in kernel_gsp.c.)
 */
static inline void kgspRcAndNotifyAllUserChannels(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 exceptType) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
// KernelGsp enabled: dispatch straight to the real implementation.
#define kgspRcAndNotifyAllUserChannels(pGpu, pKernelGsp, exceptType) kgspRcAndNotifyAllUserChannels_IMPL(pGpu, pKernelGsp, exceptType)
#endif //__nvoc_kernel_gsp_h_disabled
1576
1577 #undef PRIVATE_FIELD
1578
1579
/*
 * RM API entry points routed over RPC to GSP-RM (control, alloc, dup, free).
 * NOTE(review): error/locking semantics are defined by the implementations,
 * which are not visible in this header.
 */
NV_STATUS rpcRmApiControl_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hObject,
                              NvU32 cmd, void *pParamStructPtr, NvU32 paramsSize);
NV_STATUS rpcRmApiAlloc_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hParent,
                            NvHandle hObject, NvU32 hClass, void *pAllocParams, NvU32 allocParamsSize);
NV_STATUS rpcRmApiDupObject_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hParent, NvHandle *phObject,
                                NvHandle hClientSrc, NvHandle hObjectSrc, NvU32 flags);
NV_STATUS rpcRmApiFree_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hObject);

/* Free a KernelGspVbiosImg structure (safe teardown of a parsed VBIOS image) */
void kgspFreeVbiosImg(KernelGspVbiosImg *pVbiosImg);
/* Free a KernelGspFlcnUcode structure (releases an allocated falcon ucode) */
void kgspFreeFlcnUcode(KernelGspFlcnUcode *pFlcnUcode);

/* Log diagnostic info for an RPC failure; bPollingForRpcResponse marks
 * whether the failure occurred while polling for an RPC response. */
void kgspLogRpcDebugInfo(struct OBJGPU *pGpu, OBJRPC *pRpc, NvU32 errorNum, NvBool bPollingForRpcResponse);
1594
1595 #endif // KERNEL_GSP_H
1596
1597 #ifdef __cplusplus
1598 } // extern "C"
1599 #endif
1600
1601 #endif // _G_KERNEL_GSP_NVOC_H_
1602