1
2 #ifndef _G_KERNEL_GSP_NVOC_H_
3 #define _G_KERNEL_GSP_NVOC_H_
4 #include "nvoc/runtime.h"
5
6 // Version of generated metadata structures
7 #ifdef NVOC_METADATA_VERSION
8 #undef NVOC_METADATA_VERSION
9 #endif
10 #define NVOC_METADATA_VERSION 0
11
12 #ifdef __cplusplus
13 extern "C" {
14 #endif
15
16 /*
17 * SPDX-FileCopyrightText: Copyright (c) 2017-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
18 * SPDX-License-Identifier: MIT
19 *
20 * Permission is hereby granted, free of charge, to any person obtaining a
21 * copy of this software and associated documentation files (the "Software"),
22 * to deal in the Software without restriction, including without limitation
23 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
24 * and/or sell copies of the Software, and to permit persons to whom the
25 * Software is furnished to do so, subject to the following conditions:
26 *
27 * The above copyright notice and this permission notice shall be included in
28 * all copies or substantial portions of the Software.
29 *
30 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
31 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
32 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
33 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
34 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
35 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
36 * DEALINGS IN THE SOFTWARE.
37 */
38
39 #pragma once
40 #include "g_kernel_gsp_nvoc.h"
41
42 #ifndef KERNEL_GSP_H
43 #define KERNEL_GSP_H
44
45 /*!
46 * This file provides definitions for all KernelGsp data structures
47 * and interfaces. KernelGsp is responsible for initiating the boot
48 * of RM on the GSP core (GSP-RM) and helps facilitate communication
49 * between Kernel RM and GSP-RM.
50 */
51
52 #include "core/core.h"
53 #include "core/bin_data.h"
54 #include "gpu/eng_state.h"
55 #include "gpu/intr/intr_service.h"
56 #include "gpu/falcon/kernel_falcon.h"
57 #include "gpu/gsp/gsp_static_config.h"
58 #include "gpu/gsp/gsp_init_args.h"
59 #include "gpu/gsp/gsp_fw_heap.h"
60 #include "nv-firmware.h"
61 #include "nv_sriov_defines.h"
62 #include "rmRiscvUcode.h"
63
64 #include "libos_init_args.h"
65 #include "gsp_fw_wpr_meta.h"
66 #include "gsp_fw_sr_meta.h"
67 #include "liblogdecode.h"
68
69 /*!
70 * Forward declarations
71 */
72 typedef struct SimAccessBuffer SimAccessBuffer;
73 typedef struct GSP_FMC_BOOT_PARAMS GSP_FMC_BOOT_PARAMS;
74
75 /*!
76 * Structure for VBIOS image for early FRTS.
77 */
78 typedef struct KernelGspVbiosImg
79 {
80 NvU8 *pImage;
81 NvU32 biosSize;
82 NvU32 expansionRomOffset;
83 } KernelGspVbiosImg;
84
85 /*!
86 * Variant of KernelGspFlcnUcode representing a non-Boot-from-HS ucode that
87 * loads directly without the generic falcon bootloader.
88 */
89 typedef struct KernelGspFlcnUcodeBootDirect
90 {
91 NvU8 *pImage;
92 NvU32 size;
93
94 NvU32 imemSize;
95 NvU32 imemNsSize;
96 NvU32 imemNsPa;
97 NvU32 imemSecSize;
98 NvU32 imemSecPa;
99
100 NvU32 dataOffset;
101 NvU32 dmemSize;
102 NvU32 dmemPa;
103 } KernelGspFlcnUcodeBootDirect;
104
105 /*!
106 * Variant of KernelGspFlcnUcode representing a non-Boot-from-HS ucode that
107 * loads via the generic falcon bootloader.
108 */
109 typedef struct KernelGspFlcnUcodeBootWithLoader
110 {
111 MEMORY_DESCRIPTOR *pCodeMemDesc;
112 MEMORY_DESCRIPTOR *pDataMemDesc;
113
114 NvU32 codeOffset;
115 NvU32 imemSize;
116 NvU32 imemNsSize;
117 NvU32 imemNsPa;
118 NvU32 imemSecSize;
119 NvU32 imemSecPa;
120 NvU32 codeEntry;
121
122 NvU32 dataOffset;
123 NvU32 dmemSize;
124 NvU32 dmemPa;
125
126 // Extra fields used for falcon ucodes from VBIOS
127 NvU32 interfaceOffset;
128 } KernelGspFlcnUcodeBootWithLoader;
129
130 /*!
131 * Variant of KernelGspFlcnUcode representing a Boot-from-HS ucode.
132 */
133 typedef struct KernelGspFlcnUcodeBootFromHs
134 {
135 MEMORY_DESCRIPTOR *pUcodeMemDesc;
136 NvU32 size;
137
138 NvU32 codeOffset;
139 NvU32 imemSize;
140 NvU32 imemPa;
141 NvU32 imemVa;
142
143 NvU32 dataOffset;
144 NvU32 dmemSize;
145 NvU32 dmemPa;
146 NvU32 dmemVa;
147
148 NvU32 hsSigDmemAddr;
149 NvU32 ucodeId;
150 NvU32 engineIdMask;
151
152 // Extra fields used for falcon ucodes from VBIOS
153 NvU32 *pSignatures;
154 NvU32 signaturesTotalSize; // size of buffer pointed by pSignatures
155 NvU32 sigSize; // size of one signature
156 NvU32 sigCount;
157
158 NvU32 vbiosSigVersions;
159 NvU32 interfaceOffset;
160 } KernelGspFlcnUcodeBootFromHs;
161
162 /*!
163 * Type of KernelGspFlcnUcode. Used as tag in tagged union KernelGspFlcnUcode.
164 * Affects how the ucode is loaded/booted.
165 */
166 typedef enum KernelGspFlcnUcodeBootType
167 {
168 KGSP_FLCN_UCODE_BOOT_DIRECT,
169 KGSP_FLCN_UCODE_BOOT_WITH_LOADER,
170 KGSP_FLCN_UCODE_BOOT_FROM_HS
171 } KernelGspFlcnUcodeBootType;
172
173 /*!
174 * RPC processing trigger
175 */
176 typedef enum KernelGspRpcEventHandlerContext
177 {
178 KGSP_RPC_EVENT_HANDLER_CONTEXT_POLL, // called after issuing an RPC
179 KGSP_RPC_EVENT_HANDLER_CONTEXT_POLL_BOOTUP, // called from kgspWaitForRmInitDone
180 KGSP_RPC_EVENT_HANDLER_CONTEXT_INTERRUPT // called in bottom-half interrupt path
181 } KernelGspRpcEventHandlerContext;
182
183 /*!
184 * Tagged union of falcon ucode variants used by early FRTS and GSP-RM boot.
185 */
186 typedef struct KernelGspFlcnUcode
187 {
188 KernelGspFlcnUcodeBootType bootType;
189 union
190 {
191 KernelGspFlcnUcodeBootDirect ucodeBootDirect;
192 KernelGspFlcnUcodeBootWithLoader ucodeBootWithLoader;
193 KernelGspFlcnUcodeBootFromHs ucodeBootFromHs;
194 };
195 } KernelGspFlcnUcode;
196
197 /*!
198 * Structure for used for executing a FWSEC command
199 */
200 typedef struct KernelGspPreparedFwsecCmd
201 {
202 KernelGspFlcnUcode *pFwsecUcode;
203 NvU32 cmd;
204 NvU64 frtsOffset;
205 } KernelGspPreparedFwsecCmd;
206
207 /*!
208 * GSP-RM source when running in Emulated/Simulated RISCV environment is
209 * extremely slow, so we need a factor (X) to scale timeouts by.
210 */
211 #define GSP_SCALE_TIMEOUT_EMU_SIM 2500
212
213 /*!
214 * Size of libos init arguments packet.
215 */
216 #define LIBOS_INIT_ARGUMENTS_SIZE 0x1000
217
218 /*!
219 * Structure for passing GSP-RM firmware data
220 */
221 typedef struct GSP_FIRMWARE
222 {
223 const void *pBuf; // buffer holding the firmware (ucode)
224 NvU32 size; // size of the firmware
225 const void *pImageData; // points to the GSP FW image start inside the pBuf buffer
226 NvU64 imageSize; // GSP FW image size inside the pBuf buffer
227 const void *pSignatureData; // points to the GSP FW signature start inside the pBuf buffer
228 NvU64 signatureSize; // GSP FW signature size inside the pBuf buffer
229 const void *pLogElf; // firmware logging section and symbol information to decode logs
230 NvU32 logElfSize; // size of the gsp log elf binary
231 } GSP_FIRMWARE;
232
233 /*!
234 * Known ELF section names (or name prefixes) of gsp_*.bin or gsp_log_*.bin.
235 */
236 #define GSP_VERSION_SECTION_NAME ".fwversion"
237 #define GSP_IMAGE_SECTION_NAME ".fwimage"
238 #define GSP_LOGGING_SECTION_NAME ".fwlogging"
239 #define GSP_SIGNATURE_SECTION_NAME_PREFIX ".fwsignature_"
240 #define GSP_CC_SIGNATURE_SECTION_NAME_PREFIX ".fwsignature_cc_"
241
242 /*!
243 * GSP Notify op infra. Used by UVM in HCC mode.
244 */
245 #define GSP_NOTIFY_OP_RESERVED_OPCODE 0
246 // Request fault buffer flush.
247 #define GSP_NOTIFY_OP_FLUSH_REPLAYABLE_FAULT_BUFFER_OPCODE 1
248 #define GSP_NOTIFY_OP_FLUSH_REPLAYABLE_FAULT_BUFFER_VALID_ARGC 1
249 #define GSP_NOTIFY_OP_FLUSH_REPLAYABLE_FAULT_BUFFER_FLUSH_MODE_ARGIDX 0
250 // Fault on prefetch toggle.
251 #define GSP_NOTIFY_OP_TOGGLE_FAULT_ON_PREFETCH_OPCODE 2
252 #define GSP_NOTIFY_OP_TOGGLE_FAULT_ON_PREFETCH_VALID_ARGC 1
253 #define GSP_NOTIFY_OP_TOGGLE_FAULT_ON_PREFETCH_EN_ARGIDX 0
254 // Always keep this as the last defined value
255 #define GSP_NOTIFY_OP_OPCODE_MAX 3
256 #define GSP_NOTIFY_OP_NO_ARGUMENTS 0
257 #define GSP_NOTIFY_OP_MAX_ARGUMENT_COUNT 1
/*!
 * Shared surface used by KernelRM and GSP to exchange notify-op requests.
 * KernelRM writes the request fields; GSP writes seqNum/status back.
 */
typedef struct NotifyOpSharedSurface
{
    NvU32 inUse;  // 0 - signals free, 1 - signals busy
                  // An atomic swap is issued in a loop over this field from the
                  // KernelRM side to synchronize access to the shared notify op resource.
                  // Once the operation finishes the exiting thread flips the value back to 0.
    NvU32 seqNum; // Read by KernelRM; Written by GSP. Provides synchronization so the
                  // requester knows when the operation is finished by GSP.
    NvU32 opCode; // Written by KernelRM; Read by GSP. Specifies the operation to be performed.
    NvU32 status; // Read by KernelRM; Written by GSP. Specifies the status of the operation.
                  // Becomes valid for the current operation after seqNum is incremented.
    NvU32 argc;   // Written by KernelRM; Read by GSP. Specifies the number of arguments.
    NvU32 args[GSP_NOTIFY_OP_MAX_ARGUMENT_COUNT]; // Written by KernelRM; Read by GSP. Contains a list of NvU32 args used
                                                  // by the operation.
} NotifyOpSharedSurface;
273
274 /*!
275 * Index into libosLogDecode array.
276 */
277 enum
278 {
279 LOGIDX_INIT,
280 LOGIDX_INTR,
281 LOGIDX_RM,
282 LOGIDX_KERNEL,
283 LOGIDX_SIZE
284 };
285
286 /*!
287 * LIBOS task logging.
288 */
289 typedef struct
290 {
291 /* Memory for task logging */
292 MEMORY_DESCRIPTOR *pTaskLogDescriptor;
293 NvU64 *pTaskLogBuffer;
294 NvP64 pTaskLogMappingPriv;
295 NvU64 id8;
296 } RM_LIBOS_LOG_MEM;
297
298 /*!
299 * KernelGsp object definition
300 */
301
302 // Private field names are wrapped in PRIVATE_FIELD, which does nothing for
303 // the matching C source file, but causes diagnostics to be issued if another
304 // source file references the field.
305 #ifdef NVOC_KERNEL_GSP_H_PRIVATE_ACCESS_ALLOWED
306 #define PRIVATE_FIELD(x) x
307 #else
308 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
309 #endif
310
311 struct MESSAGE_QUEUE_COLLECTION;
312
313
314
// NVOC-generated class layout for KernelGsp. Field order, vtable slot order,
// and base-class placement are part of the generated ABI — do not reorder or
// rename members here; regenerate via NVOC instead.
struct KernelGsp {

    // Metadata
    const struct NVOC_RTTI *__nvoc_rtti;

    // Parent (i.e. superclass or base class) object pointers
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct IntrService __nvoc_base_IntrService;
    struct KernelFalcon __nvoc_base_KernelFalcon;

    // Ancestor object pointers for `staticCast` feature
    struct Object *__nvoc_pbase_Object;    // obj super^2
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;    // engstate super
    struct IntrService *__nvoc_pbase_IntrService;    // intrserv super
    struct CrashCatEngine *__nvoc_pbase_CrashCatEngine;    // crashcatEngine super^3
    struct KernelCrashCatEngine *__nvoc_pbase_KernelCrashCatEngine;    // kcrashcatEngine super^2
    struct KernelFalcon *__nvoc_pbase_KernelFalcon;    // kflcn super
    struct KernelGsp *__nvoc_pbase_KernelGsp;    // kgsp

    // Vtable with 82 per-object function pointers
    NV_STATUS (*__kgspConstructEngine__)(struct OBJGPU *, struct KernelGsp * /*this*/, ENGDESCRIPTOR);  // virtual override (engstate) base (engstate)
    NV_STATUS (*__kgspStateInitLocked__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // virtual override (engstate) base (engstate)
    void (*__kgspRegisterIntrService__)(struct OBJGPU *, struct KernelGsp * /*this*/, IntrServiceRecord *);  // virtual override (intrserv) base (intrserv)
    NvU32 (*__kgspServiceInterrupt__)(struct OBJGPU *, struct KernelGsp * /*this*/, IntrServiceServiceInterruptArguments *);  // virtual override (intrserv) base (intrserv)
    void (*__kgspConfigureFalcon__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    NvBool (*__kgspIsDebugModeEnabled__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    NV_STATUS (*__kgspAllocBootArgs__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    void (*__kgspFreeBootArgs__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    void (*__kgspProgramLibosBootArgsAddr__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (2 hals) body
    NV_STATUS (*__kgspSetCmdQueueHead__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32, NvU32);  // halified (2 hals) body
    NV_STATUS (*__kgspPrepareForBootstrap__)(struct OBJGPU *, struct KernelGsp * /*this*/, GSP_FIRMWARE *);  // halified (3 hals) body
    NV_STATUS (*__kgspBootstrap__)(struct OBJGPU *, struct KernelGsp * /*this*/, GSP_FIRMWARE *);  // halified (3 hals) body
    void (*__kgspGetGspRmBootUcodeStorage__)(struct OBJGPU *, struct KernelGsp * /*this*/, BINDATA_STORAGE **, BINDATA_STORAGE **);  // halified (5 hals) body
    const BINDATA_ARCHIVE * (*__kgspGetBinArchiveGspRmBoot__)(struct KernelGsp * /*this*/);  // halified (7 hals) body
    const BINDATA_ARCHIVE * (*__kgspGetBinArchiveConcatenatedFMCDesc__)(struct KernelGsp * /*this*/);  // halified (3 hals) body
    const BINDATA_ARCHIVE * (*__kgspGetBinArchiveConcatenatedFMC__)(struct KernelGsp * /*this*/);  // halified (3 hals) body
    const BINDATA_ARCHIVE * (*__kgspGetBinArchiveGspRmFmcGfwDebugSigned__)(struct KernelGsp * /*this*/);  // halified (4 hals) body
    const BINDATA_ARCHIVE * (*__kgspGetBinArchiveGspRmFmcGfwProdSigned__)(struct KernelGsp * /*this*/);  // halified (4 hals) body
    const BINDATA_ARCHIVE * (*__kgspGetBinArchiveGspRmCcFmcGfwProdSigned__)(struct KernelGsp * /*this*/);  // halified (3 hals) body
    NV_STATUS (*__kgspCalculateFbLayout__)(struct OBJGPU *, struct KernelGsp * /*this*/, GSP_FIRMWARE *);  // halified (3 hals) body
    NvU32 (*__kgspGetNonWprHeapSize__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    NV_STATUS (*__kgspExecuteSequencerCommand__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32, NvU32 *, NvU32);  // halified (3 hals) body
    NvU32 (*__kgspReadUcodeFuseVersion__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32);  // halified (3 hals) body
    NV_STATUS (*__kgspResetHw__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // virtual halified (4 hals) override (kflcn) base (kflcn) body
    NvBool (*__kgspHealthCheck__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (2 hals) body
    NvU32 (*__kgspService__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (2 hals) body
    NvBool (*__kgspIsWpr2Up__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    NvU32 (*__kgspGetFrtsSize__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    NvU64 (*__kgspGetPrescrubbedTopFbSize__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (4 hals) body
    NV_STATUS (*__kgspExtractVbiosFromRom__)(struct OBJGPU *, struct KernelGsp * /*this*/, KernelGspVbiosImg **);  // halified (3 hals) body
    NV_STATUS (*__kgspPrepareForFwsecFrts__)(struct OBJGPU *, struct KernelGsp * /*this*/, KernelGspFlcnUcode *, const NvU64, KernelGspPreparedFwsecCmd *);  // halified (3 hals) body
    NV_STATUS (*__kgspPrepareForFwsecSb__)(struct OBJGPU *, struct KernelGsp * /*this*/, KernelGspFlcnUcode *, KernelGspPreparedFwsecCmd *);  // halified (3 hals) body
    NV_STATUS (*__kgspExecuteFwsec__)(struct OBJGPU *, struct KernelGsp * /*this*/, KernelGspPreparedFwsecCmd *);  // halified (3 hals) body
    NV_STATUS (*__kgspExecuteScrubberIfNeeded__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    NV_STATUS (*__kgspExecuteBooterLoad__)(struct OBJGPU *, struct KernelGsp * /*this*/, const NvU64);  // halified (3 hals) body
    NV_STATUS (*__kgspExecuteBooterUnloadIfNeeded__)(struct OBJGPU *, struct KernelGsp * /*this*/, const NvU64);  // halified (3 hals) body
    NV_STATUS (*__kgspExecuteHsFalcon__)(struct OBJGPU *, struct KernelGsp * /*this*/, KernelGspFlcnUcode *, struct KernelFalcon *, NvU32 *, NvU32 *);  // halified (4 hals) body
    NV_STATUS (*__kgspWaitForProcessorSuspend__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (2 hals) body
    NV_STATUS (*__kgspSavePowerMgmtState__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (2 hals) body
    NV_STATUS (*__kgspRestorePowerMgmtState__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (2 hals) body
    void (*__kgspFreeSuspendResumeData__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (2 hals) body
    NV_STATUS (*__kgspWaitForGfwBootOk__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    const BINDATA_ARCHIVE * (*__kgspGetBinArchiveBooterLoadUcode__)(struct KernelGsp * /*this*/);  // halified (7 hals) body
    const BINDATA_ARCHIVE * (*__kgspGetBinArchiveBooterUnloadUcode__)(struct KernelGsp * /*this*/);  // halified (7 hals) body
    NvU64 (*__kgspGetMinWprHeapSizeMB__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    NvU64 (*__kgspGetMaxWprHeapSizeMB__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    NvU32 (*__kgspGetFwHeapParamOsCarveoutSize__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    NV_STATUS (*__kgspInitVgpuPartitionLogging__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32, NvU64, NvU64, NvU64, NvU64, NvU64, NvU64);  // halified (3 hals) body
    NV_STATUS (*__kgspFreeVgpuPartitionLogging__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32);  // halified (3 hals) body
    const char * (*__kgspGetSignatureSectionNamePrefix__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (4 hals) body
    NV_STATUS (*__kgspSetupGspFmcArgs__)(struct OBJGPU *, struct KernelGsp * /*this*/, GSP_FIRMWARE *);  // halified (3 hals) body
    void (*__kgspReadEmem__)(struct KernelGsp * /*this*/, NvU64, NvU64, void *);  // virtual halified (2 hals) override (kcrashcatEngine) base (kflcn) body
    NV_STATUS (*__kgspIssueNotifyOp__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32, NvU32 *, NvU32);  // halified (3 hals) body
    NV_STATUS (*__kgspCheckGspRmCcCleanup__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // halified (3 hals) body
    void (*__kgspInitMissing__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kgspStatePreInitLocked__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kgspStatePreInitUnlocked__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kgspStateInitUnlocked__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kgspStatePreLoad__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kgspStateLoad__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kgspStatePostLoad__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kgspStatePreUnload__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kgspStateUnload__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__kgspStatePostUnload__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32);  // virtual inherited (engstate) base (engstate)
    void (*__kgspStateDestroy__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // virtual inherited (engstate) base (engstate)
    NvBool (*__kgspIsPresent__)(struct OBJGPU *, struct KernelGsp * /*this*/);  // virtual inherited (engstate) base (engstate)
    NvBool (*__kgspClearInterrupt__)(struct OBJGPU *, struct KernelGsp * /*this*/, IntrServiceClearInterruptArguments *);  // virtual inherited (intrserv) base (intrserv)
    NV_STATUS (*__kgspServiceNotificationInterrupt__)(struct OBJGPU *, struct KernelGsp * /*this*/, IntrServiceServiceNotificationInterruptArguments *);  // virtual inherited (intrserv) base (intrserv)
    NvU32 (*__kgspRegRead__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32);  // virtual halified (2 hals) inherited (kflcn) base (kflcn) body
    void (*__kgspRegWrite__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32, NvU32);  // virtual halified (2 hals) inherited (kflcn) base (kflcn) body
    NvU32 (*__kgspMaskDmemAddr__)(struct OBJGPU *, struct KernelGsp * /*this*/, NvU32);  // virtual halified (3 hals) inherited (kflcn) base (kflcn) body
    NvBool (*__kgspConfigured__)(struct KernelGsp * /*this*/);  // virtual inherited (kcrashcatEngine) base (kflcn)
    void (*__kgspUnload__)(struct KernelGsp * /*this*/);  // virtual inherited (kcrashcatEngine) base (kflcn)
    void (*__kgspVprintf__)(struct KernelGsp * /*this*/, NvBool, const char *, va_list);  // virtual inherited (kcrashcatEngine) base (kflcn)
    NvU32 (*__kgspPriRead__)(struct KernelGsp * /*this*/, NvU32);  // virtual inherited (kcrashcatEngine) base (kflcn)
    void (*__kgspPriWrite__)(struct KernelGsp * /*this*/, NvU32, NvU32);  // virtual inherited (kcrashcatEngine) base (kflcn)
    void * (*__kgspMapBufferDescriptor__)(struct KernelGsp * /*this*/, CrashCatBufferDescriptor *);  // virtual inherited (kcrashcatEngine) base (kflcn)
    void (*__kgspUnmapBufferDescriptor__)(struct KernelGsp * /*this*/, CrashCatBufferDescriptor *);  // virtual inherited (kcrashcatEngine) base (kflcn)
    void (*__kgspSyncBufferDescriptor__)(struct KernelGsp * /*this*/, CrashCatBufferDescriptor *, NvU32, NvU32);  // virtual inherited (kcrashcatEngine) base (kflcn)
    void (*__kgspReadDmem__)(struct KernelGsp * /*this*/, NvU32, NvU32, void *);  // virtual halified (singleton optimized) inherited (kcrashcatEngine) base (kflcn)
    const NvU32 * (*__kgspGetScratchOffsets__)(struct KernelGsp * /*this*/, NV_CRASHCAT_SCRATCH_GROUP_ID);  // virtual halified (singleton optimized) inherited (kcrashcatEngine) base (kflcn)
    NvU32 (*__kgspGetWFL0Offset__)(struct KernelGsp * /*this*/);  // virtual halified (singleton optimized) inherited (kcrashcatEngine) base (kflcn)

    // Data members
    struct MESSAGE_QUEUE_COLLECTION *pMQCollection;   // KernelRM <-> GSP message queues
    struct OBJRPC *pRpc;                              // RPC channel to GSP-RM
    char vbiosVersionStr[16];
    KernelGspFlcnUcode *pFwsecUcode;                  // FWSEC ucode (FRTS / SB)
    KernelGspFlcnUcode *pScrubberUcode;
    KernelGspFlcnUcode *pBooterLoadUcode;
    KernelGspFlcnUcode *pBooterUnloadUcode;
    MEMORY_DESCRIPTOR *pWprMetaDescriptor;
    GspFwWprMeta *pWprMeta;
    NvP64 pWprMetaMappingPriv;
    KernelGspPreparedFwsecCmd *pPreparedFwsecCmd;
    MEMORY_DESCRIPTOR *pSRMetaDescriptor;             // suspend/resume metadata
    MEMORY_DESCRIPTOR *pSRRadix3Descriptor;
    MEMORY_DESCRIPTOR *pGspFmcArgumentsDescriptor;
    GSP_FMC_BOOT_PARAMS *pGspFmcArgumentsCached;
    NvP64 pGspFmcArgumentsMappingPriv;
    MEMORY_DESCRIPTOR *pLibosInitArgumentsDescriptor;
    LibosMemoryRegionInitArgument *pLibosInitArgumentsCached;
    NvP64 pLibosInitArgumentsMappingPriv;
    MEMORY_DESCRIPTOR *pGspArgumentsDescriptor;
    GSP_ARGUMENTS_CACHED *pGspArgumentsCached;
    NvP64 pGspArgumentsMappingPriv;
    MEMORY_DESCRIPTOR *pGspRmBootUcodeMemdesc;
    NvP64 pGspRmBootUcodeMemdescPriv;
    NvU32 gspRmBootUcodeSize;
    NvU8 *pGspRmBootUcodeImage;
    RM_RISCV_UCODE_DESC *pGspRmBootUcodeDesc;
    MEMORY_DESCRIPTOR *pGspUCodeRadix3Descriptor;
    MEMORY_DESCRIPTOR *pSignatureMemdesc;
    LIBOS_LOG_DECODE logDecode;                       // GSP-RM log decoding state
    LIBOS_LOG_DECODE logDecodeVgpuPartition[32];      // per-vGPU-partition log decoding state
    RM_LIBOS_LOG_MEM rmLibosLogMem[4];                // one entry per LOGIDX_* (see enum above)
    RM_LIBOS_LOG_MEM gspPluginInitTaskLogMem[32];
    RM_LIBOS_LOG_MEM gspPluginVgpuTaskLogMem[32];
    RM_LIBOS_LOG_MEM libosKernelLogMem[32];
    NvBool bHasVgpuLogs;
    void *pLogElf;
    NvU64 logElfDataSize;
    PORT_MUTEX *pNvlogFlushMtx;
    NvBool bLibosLogsPollingEnabled;
    NvU8 bootAttempts;
    NvBool bInInit;
    NvBool bInLockdown;
    NvBool bPollingForRpcResponse;
    NvBool bFatalError;
    MEMORY_DESCRIPTOR *pMemDesc_simAccessBuf;         // simulation-only access buffer
    SimAccessBuffer *pSimAccessBuf;
    NvP64 pSimAccessBufPriv;
    MEMORY_DESCRIPTOR *pNotifyOpSurfMemDesc;          // notify-op shared surface (see NotifyOpSharedSurface)
    NotifyOpSharedSurface *pNotifyOpSurf;
    NvP64 pNotifyOpSurfPriv;
    MEMORY_DESCRIPTOR *pProfilerSamplesMD;
    void *pProfilerSamplesMDPriv;
    void *pProfilerSamples;
    GspStaticConfigInfo gspStaticInfo;                // static config reported by GSP-RM
    NvBool bPartitionedFmc;
    NvBool bScrubberUcodeSupported;
    NvU32 fwHeapParamBaseSize;
    NvBool bBootGspRmWithBoostClocks;
    NvU8 ememPort;
};
480
// Public typedef for the KernelGsp class (guarded so multiple generated
// headers can declare it).
#ifndef __NVOC_CLASS_KernelGsp_TYPEDEF__
#define __NVOC_CLASS_KernelGsp_TYPEDEF__
typedef struct KernelGsp KernelGsp;
#endif /* __NVOC_CLASS_KernelGsp_TYPEDEF__ */

// NVOC class id for KernelGsp used by runtime type identification.
#ifndef __nvoc_class_id_KernelGsp
#define __nvoc_class_id_KernelGsp 0x311d4e
#endif /* __nvoc_class_id_KernelGsp */

// Casting support
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelGsp;

// Upcast via the precomputed ancestor pointer (no runtime check).
#define __staticCast_KernelGsp(pThis) \
    ((pThis)->__nvoc_pbase_KernelGsp)

// Downcast with runtime RTTI check; compiles to NULL when the class is disabled.
#ifdef __nvoc_kernel_gsp_h_disabled
#define __dynamicCast_KernelGsp(pThis) ((KernelGsp*)NULL)
#else //__nvoc_kernel_gsp_h_disabled
#define __dynamicCast_KernelGsp(pThis) \
    ((KernelGsp*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelGsp)))
#endif //__nvoc_kernel_gsp_h_disabled

// Property macros
#define PDB_PROP_KGSP_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KGSP_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING

NV_STATUS __nvoc_objCreateDynamic_KernelGsp(KernelGsp**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelGsp(KernelGsp**, Dynamic*, NvU32);
#define __objCreate_KernelGsp(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelGsp((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
512
513
// Wrapper macros
//
// For each virtual/halified method the generator emits:
//   <name>_FNPTR(obj)  - accessor for the per-object function pointer slot
//   <name>(...)        - call through the _DISPATCH thunk
//   <name>_HAL(...)    - HAL-variant entry point; for these methods it routes
//                        through the same _DISPATCH thunk
#define kgspConstructEngine_FNPTR(pKernelGsp) pKernelGsp->__kgspConstructEngine__
#define kgspConstructEngine(pGpu, pKernelGsp, arg3) kgspConstructEngine_DISPATCH(pGpu, pKernelGsp, arg3)
#define kgspStateInitLocked_FNPTR(pKernelGsp) pKernelGsp->__kgspStateInitLocked__
#define kgspStateInitLocked(pGpu, pKernelGsp) kgspStateInitLocked_DISPATCH(pGpu, pKernelGsp)
#define kgspRegisterIntrService_FNPTR(pKernelGsp) pKernelGsp->__kgspRegisterIntrService__
#define kgspRegisterIntrService(pGpu, pKernelGsp, pRecords) kgspRegisterIntrService_DISPATCH(pGpu, pKernelGsp, pRecords)
#define kgspServiceInterrupt_FNPTR(pKernelGsp) pKernelGsp->__kgspServiceInterrupt__
#define kgspServiceInterrupt(pGpu, pKernelGsp, pParams) kgspServiceInterrupt_DISPATCH(pGpu, pKernelGsp, pParams)
#define kgspConfigureFalcon_FNPTR(pKernelGsp) pKernelGsp->__kgspConfigureFalcon__
#define kgspConfigureFalcon(pGpu, pKernelGsp) kgspConfigureFalcon_DISPATCH(pGpu, pKernelGsp)
#define kgspConfigureFalcon_HAL(pGpu, pKernelGsp) kgspConfigureFalcon_DISPATCH(pGpu, pKernelGsp)
#define kgspIsDebugModeEnabled_FNPTR(pKernelGsp) pKernelGsp->__kgspIsDebugModeEnabled__
#define kgspIsDebugModeEnabled(pGpu, pKernelGsp) kgspIsDebugModeEnabled_DISPATCH(pGpu, pKernelGsp)
#define kgspIsDebugModeEnabled_HAL(pGpu, pKernelGsp) kgspIsDebugModeEnabled_DISPATCH(pGpu, pKernelGsp)
#define kgspAllocBootArgs_FNPTR(pKernelGsp) pKernelGsp->__kgspAllocBootArgs__
#define kgspAllocBootArgs(pGpu, pKernelGsp) kgspAllocBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspAllocBootArgs_HAL(pGpu, pKernelGsp) kgspAllocBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspFreeBootArgs_FNPTR(pKernelGsp) pKernelGsp->__kgspFreeBootArgs__
#define kgspFreeBootArgs(pGpu, pKernelGsp) kgspFreeBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspFreeBootArgs_HAL(pGpu, pKernelGsp) kgspFreeBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspProgramLibosBootArgsAddr_FNPTR(pKernelGsp) pKernelGsp->__kgspProgramLibosBootArgsAddr__
#define kgspProgramLibosBootArgsAddr(pGpu, pKernelGsp) kgspProgramLibosBootArgsAddr_DISPATCH(pGpu, pKernelGsp)
#define kgspProgramLibosBootArgsAddr_HAL(pGpu, pKernelGsp) kgspProgramLibosBootArgsAddr_DISPATCH(pGpu, pKernelGsp)
#define kgspSetCmdQueueHead_FNPTR(pKernelGsp) pKernelGsp->__kgspSetCmdQueueHead__
#define kgspSetCmdQueueHead(pGpu, pKernelGsp, queueIdx, value) kgspSetCmdQueueHead_DISPATCH(pGpu, pKernelGsp, queueIdx, value)
#define kgspSetCmdQueueHead_HAL(pGpu, pKernelGsp, queueIdx, value) kgspSetCmdQueueHead_DISPATCH(pGpu, pKernelGsp, queueIdx, value)
#define kgspPrepareForBootstrap_FNPTR(pKernelGsp) pKernelGsp->__kgspPrepareForBootstrap__
#define kgspPrepareForBootstrap(pGpu, pKernelGsp, pGspFw) kgspPrepareForBootstrap_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspPrepareForBootstrap_HAL(pGpu, pKernelGsp, pGspFw) kgspPrepareForBootstrap_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspBootstrap_FNPTR(pKernelGsp) pKernelGsp->__kgspBootstrap__
#define kgspBootstrap(pGpu, pKernelGsp, pGspFw) kgspBootstrap_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspBootstrap_HAL(pGpu, pKernelGsp, pGspFw) kgspBootstrap_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspGetGspRmBootUcodeStorage_FNPTR(pKernelGsp) pKernelGsp->__kgspGetGspRmBootUcodeStorage__
#define kgspGetGspRmBootUcodeStorage(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc) kgspGetGspRmBootUcodeStorage_DISPATCH(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc)
#define kgspGetGspRmBootUcodeStorage_HAL(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc) kgspGetGspRmBootUcodeStorage_DISPATCH(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc)
#define kgspGetBinArchiveGspRmBoot_FNPTR(pKernelGsp) pKernelGsp->__kgspGetBinArchiveGspRmBoot__
#define kgspGetBinArchiveGspRmBoot(pKernelGsp) kgspGetBinArchiveGspRmBoot_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmBoot_HAL(pKernelGsp) kgspGetBinArchiveGspRmBoot_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMCDesc_FNPTR(pKernelGsp) pKernelGsp->__kgspGetBinArchiveConcatenatedFMCDesc__
#define kgspGetBinArchiveConcatenatedFMCDesc(pKernelGsp) kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMCDesc_HAL(pKernelGsp) kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMC_FNPTR(pKernelGsp) pKernelGsp->__kgspGetBinArchiveConcatenatedFMC__
#define kgspGetBinArchiveConcatenatedFMC(pKernelGsp) kgspGetBinArchiveConcatenatedFMC_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMC_HAL(pKernelGsp) kgspGetBinArchiveConcatenatedFMC_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwDebugSigned_FNPTR(pKernelGsp) pKernelGsp->__kgspGetBinArchiveGspRmFmcGfwDebugSigned__
#define kgspGetBinArchiveGspRmFmcGfwDebugSigned(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwDebugSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwProdSigned_FNPTR(pKernelGsp) pKernelGsp->__kgspGetBinArchiveGspRmFmcGfwProdSigned__
#define kgspGetBinArchiveGspRmFmcGfwProdSigned(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwProdSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmCcFmcGfwProdSigned_FNPTR(pKernelGsp) pKernelGsp->__kgspGetBinArchiveGspRmCcFmcGfwProdSigned__
#define kgspGetBinArchiveGspRmCcFmcGfwProdSigned(pKernelGsp) kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmCcFmcGfwProdSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspCalculateFbLayout_FNPTR(pKernelGsp) pKernelGsp->__kgspCalculateFbLayout__
#define kgspCalculateFbLayout(pGpu, pKernelGsp, pGspFw) kgspCalculateFbLayout_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspCalculateFbLayout_HAL(pGpu, pKernelGsp, pGspFw) kgspCalculateFbLayout_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspGetNonWprHeapSize_FNPTR(pKernelGsp) pKernelGsp->__kgspGetNonWprHeapSize__
#define kgspGetNonWprHeapSize(pGpu, pKernelGsp) kgspGetNonWprHeapSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetNonWprHeapSize_HAL(pGpu, pKernelGsp) kgspGetNonWprHeapSize_DISPATCH(pGpu, pKernelGsp)
#define kgspExecuteSequencerCommand_FNPTR(pKernelGsp) pKernelGsp->__kgspExecuteSequencerCommand__
#define kgspExecuteSequencerCommand(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize) kgspExecuteSequencerCommand_DISPATCH(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize)
#define kgspExecuteSequencerCommand_HAL(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize) kgspExecuteSequencerCommand_DISPATCH(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize)
#define kgspReadUcodeFuseVersion_FNPTR(pKernelGsp) pKernelGsp->__kgspReadUcodeFuseVersion__
#define kgspReadUcodeFuseVersion(pGpu, pKernelGsp, ucodeId) kgspReadUcodeFuseVersion_DISPATCH(pGpu, pKernelGsp, ucodeId)
#define kgspReadUcodeFuseVersion_HAL(pGpu, pKernelGsp, ucodeId) kgspReadUcodeFuseVersion_DISPATCH(pGpu, pKernelGsp, ucodeId)
#define kgspResetHw_FNPTR(pKernelGsp) pKernelGsp->__kgspResetHw__
#define kgspResetHw(pGpu, pKernelGsp) kgspResetHw_DISPATCH(pGpu, pKernelGsp)
#define kgspResetHw_HAL(pGpu, pKernelGsp) kgspResetHw_DISPATCH(pGpu, pKernelGsp)
#define kgspHealthCheck_FNPTR(pKernelGsp) pKernelGsp->__kgspHealthCheck__
#define kgspHealthCheck(pGpu, pKernelGsp) kgspHealthCheck_DISPATCH(pGpu, pKernelGsp)
#define kgspHealthCheck_HAL(pGpu, pKernelGsp) kgspHealthCheck_DISPATCH(pGpu, pKernelGsp)
#define kgspService_FNPTR(pKernelGsp) pKernelGsp->__kgspService__
#define kgspService(pGpu, pKernelGsp) kgspService_DISPATCH(pGpu, pKernelGsp)
#define kgspService_HAL(pGpu, pKernelGsp) kgspService_DISPATCH(pGpu, pKernelGsp)
#define kgspIsWpr2Up_FNPTR(pKernelGsp) pKernelGsp->__kgspIsWpr2Up__
#define kgspIsWpr2Up(pGpu, pKernelGsp) kgspIsWpr2Up_DISPATCH(pGpu, pKernelGsp)
#define kgspIsWpr2Up_HAL(pGpu, pKernelGsp) kgspIsWpr2Up_DISPATCH(pGpu, pKernelGsp)
#define kgspGetFrtsSize_FNPTR(pKernelGsp) pKernelGsp->__kgspGetFrtsSize__
#define kgspGetFrtsSize(pGpu, pKernelGsp) kgspGetFrtsSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetFrtsSize_HAL(pGpu, pKernelGsp) kgspGetFrtsSize_DISPATCH(pGpu, pKernelGsp)
595 #define kgspGetPrescrubbedTopFbSize_FNPTR(pKernelGsp) pKernelGsp->__kgspGetPrescrubbedTopFbSize__
596 #define kgspGetPrescrubbedTopFbSize(pGpu, pKernelGsp) kgspGetPrescrubbedTopFbSize_DISPATCH(pGpu, pKernelGsp)
597 #define kgspGetPrescrubbedTopFbSize_HAL(pGpu, pKernelGsp) kgspGetPrescrubbedTopFbSize_DISPATCH(pGpu, pKernelGsp)
598 #define kgspExtractVbiosFromRom_FNPTR(pKernelGsp) pKernelGsp->__kgspExtractVbiosFromRom__
599 #define kgspExtractVbiosFromRom(pGpu, pKernelGsp, ppVbiosImg) kgspExtractVbiosFromRom_DISPATCH(pGpu, pKernelGsp, ppVbiosImg)
600 #define kgspExtractVbiosFromRom_HAL(pGpu, pKernelGsp, ppVbiosImg) kgspExtractVbiosFromRom_DISPATCH(pGpu, pKernelGsp, ppVbiosImg)
601 #define kgspPrepareForFwsecFrts_FNPTR(pKernelGsp) pKernelGsp->__kgspPrepareForFwsecFrts__
602 #define kgspPrepareForFwsecFrts(pGpu, pKernelGsp, pFwsecUcode, frtsOffset, preparedCmd) kgspPrepareForFwsecFrts_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, frtsOffset, preparedCmd)
603 #define kgspPrepareForFwsecFrts_HAL(pGpu, pKernelGsp, pFwsecUcode, frtsOffset, preparedCmd) kgspPrepareForFwsecFrts_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, frtsOffset, preparedCmd)
604 #define kgspPrepareForFwsecSb_FNPTR(pKernelGsp) pKernelGsp->__kgspPrepareForFwsecSb__
605 #define kgspPrepareForFwsecSb(pGpu, pKernelGsp, pFwsecUcode, preparedCmd) kgspPrepareForFwsecSb_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, preparedCmd)
606 #define kgspPrepareForFwsecSb_HAL(pGpu, pKernelGsp, pFwsecUcode, preparedCmd) kgspPrepareForFwsecSb_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, preparedCmd)
607 #define kgspExecuteFwsec_FNPTR(pKernelGsp) pKernelGsp->__kgspExecuteFwsec__
608 #define kgspExecuteFwsec(pGpu, pKernelGsp, preparedCmd) kgspExecuteFwsec_DISPATCH(pGpu, pKernelGsp, preparedCmd)
609 #define kgspExecuteFwsec_HAL(pGpu, pKernelGsp, preparedCmd) kgspExecuteFwsec_DISPATCH(pGpu, pKernelGsp, preparedCmd)
610 #define kgspExecuteScrubberIfNeeded_FNPTR(pKernelGsp) pKernelGsp->__kgspExecuteScrubberIfNeeded__
611 #define kgspExecuteScrubberIfNeeded(pGpu, pKernelGsp) kgspExecuteScrubberIfNeeded_DISPATCH(pGpu, pKernelGsp)
612 #define kgspExecuteScrubberIfNeeded_HAL(pGpu, pKernelGsp) kgspExecuteScrubberIfNeeded_DISPATCH(pGpu, pKernelGsp)
613 #define kgspExecuteBooterLoad_FNPTR(pKernelGsp) pKernelGsp->__kgspExecuteBooterLoad__
614 #define kgspExecuteBooterLoad(pGpu, pKernelGsp, sysmemAddrOfData) kgspExecuteBooterLoad_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfData)
615 #define kgspExecuteBooterLoad_HAL(pGpu, pKernelGsp, sysmemAddrOfData) kgspExecuteBooterLoad_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfData)
616 #define kgspExecuteBooterUnloadIfNeeded_FNPTR(pKernelGsp) pKernelGsp->__kgspExecuteBooterUnloadIfNeeded__
617 #define kgspExecuteBooterUnloadIfNeeded(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData) kgspExecuteBooterUnloadIfNeeded_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData)
618 #define kgspExecuteBooterUnloadIfNeeded_HAL(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData) kgspExecuteBooterUnloadIfNeeded_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData)
619 #define kgspExecuteHsFalcon_FNPTR(pKernelGsp) pKernelGsp->__kgspExecuteHsFalcon__
620 #define kgspExecuteHsFalcon(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1) kgspExecuteHsFalcon_DISPATCH(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1)
621 #define kgspExecuteHsFalcon_HAL(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1) kgspExecuteHsFalcon_DISPATCH(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1)
622 #define kgspWaitForProcessorSuspend_FNPTR(pKernelGsp) pKernelGsp->__kgspWaitForProcessorSuspend__
623 #define kgspWaitForProcessorSuspend(pGpu, pKernelGsp) kgspWaitForProcessorSuspend_DISPATCH(pGpu, pKernelGsp)
624 #define kgspWaitForProcessorSuspend_HAL(pGpu, pKernelGsp) kgspWaitForProcessorSuspend_DISPATCH(pGpu, pKernelGsp)
625 #define kgspSavePowerMgmtState_FNPTR(pKernelGsp) pKernelGsp->__kgspSavePowerMgmtState__
626 #define kgspSavePowerMgmtState(pGpu, pKernelGsp) kgspSavePowerMgmtState_DISPATCH(pGpu, pKernelGsp)
627 #define kgspSavePowerMgmtState_HAL(pGpu, pKernelGsp) kgspSavePowerMgmtState_DISPATCH(pGpu, pKernelGsp)
628 #define kgspRestorePowerMgmtState_FNPTR(pKernelGsp) pKernelGsp->__kgspRestorePowerMgmtState__
629 #define kgspRestorePowerMgmtState(pGpu, pKernelGsp) kgspRestorePowerMgmtState_DISPATCH(pGpu, pKernelGsp)
630 #define kgspRestorePowerMgmtState_HAL(pGpu, pKernelGsp) kgspRestorePowerMgmtState_DISPATCH(pGpu, pKernelGsp)
631 #define kgspFreeSuspendResumeData_FNPTR(pKernelGsp) pKernelGsp->__kgspFreeSuspendResumeData__
632 #define kgspFreeSuspendResumeData(pGpu, pKernelGsp) kgspFreeSuspendResumeData_DISPATCH(pGpu, pKernelGsp)
633 #define kgspFreeSuspendResumeData_HAL(pGpu, pKernelGsp) kgspFreeSuspendResumeData_DISPATCH(pGpu, pKernelGsp)
634 #define kgspWaitForGfwBootOk_FNPTR(pKernelGsp) pKernelGsp->__kgspWaitForGfwBootOk__
635 #define kgspWaitForGfwBootOk(pGpu, pKernelGsp) kgspWaitForGfwBootOk_DISPATCH(pGpu, pKernelGsp)
636 #define kgspWaitForGfwBootOk_HAL(pGpu, pKernelGsp) kgspWaitForGfwBootOk_DISPATCH(pGpu, pKernelGsp)
637 #define kgspGetBinArchiveBooterLoadUcode_FNPTR(pKernelGsp) pKernelGsp->__kgspGetBinArchiveBooterLoadUcode__
638 #define kgspGetBinArchiveBooterLoadUcode(pKernelGsp) kgspGetBinArchiveBooterLoadUcode_DISPATCH(pKernelGsp)
639 #define kgspGetBinArchiveBooterLoadUcode_HAL(pKernelGsp) kgspGetBinArchiveBooterLoadUcode_DISPATCH(pKernelGsp)
640 #define kgspGetBinArchiveBooterUnloadUcode_FNPTR(pKernelGsp) pKernelGsp->__kgspGetBinArchiveBooterUnloadUcode__
641 #define kgspGetBinArchiveBooterUnloadUcode(pKernelGsp) kgspGetBinArchiveBooterUnloadUcode_DISPATCH(pKernelGsp)
642 #define kgspGetBinArchiveBooterUnloadUcode_HAL(pKernelGsp) kgspGetBinArchiveBooterUnloadUcode_DISPATCH(pKernelGsp)
643 #define kgspGetMinWprHeapSizeMB_FNPTR(pKernelGsp) pKernelGsp->__kgspGetMinWprHeapSizeMB__
644 #define kgspGetMinWprHeapSizeMB(pGpu, pKernelGsp) kgspGetMinWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
645 #define kgspGetMinWprHeapSizeMB_HAL(pGpu, pKernelGsp) kgspGetMinWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
646 #define kgspGetMaxWprHeapSizeMB_FNPTR(pKernelGsp) pKernelGsp->__kgspGetMaxWprHeapSizeMB__
647 #define kgspGetMaxWprHeapSizeMB(pGpu, pKernelGsp) kgspGetMaxWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
648 #define kgspGetMaxWprHeapSizeMB_HAL(pGpu, pKernelGsp) kgspGetMaxWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
649 #define kgspGetFwHeapParamOsCarveoutSize_FNPTR(pKernelGsp) pKernelGsp->__kgspGetFwHeapParamOsCarveoutSize__
650 #define kgspGetFwHeapParamOsCarveoutSize(pGpu, pKernelGsp) kgspGetFwHeapParamOsCarveoutSize_DISPATCH(pGpu, pKernelGsp)
651 #define kgspGetFwHeapParamOsCarveoutSize_HAL(pGpu, pKernelGsp) kgspGetFwHeapParamOsCarveoutSize_DISPATCH(pGpu, pKernelGsp)
652 #define kgspInitVgpuPartitionLogging_FNPTR(pKernelGsp) pKernelGsp->__kgspInitVgpuPartitionLogging__
653 #define kgspInitVgpuPartitionLogging(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize, kernelLogBuffOffset, kernelLogBuffSize) kgspInitVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize, kernelLogBuffOffset, kernelLogBuffSize)
654 #define kgspInitVgpuPartitionLogging_HAL(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize, kernelLogBuffOffset, kernelLogBuffSize) kgspInitVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize, kernelLogBuffOffset, kernelLogBuffSize)
655 #define kgspFreeVgpuPartitionLogging_FNPTR(pKernelGsp) pKernelGsp->__kgspFreeVgpuPartitionLogging__
656 #define kgspFreeVgpuPartitionLogging(pGpu, pKernelGsp, gfid) kgspFreeVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid)
657 #define kgspFreeVgpuPartitionLogging_HAL(pGpu, pKernelGsp, gfid) kgspFreeVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid)
658 #define kgspGetSignatureSectionNamePrefix_FNPTR(pKernelGsp) pKernelGsp->__kgspGetSignatureSectionNamePrefix__
659 #define kgspGetSignatureSectionNamePrefix(pGpu, pKernelGsp) kgspGetSignatureSectionNamePrefix_DISPATCH(pGpu, pKernelGsp)
660 #define kgspGetSignatureSectionNamePrefix_HAL(pGpu, pKernelGsp) kgspGetSignatureSectionNamePrefix_DISPATCH(pGpu, pKernelGsp)
661 #define kgspSetupGspFmcArgs_FNPTR(pKernelGsp) pKernelGsp->__kgspSetupGspFmcArgs__
662 #define kgspSetupGspFmcArgs(pGpu, pKernelGsp, pGspFw) kgspSetupGspFmcArgs_DISPATCH(pGpu, pKernelGsp, pGspFw)
663 #define kgspSetupGspFmcArgs_HAL(pGpu, pKernelGsp, pGspFw) kgspSetupGspFmcArgs_DISPATCH(pGpu, pKernelGsp, pGspFw)
664 #define kgspReadEmem_FNPTR(pKernelGsp) pKernelGsp->__kgspReadEmem__
665 #define kgspReadEmem(pKernelGsp, offset, size, pBuf) kgspReadEmem_DISPATCH(pKernelGsp, offset, size, pBuf)
666 #define kgspReadEmem_HAL(pKernelGsp, offset, size, pBuf) kgspReadEmem_DISPATCH(pKernelGsp, offset, size, pBuf)
667 #define kgspIssueNotifyOp_FNPTR(pKernelGsp) pKernelGsp->__kgspIssueNotifyOp__
668 #define kgspIssueNotifyOp(pGpu, pKernelGsp, opCode, pArgs, argc) kgspIssueNotifyOp_DISPATCH(pGpu, pKernelGsp, opCode, pArgs, argc)
669 #define kgspIssueNotifyOp_HAL(pGpu, pKernelGsp, opCode, pArgs, argc) kgspIssueNotifyOp_DISPATCH(pGpu, pKernelGsp, opCode, pArgs, argc)
670 #define kgspCheckGspRmCcCleanup_FNPTR(pKernelGsp) pKernelGsp->__kgspCheckGspRmCcCleanup__
671 #define kgspCheckGspRmCcCleanup(pGpu, pKernelGsp) kgspCheckGspRmCcCleanup_DISPATCH(pGpu, pKernelGsp)
672 #define kgspCheckGspRmCcCleanup_HAL(pGpu, pKernelGsp) kgspCheckGspRmCcCleanup_DISPATCH(pGpu, pKernelGsp)
//
// Forwarding macros for methods KernelGsp inherits from its base classes.
// The _FNPTR macros index the vtable slot inside the embedded base-class
// structure: __nvoc_base_OBJENGSTATE, __nvoc_base_IntrService,
// __nvoc_base_KernelFalcon, and the KernelCrashCatEngine nested inside
// KernelFalcon. The call macros route through the matching _DISPATCH inline.
//
// OBJENGSTATE (engine state lifecycle) methods:
#define kgspInitMissing_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateInitMissing__
#define kgspInitMissing(pGpu, pEngstate) kgspInitMissing_DISPATCH(pGpu, pEngstate)
#define kgspStatePreInitLocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreInitLocked__
#define kgspStatePreInitLocked(pGpu, pEngstate) kgspStatePreInitLocked_DISPATCH(pGpu, pEngstate)
#define kgspStatePreInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreInitUnlocked__
#define kgspStatePreInitUnlocked(pGpu, pEngstate) kgspStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kgspStateInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStateInitUnlocked__
#define kgspStateInitUnlocked(pGpu, pEngstate) kgspStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kgspStatePreLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreLoad__
#define kgspStatePreLoad(pGpu, pEngstate, arg3) kgspStatePreLoad_DISPATCH(pGpu, pEngstate, arg3)
#define kgspStateLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStateLoad__
#define kgspStateLoad(pGpu, pEngstate, arg3) kgspStateLoad_DISPATCH(pGpu, pEngstate, arg3)
#define kgspStatePostLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePostLoad__
#define kgspStatePostLoad(pGpu, pEngstate, arg3) kgspStatePostLoad_DISPATCH(pGpu, pEngstate, arg3)
#define kgspStatePreUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreUnload__
#define kgspStatePreUnload(pGpu, pEngstate, arg3) kgspStatePreUnload_DISPATCH(pGpu, pEngstate, arg3)
#define kgspStateUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStateUnload__
#define kgspStateUnload(pGpu, pEngstate, arg3) kgspStateUnload_DISPATCH(pGpu, pEngstate, arg3)
#define kgspStatePostUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePostUnload__
#define kgspStatePostUnload(pGpu, pEngstate, arg3) kgspStatePostUnload_DISPATCH(pGpu, pEngstate, arg3)
#define kgspStateDestroy_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStateDestroy__
#define kgspStateDestroy(pGpu, pEngstate) kgspStateDestroy_DISPATCH(pGpu, pEngstate)
#define kgspIsPresent_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateIsPresent__
#define kgspIsPresent(pGpu, pEngstate) kgspIsPresent_DISPATCH(pGpu, pEngstate)
// IntrService (interrupt servicing) methods:
#define kgspClearInterrupt_FNPTR(pIntrService) pIntrService->__nvoc_base_IntrService.__intrservClearInterrupt__
#define kgspClearInterrupt(pGpu, pIntrService, pParams) kgspClearInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define kgspServiceNotificationInterrupt_FNPTR(pIntrService) pIntrService->__nvoc_base_IntrService.__intrservServiceNotificationInterrupt__
#define kgspServiceNotificationInterrupt(pGpu, pIntrService, pParams) kgspServiceNotificationInterrupt_DISPATCH(pGpu, pIntrService, pParams)
// KernelFalcon (falcon register access) methods:
#define kgspRegRead_FNPTR(pKernelFlcn) pKernelFlcn->__nvoc_base_KernelFalcon.__kflcnRegRead__
#define kgspRegRead(pGpu, pKernelFlcn, offset) kgspRegRead_DISPATCH(pGpu, pKernelFlcn, offset)
#define kgspRegRead_HAL(pGpu, pKernelFlcn, offset) kgspRegRead_DISPATCH(pGpu, pKernelFlcn, offset)
#define kgspRegWrite_FNPTR(pKernelFlcn) pKernelFlcn->__nvoc_base_KernelFalcon.__kflcnRegWrite__
#define kgspRegWrite(pGpu, pKernelFlcn, offset, data) kgspRegWrite_DISPATCH(pGpu, pKernelFlcn, offset, data)
#define kgspRegWrite_HAL(pGpu, pKernelFlcn, offset, data) kgspRegWrite_DISPATCH(pGpu, pKernelFlcn, offset, data)
#define kgspMaskDmemAddr_FNPTR(pKernelFlcn) pKernelFlcn->__nvoc_base_KernelFalcon.__kflcnMaskDmemAddr__
#define kgspMaskDmemAddr(pGpu, pKernelFlcn, addr) kgspMaskDmemAddr_DISPATCH(pGpu, pKernelFlcn, addr)
#define kgspMaskDmemAddr_HAL(pGpu, pKernelFlcn, addr) kgspMaskDmemAddr_DISPATCH(pGpu, pKernelFlcn, addr)
// KernelCrashCatEngine (crash report engine, nested in KernelFalcon) methods:
#define kgspConfigured_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEngineConfigured__
#define kgspConfigured(arg_this) kgspConfigured_DISPATCH(arg_this)
#define kgspUnload_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEngineUnload__
#define kgspUnload(arg_this) kgspUnload_DISPATCH(arg_this)
#define kgspVprintf_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEngineVprintf__
#define kgspVprintf(arg_this, bReportStart, fmt, args) kgspVprintf_DISPATCH(arg_this, bReportStart, fmt, args)
#define kgspPriRead_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEnginePriRead__
#define kgspPriRead(arg_this, offset) kgspPriRead_DISPATCH(arg_this, offset)
#define kgspPriWrite_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEnginePriWrite__
#define kgspPriWrite(arg_this, offset, data) kgspPriWrite_DISPATCH(arg_this, offset, data)
#define kgspMapBufferDescriptor_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEngineMapBufferDescriptor__
#define kgspMapBufferDescriptor(arg_this, pBufDesc) kgspMapBufferDescriptor_DISPATCH(arg_this, pBufDesc)
#define kgspUnmapBufferDescriptor_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEngineUnmapBufferDescriptor__
#define kgspUnmapBufferDescriptor(arg_this, pBufDesc) kgspUnmapBufferDescriptor_DISPATCH(arg_this, pBufDesc)
#define kgspSyncBufferDescriptor_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEngineSyncBufferDescriptor__
#define kgspSyncBufferDescriptor(arg_this, pBufDesc, offset, size) kgspSyncBufferDescriptor_DISPATCH(arg_this, pBufDesc, offset, size)
#define kgspReadDmem_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEngineReadDmem__
#define kgspReadDmem(arg_this, offset, size, pBuf) kgspReadDmem_DISPATCH(arg_this, offset, size, pBuf)
#define kgspReadDmem_HAL(arg_this, offset, size, pBuf) kgspReadDmem_DISPATCH(arg_this, offset, size, pBuf)
#define kgspGetScratchOffsets_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEngineGetScratchOffsets__
#define kgspGetScratchOffsets(arg_this, scratchGroupId) kgspGetScratchOffsets_DISPATCH(arg_this, scratchGroupId)
#define kgspGetScratchOffsets_HAL(arg_this, scratchGroupId) kgspGetScratchOffsets_DISPATCH(arg_this, scratchGroupId)
#define kgspGetWFL0Offset_FNPTR(arg_this) arg_this->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__kcrashcatEngineGetWFL0Offset__
#define kgspGetWFL0Offset(arg_this) kgspGetWFL0Offset_DISPATCH(arg_this)
#define kgspGetWFL0Offset_HAL(arg_this) kgspGetWFL0Offset_DISPATCH(arg_this)
735
736 // Dispatch functions
// Inline dispatch wrappers: each invokes the function pointer stored in the
// KernelGsp vtable slot of the same name (p->__kgspFoo__), forwarding all
// arguments unchanged. This group covers engine construction, interrupt
// service registration, boot-argument setup, and GSP-RM bootstrap entry points.
static inline NV_STATUS kgspConstructEngine_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, ENGDESCRIPTOR arg3) {
    return pKernelGsp->__kgspConstructEngine__(pGpu, pKernelGsp, arg3);
}

static inline NV_STATUS kgspStateInitLocked_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspStateInitLocked__(pGpu, pKernelGsp);
}

static inline void kgspRegisterIntrService_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceRecord pRecords[175]) {
    pKernelGsp->__kgspRegisterIntrService__(pGpu, pKernelGsp, pRecords);
}

static inline NvU32 kgspServiceInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceServiceInterruptArguments *pParams) {
    return pKernelGsp->__kgspServiceInterrupt__(pGpu, pKernelGsp, pParams);
}

static inline void kgspConfigureFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    pKernelGsp->__kgspConfigureFalcon__(pGpu, pKernelGsp);
}

static inline NvBool kgspIsDebugModeEnabled_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspIsDebugModeEnabled__(pGpu, pKernelGsp);
}

static inline NV_STATUS kgspAllocBootArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspAllocBootArgs__(pGpu, pKernelGsp);
}

static inline void kgspFreeBootArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    pKernelGsp->__kgspFreeBootArgs__(pGpu, pKernelGsp);
}

static inline void kgspProgramLibosBootArgsAddr_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    pKernelGsp->__kgspProgramLibosBootArgsAddr__(pGpu, pKernelGsp);
}

static inline NV_STATUS kgspSetCmdQueueHead_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 queueIdx, NvU32 value) {
    return pKernelGsp->__kgspSetCmdQueueHead__(pGpu, pKernelGsp, queueIdx, value);
}

static inline NV_STATUS kgspPrepareForBootstrap_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    return pKernelGsp->__kgspPrepareForBootstrap__(pGpu, pKernelGsp, pGspFw);
}

static inline NV_STATUS kgspBootstrap_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    return pKernelGsp->__kgspBootstrap__(pGpu, pKernelGsp, pGspFw);
}

static inline void kgspGetGspRmBootUcodeStorage_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc) {
    pKernelGsp->__kgspGetGspRmBootUcodeStorage__(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc);
}

static inline const BINDATA_ARCHIVE * kgspGetBinArchiveGspRmBoot_DISPATCH(struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetBinArchiveGspRmBoot__(pKernelGsp);
}
792
// Dispatch wrappers for firmware-binary archive getters, FB layout
// calculation, sequencer command execution, and simple state queries.
// Each forwards unchanged to the per-object vtable slot of the same name.
static inline const BINDATA_ARCHIVE * kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetBinArchiveConcatenatedFMCDesc__(pKernelGsp);
}

static inline const BINDATA_ARCHIVE * kgspGetBinArchiveConcatenatedFMC_DISPATCH(struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetBinArchiveConcatenatedFMC__(pKernelGsp);
}

static inline const BINDATA_ARCHIVE * kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetBinArchiveGspRmFmcGfwDebugSigned__(pKernelGsp);
}

static inline const BINDATA_ARCHIVE * kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetBinArchiveGspRmFmcGfwProdSigned__(pKernelGsp);
}

static inline const BINDATA_ARCHIVE * kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetBinArchiveGspRmCcFmcGfwProdSigned__(pKernelGsp);
}

static inline NV_STATUS kgspCalculateFbLayout_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    return pKernelGsp->__kgspCalculateFbLayout__(pGpu, pKernelGsp, pGspFw);
}

static inline NvU32 kgspGetNonWprHeapSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetNonWprHeapSize__(pGpu, pKernelGsp);
}

static inline NV_STATUS kgspExecuteSequencerCommand_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize) {
    return pKernelGsp->__kgspExecuteSequencerCommand__(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize);
}

static inline NvU32 kgspReadUcodeFuseVersion_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId) {
    return pKernelGsp->__kgspReadUcodeFuseVersion__(pGpu, pKernelGsp, ucodeId);
}

static inline NV_STATUS kgspResetHw_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspResetHw__(pGpu, pKernelGsp);
}

static inline NvBool kgspHealthCheck_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspHealthCheck__(pGpu, pKernelGsp);
}

static inline NvU32 kgspService_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspService__(pGpu, pKernelGsp);
}

static inline NvBool kgspIsWpr2Up_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspIsWpr2Up__(pGpu, pKernelGsp);
}

static inline NvU32 kgspGetFrtsSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetFrtsSize__(pGpu, pKernelGsp);
}

static inline NvU64 kgspGetPrescrubbedTopFbSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetPrescrubbedTopFbSize__(pGpu, pKernelGsp);
}
852
// Dispatch wrappers for VBIOS extraction, FWSEC preparation/execution,
// scrubber/booter ucode execution, and power-management suspend/resume
// state handling. Each forwards unchanged to the vtable slot of the same name.
static inline NV_STATUS kgspExtractVbiosFromRom_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg) {
    return pKernelGsp->__kgspExtractVbiosFromRom__(pGpu, pKernelGsp, ppVbiosImg);
}

static inline NV_STATUS kgspPrepareForFwsecFrts_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset, KernelGspPreparedFwsecCmd *preparedCmd) {
    return pKernelGsp->__kgspPrepareForFwsecFrts__(pGpu, pKernelGsp, pFwsecUcode, frtsOffset, preparedCmd);
}

static inline NV_STATUS kgspPrepareForFwsecSb_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, KernelGspPreparedFwsecCmd *preparedCmd) {
    return pKernelGsp->__kgspPrepareForFwsecSb__(pGpu, pKernelGsp, pFwsecUcode, preparedCmd);
}

static inline NV_STATUS kgspExecuteFwsec_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspPreparedFwsecCmd *preparedCmd) {
    return pKernelGsp->__kgspExecuteFwsec__(pGpu, pKernelGsp, preparedCmd);
}

static inline NV_STATUS kgspExecuteScrubberIfNeeded_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspExecuteScrubberIfNeeded__(pGpu, pKernelGsp);
}

static inline NV_STATUS kgspExecuteBooterLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData) {
    return pKernelGsp->__kgspExecuteBooterLoad__(pGpu, pKernelGsp, sysmemAddrOfData);
}

static inline NV_STATUS kgspExecuteBooterUnloadIfNeeded_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData) {
    return pKernelGsp->__kgspExecuteBooterUnloadIfNeeded__(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData);
}

static inline NV_STATUS kgspExecuteHsFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1) {
    return pKernelGsp->__kgspExecuteHsFalcon__(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1);
}

static inline NV_STATUS kgspWaitForProcessorSuspend_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspWaitForProcessorSuspend__(pGpu, pKernelGsp);
}

static inline NV_STATUS kgspSavePowerMgmtState_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspSavePowerMgmtState__(pGpu, pKernelGsp);
}

static inline NV_STATUS kgspRestorePowerMgmtState_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspRestorePowerMgmtState__(pGpu, pKernelGsp);
}

static inline void kgspFreeSuspendResumeData_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    pKernelGsp->__kgspFreeSuspendResumeData__(pGpu, pKernelGsp);
}

static inline NV_STATUS kgspWaitForGfwBootOk_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspWaitForGfwBootOk__(pGpu, pKernelGsp);
}
904
// Dispatch wrappers for booter ucode archive getters, WPR heap size queries,
// and vGPU partition log-buffer initialization. Each forwards unchanged to
// the vtable slot of the same name.
static inline const BINDATA_ARCHIVE * kgspGetBinArchiveBooterLoadUcode_DISPATCH(struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetBinArchiveBooterLoadUcode__(pKernelGsp);
}

static inline const BINDATA_ARCHIVE * kgspGetBinArchiveBooterUnloadUcode_DISPATCH(struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetBinArchiveBooterUnloadUcode__(pKernelGsp);
}

static inline NvU64 kgspGetMinWprHeapSizeMB_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetMinWprHeapSizeMB__(pGpu, pKernelGsp);
}

static inline NvU64 kgspGetMaxWprHeapSizeMB_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetMaxWprHeapSizeMB__(pGpu, pKernelGsp);
}

static inline NvU32 kgspGetFwHeapParamOsCarveoutSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return pKernelGsp->__kgspGetFwHeapParamOsCarveoutSize__(pGpu, pKernelGsp);
}

static inline NV_STATUS kgspInitVgpuPartitionLogging_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize, NvU64 kernelLogBuffOffset, NvU64 kernelLogBuffSize) {
    return pKernelGsp->__kgspInitVgpuPartitionLogging__(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize, kernelLogBuffOffset, kernelLogBuffSize);
}
928
kgspFreeVgpuPartitionLogging_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 gfid)929 static inline NV_STATUS kgspFreeVgpuPartitionLogging_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid) {
930 return pKernelGsp->__kgspFreeVgpuPartitionLogging__(pGpu, pKernelGsp, gfid);
931 }
932
kgspGetSignatureSectionNamePrefix_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)933 static inline const char * kgspGetSignatureSectionNamePrefix_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
934 return pKernelGsp->__kgspGetSignatureSectionNamePrefix__(pGpu, pKernelGsp);
935 }
936
kgspSetupGspFmcArgs_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,GSP_FIRMWARE * pGspFw)937 static inline NV_STATUS kgspSetupGspFmcArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
938 return pKernelGsp->__kgspSetupGspFmcArgs__(pGpu, pKernelGsp, pGspFw);
939 }
940
kgspReadEmem_DISPATCH(struct KernelGsp * pKernelGsp,NvU64 offset,NvU64 size,void * pBuf)941 static inline void kgspReadEmem_DISPATCH(struct KernelGsp *pKernelGsp, NvU64 offset, NvU64 size, void *pBuf) {
942 pKernelGsp->__kgspReadEmem__(pKernelGsp, offset, size, pBuf);
943 }
944
kgspIssueNotifyOp_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp,NvU32 opCode,NvU32 * pArgs,NvU32 argc)945 static inline NV_STATUS kgspIssueNotifyOp_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pArgs, NvU32 argc) {
946 return pKernelGsp->__kgspIssueNotifyOp__(pGpu, pKernelGsp, opCode, pArgs, argc);
947 }
948
kgspCheckGspRmCcCleanup_DISPATCH(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)949 static inline NV_STATUS kgspCheckGspRmCcCleanup_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
950 return pKernelGsp->__kgspCheckGspRmCcCleanup__(pGpu, pKernelGsp);
951 }
952
//
// NVOC-generated dispatchers for the OBJENGSTATE-inherited lifecycle
// methods (pre-init / load / unload / destroy). The parameter is named
// pEngstate because these originate from the engine-state base class.
//

static inline void kgspInitMissing_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate) {
    pEngstate->__kgspInitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS kgspStatePreInitLocked_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate) {
    return pEngstate->__kgspStatePreInitLocked__(pGpu, pEngstate);
}

static inline NV_STATUS kgspStatePreInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate) {
    return pEngstate->__kgspStatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS kgspStateInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate) {
    return pEngstate->__kgspStateInitUnlocked__(pGpu, pEngstate);
}

// arg3 carries the engine-state transition flags for the load/unload family.
static inline NV_STATUS kgspStatePreLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate, NvU32 arg3) {
    return pEngstate->__kgspStatePreLoad__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kgspStateLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate, NvU32 arg3) {
    return pEngstate->__kgspStateLoad__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kgspStatePostLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate, NvU32 arg3) {
    return pEngstate->__kgspStatePostLoad__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kgspStatePreUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate, NvU32 arg3) {
    return pEngstate->__kgspStatePreUnload__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kgspStateUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate, NvU32 arg3) {
    return pEngstate->__kgspStateUnload__(pGpu, pEngstate, arg3);
}

static inline NV_STATUS kgspStatePostUnload_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate, NvU32 arg3) {
    return pEngstate->__kgspStatePostUnload__(pGpu, pEngstate, arg3);
}

static inline void kgspStateDestroy_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate) {
    pEngstate->__kgspStateDestroy__(pGpu, pEngstate);
}

static inline NvBool kgspIsPresent_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pEngstate) {
    return pEngstate->__kgspIsPresent__(pGpu, pEngstate);
}
1000
//
// NVOC-generated dispatchers for the IntrService interface (pIntrService)
// and the KernelFalcon register-access interface (pKernelFlcn).
//

static inline NvBool kgspClearInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pIntrService, IntrServiceClearInterruptArguments *pParams) {
    return pIntrService->__kgspClearInterrupt__(pGpu, pIntrService, pParams);
}

static inline NV_STATUS kgspServiceNotificationInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pIntrService, IntrServiceServiceNotificationInterruptArguments *pParams) {
    return pIntrService->__kgspServiceNotificationInterrupt__(pGpu, pIntrService, pParams);
}

// Falcon register read/write at a byte offset within the engine's space.
static inline NvU32 kgspRegRead_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelFlcn, NvU32 offset) {
    return pKernelFlcn->__kgspRegRead__(pGpu, pKernelFlcn, offset);
}

static inline void kgspRegWrite_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelFlcn, NvU32 offset, NvU32 data) {
    pKernelFlcn->__kgspRegWrite__(pGpu, pKernelFlcn, offset, data);
}

static inline NvU32 kgspMaskDmemAddr_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelFlcn, NvU32 addr) {
    return pKernelFlcn->__kgspMaskDmemAddr__(pGpu, pKernelFlcn, addr);
}
1020
//
// NVOC-generated dispatchers for the CrashCat engine interface
// (crash-report capture/decoding support). These take only the object
// pointer (arg_this) since they operate on KernelGsp state directly.
//

static inline NvBool kgspConfigured_DISPATCH(struct KernelGsp *arg_this) {
    return arg_this->__kgspConfigured__(arg_this);
}

static inline void kgspUnload_DISPATCH(struct KernelGsp *arg_this) {
    arg_this->__kgspUnload__(arg_this);
}

// printf-style report output; bReportStart marks the beginning of a report.
static inline void kgspVprintf_DISPATCH(struct KernelGsp *arg_this, NvBool bReportStart, const char *fmt, va_list args) {
    arg_this->__kgspVprintf__(arg_this, bReportStart, fmt, args);
}

static inline NvU32 kgspPriRead_DISPATCH(struct KernelGsp *arg_this, NvU32 offset) {
    return arg_this->__kgspPriRead__(arg_this, offset);
}

static inline void kgspPriWrite_DISPATCH(struct KernelGsp *arg_this, NvU32 offset, NvU32 data) {
    arg_this->__kgspPriWrite__(arg_this, offset, data);
}

// Map/unmap/sync a crash-report buffer described by pBufDesc.
static inline void * kgspMapBufferDescriptor_DISPATCH(struct KernelGsp *arg_this, CrashCatBufferDescriptor *pBufDesc) {
    return arg_this->__kgspMapBufferDescriptor__(arg_this, pBufDesc);
}

static inline void kgspUnmapBufferDescriptor_DISPATCH(struct KernelGsp *arg_this, CrashCatBufferDescriptor *pBufDesc) {
    arg_this->__kgspUnmapBufferDescriptor__(arg_this, pBufDesc);
}

static inline void kgspSyncBufferDescriptor_DISPATCH(struct KernelGsp *arg_this, CrashCatBufferDescriptor *pBufDesc, NvU32 offset, NvU32 size) {
    arg_this->__kgspSyncBufferDescriptor__(arg_this, pBufDesc, offset, size);
}

static inline void kgspReadDmem_DISPATCH(struct KernelGsp *arg_this, NvU32 offset, NvU32 size, void *pBuf) {
    arg_this->__kgspReadDmem__(arg_this, offset, size, pBuf);
}

static inline const NvU32 * kgspGetScratchOffsets_DISPATCH(struct KernelGsp *arg_this, NV_CRASHCAT_SCRATCH_GROUP_ID scratchGroupId) {
    return arg_this->__kgspGetScratchOffsets__(arg_this, scratchGroupId);
}

static inline NvU32 kgspGetWFL0Offset_DISPATCH(struct KernelGsp *arg_this) {
    return arg_this->__kgspGetWFL0Offset__(arg_this);
}
1064
//
// HAL prototypes and per-config stubs for construction, boot-argument
// management, and GSP bootstrap. The hash-suffixed inlines (_f2d351,
// _5baef9, _108313) are NVOC-generated stubs installed on configurations
// where the method has no implementation: they assert in pre-compiled
// builds and return a failure value (or nothing, for void methods).
//

NV_STATUS kgspConstructEngine_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, ENGDESCRIPTOR arg3);

NV_STATUS kgspStateInitLocked_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

// Registers KernelGsp's interrupt handlers in the per-GPU record table
// (175 entries in this generated revision).
void kgspRegisterIntrService_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceRecord pRecords[175]);

NvU32 kgspServiceInterrupt_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceServiceInterruptArguments *pParams);

void kgspConfigureFalcon_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

void kgspConfigureFalcon_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

// Stub: assert-only, no-op for configs without a falcon-configure HAL.
static inline void kgspConfigureFalcon_f2d351(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_PRECOMP(0);
}

NvBool kgspIsDebugModeEnabled_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

NvBool kgspIsDebugModeEnabled_GA100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

// Stub: asserts and returns NV_FALSE (generator spells it (NvBool)(0 != 0)).
static inline NvBool kgspIsDebugModeEnabled_108313(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((NvBool)(0 != 0)));
}

NV_STATUS kgspAllocBootArgs_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

NV_STATUS kgspAllocBootArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

// Stub: asserts and returns NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspAllocBootArgs_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

void kgspFreeBootArgs_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

void kgspFreeBootArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

static inline void kgspFreeBootArgs_f2d351(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_PRECOMP(0);
}

void kgspProgramLibosBootArgsAddr_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

static inline void kgspProgramLibosBootArgsAddr_f2d351(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_PRECOMP(0);
}

NV_STATUS kgspSetCmdQueueHead_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 queueIdx, NvU32 value);

static inline NV_STATUS kgspSetCmdQueueHead_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 queueIdx, NvU32 value) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kgspPrepareForBootstrap_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);

NV_STATUS kgspPrepareForBootstrap_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);

static inline NV_STATUS kgspPrepareForBootstrap_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kgspBootstrap_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);

NV_STATUS kgspBootstrap_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);

static inline NV_STATUS kgspBootstrap_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

// Returns (via out-params) the bindata storage for the GSP-RM boot ucode
// image and its descriptor, per chip family.
void kgspGetGspRmBootUcodeStorage_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc);

void kgspGetGspRmBootUcodeStorage_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc);

void kgspGetGspRmBootUcodeStorage_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc);

void kgspGetGspRmBootUcodeStorage_GB100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc);

static inline void kgspGetGspRmBootUcodeStorage_f2d351(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc) {
    NV_ASSERT_PRECOMP(0);
}
1144
//
// Per-chip accessors for embedded firmware (bindata) archives: GSP-RM boot
// ucode, the FMC (firmware management core) images, and the signed FMC
// variants. The _80f438 stubs assert and return NULL on configs where the
// archive does not exist; _5baef9 asserts and returns NV_ERR_NOT_SUPPORTED.
//

const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_TU102(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GA100(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GA102(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GH100(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_AD102(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GB100(struct KernelGsp *pKernelGsp);

static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_80f438(struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}

const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_GH100(struct KernelGsp *pKernelGsp);

static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_80f438(struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}

const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_GH100(struct KernelGsp *pKernelGsp);

static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_80f438(struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}

const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_GH100(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_GB100(struct KernelGsp *pKernelGsp);

static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_80f438(struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}

const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_GH100(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_GB100(struct KernelGsp *pKernelGsp);

static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_80f438(struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}

// Confidential-compute (CC) variant of the prod-signed FMC archive.
const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_GH100(struct KernelGsp *pKernelGsp);

static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_80f438(struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}

NV_STATUS kgspCalculateFbLayout_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);

NV_STATUS kgspCalculateFbLayout_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);

static inline NV_STATUS kgspCalculateFbLayout_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}
1202
//
// HAL prototypes and constant-returning stubs for heap sizing, sequencer
// commands, fuse-version reads, HW reset, health checking, interrupt
// service, WPR2 state, FRTS sizing, and prescrubbed-FB sizing. The
// constant-returning inlines encode per-config fixed values chosen by the
// NVOC generator.
//

// Non-WPR heap size: 1 MiB on this config.
static inline NvU32 kgspGetNonWprHeapSize_ed6b8b(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return 1048576;
}

// Non-WPR heap size: 2 MiB on this config.
static inline NvU32 kgspGetNonWprHeapSize_d505ea(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return 2097152;
}

// Stub: asserts; NOTE the generator returns NV_ERR_NOT_SUPPORTED here even
// though the function's type is NvU32 (generated code; left as-is).
static inline NvU32 kgspGetNonWprHeapSize_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kgspExecuteSequencerCommand_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize);

NV_STATUS kgspExecuteSequencerCommand_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize);

static inline NV_STATUS kgspExecuteSequencerCommand_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

// No ucode version fuse on this config: always 0.
static inline NvU32 kgspReadUcodeFuseVersion_b2b553(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId) {
    return 0;
}

NvU32 kgspReadUcodeFuseVersion_GA100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId);

// Stub: asserts and returns 0.
static inline NvU32 kgspReadUcodeFuseVersion_474d46(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}

NV_STATUS kgspResetHw_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

NV_STATUS kgspResetHw_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

NV_STATUS kgspResetHw_GB100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

static inline NV_STATUS kgspResetHw_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NvBool kgspHealthCheck_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

// Stub: asserts and returns NV_FALSE.
static inline NvBool kgspHealthCheck_108313(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((NvBool)(0 != 0)));
}

NvU32 kgspService_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

static inline NvU32 kgspService_474d46(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}

NvBool kgspIsWpr2Up_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

NvBool kgspIsWpr2Up_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

static inline NvBool kgspIsWpr2Up_108313(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((NvBool)(0 != 0)));
}

NvU32 kgspGetFrtsSize_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

// No FRTS region on this config: size 0.
static inline NvU32 kgspGetFrtsSize_4a4dee(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return 0;
}

static inline NvU32 kgspGetFrtsSize_474d46(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}

// Prescrubbed top-of-FB size: fixed 256 MiB on this config.
static inline NvU64 kgspGetPrescrubbedTopFbSize_e1e623(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return 256 * 1024 * 1024;
}

// Prescrubbed top-of-FB size: NV_U64_MAX, i.e. the whole FB is prescrubbed.
static inline NvU64 kgspGetPrescrubbedTopFbSize_604eb7(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    return (+18446744073709551615ULL);
}

static inline NvU64 kgspGetPrescrubbedTopFbSize_474d46(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}
1284
//
// HAL prototypes and stubs for VBIOS extraction, FWSEC (firmware security)
// command preparation/execution, the scrubber, the Booter load/unload
// ucodes, and generic HS-falcon execution. _395e98 stubs quietly return
// NV_ERR_NOT_SUPPORTED (no assert); _5baef9 stubs assert first.
//

NV_STATUS kgspExtractVbiosFromRom_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg);

// Quiet not-supported stub (expected path on configs without a ROM VBIOS).
static inline NV_STATUS kgspExtractVbiosFromRom_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kgspExtractVbiosFromRom_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

// Prepare a FWSEC FRTS command targeting the given FB offset.
NV_STATUS kgspPrepareForFwsecFrts_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset, KernelGspPreparedFwsecCmd *preparedCmd);

static inline NV_STATUS kgspPrepareForFwsecFrts_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset, KernelGspPreparedFwsecCmd *preparedCmd) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kgspPrepareForFwsecSb_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, KernelGspPreparedFwsecCmd *preparedCmd);

static inline NV_STATUS kgspPrepareForFwsecSb_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, KernelGspPreparedFwsecCmd *preparedCmd) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS kgspPrepareForFwsecSb_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, KernelGspPreparedFwsecCmd *preparedCmd) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kgspExecuteFwsec_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspPreparedFwsecCmd *preparedCmd);

static inline NV_STATUS kgspExecuteFwsec_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspPreparedFwsecCmd *preparedCmd) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kgspExecuteScrubberIfNeeded_AD102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

static inline NV_STATUS kgspExecuteScrubberIfNeeded_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kgspExecuteBooterLoad_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData);

static inline NV_STATUS kgspExecuteBooterLoad_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kgspExecuteBooterUnloadIfNeeded_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData);

static inline NV_STATUS kgspExecuteBooterUnloadIfNeeded_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS kgspExecuteHsFalcon_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1);

NV_STATUS kgspExecuteHsFalcon_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1);

static inline NV_STATUS kgspExecuteHsFalcon_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}
1342
1343 NV_STATUS kgspWaitForProcessorSuspend_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1344
kgspWaitForProcessorSuspend_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1345 static inline NV_STATUS kgspWaitForProcessorSuspend_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1346 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1347 }
1348
1349 NV_STATUS kgspSavePowerMgmtState_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1350
kgspSavePowerMgmtState_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1351 static inline NV_STATUS kgspSavePowerMgmtState_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1352 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1353 }
1354
1355 NV_STATUS kgspRestorePowerMgmtState_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1356
kgspRestorePowerMgmtState_5baef9(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1357 static inline NV_STATUS kgspRestorePowerMgmtState_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1358 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1359 }
1360
1361 void kgspFreeSuspendResumeData_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);
1362
kgspFreeSuspendResumeData_f2d351(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1363 static inline void kgspFreeSuspendResumeData_f2d351(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1364 NV_ASSERT_PRECOMP(0);
1365 }
1366
// Wait for GPU firmware (GFW) to signal boot-OK — per-architecture HAL variants.
NV_STATUS kgspWaitForGfwBootOk_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

NV_STATUS kgspWaitForGfwBootOk_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

// HAL stub (_5baef9 variant): not supported; asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspWaitForGfwBootOk_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}
1374
// Retrieve the embedded Booter "load" ucode binary archive — per-chip HAL variants.
const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_TU102(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_TU116(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_GA100(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_GA102(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_AD102(struct KernelGsp *pKernelGsp);

// HAL stub (_80f438 variant): no Booter archive on this chip; asserts and returns NULL.
static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_80f438(struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}
1388
// Retrieve the embedded Booter "unload" ucode binary archive — per-chip HAL variants.
const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_TU102(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_TU116(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_GA100(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_GA102(struct KernelGsp *pKernelGsp);

const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_AD102(struct KernelGsp *pKernelGsp);

// HAL stub (_80f438 variant): no Booter archive on this chip; asserts and returns NULL.
static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_80f438(struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}
1402
kgspGetMinWprHeapSizeMB_7185bf(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1403 static inline NvU64 kgspGetMinWprHeapSizeMB_7185bf(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1404 return (64U);
1405 }
1406
kgspGetMinWprHeapSizeMB_cc88c3(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1407 static inline NvU64 kgspGetMinWprHeapSizeMB_cc88c3(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1408 return pGpu->bVgpuGspPluginOffloadEnabled ? (565U) : (86U);
1409 }
1410
kgspGetMinWprHeapSizeMB_b2b553(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1411 static inline NvU64 kgspGetMinWprHeapSizeMB_b2b553(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1412 return 0;
1413 }
1414
kgspGetMaxWprHeapSizeMB_ad4e6a(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1415 static inline NvU64 kgspGetMaxWprHeapSizeMB_ad4e6a(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1416 return (256U);
1417 }
1418
kgspGetMaxWprHeapSizeMB_55728f(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1419 static inline NvU64 kgspGetMaxWprHeapSizeMB_55728f(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1420 return pGpu->bVgpuGspPluginOffloadEnabled ? (1040U) : (278U);
1421 }
1422
kgspGetMaxWprHeapSizeMB_b2b553(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1423 static inline NvU64 kgspGetMaxWprHeapSizeMB_b2b553(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1424 return 0;
1425 }
1426
kgspGetFwHeapParamOsCarveoutSize_397f70(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1427 static inline NvU32 kgspGetFwHeapParamOsCarveoutSize_397f70(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1428 return (0 << 20);
1429 }
1430
kgspGetFwHeapParamOsCarveoutSize_4b5307(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1431 static inline NvU32 kgspGetFwHeapParamOsCarveoutSize_4b5307(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1432 return pGpu->bVgpuGspPluginOffloadEnabled ? (36 << 20) : (22 << 20);
1433 }
1434
kgspGetFwHeapParamOsCarveoutSize_b2b553(struct OBJGPU * pGpu,struct KernelGsp * pKernelGsp)1435 static inline NvU32 kgspGetFwHeapParamOsCarveoutSize_b2b553(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
1436 return 0;
1437 }
1438
// HAL stub (_395e98 variant): vGPU partition logging unavailable; quietly
// reports NV_ERR_NOT_SUPPORTED (no assert — this is a legitimate soft failure).
static inline NV_STATUS kgspInitVgpuPartitionLogging_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize, NvU64 kernelLogBuffOffset, NvU64 kernelLogBuffSize) {
    return NV_ERR_NOT_SUPPORTED;
}

// Set up log buffers for a vGPU partition identified by gfid.
NV_STATUS kgspInitVgpuPartitionLogging_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize, NvU64 kernelLogBuffOffset, NvU64 kernelLogBuffSize);
1444
// HAL stub (_395e98 variant): vGPU partition logging unavailable; quietly
// reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspFreeVgpuPartitionLogging_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid) {
    return NV_ERR_NOT_SUPPORTED;
}

// Tear down log buffers for the vGPU partition identified by gfid.
NV_STATUS kgspFreeVgpuPartitionLogging_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid);
1450
// ELF section-name prefix used to locate firmware signature sections —
// per-architecture HAL variants.
const char *kgspGetSignatureSectionNamePrefix_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

const char *kgspGetSignatureSectionNamePrefix_GB100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

// _789efb: default prefix shared by pre-Hopper configurations.
static inline const char *kgspGetSignatureSectionNamePrefix_789efb(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    static const char signaturePrefix[] = ".fwsignature_";
    return signaturePrefix;
}
1458
// HAL stub (_80f438 variant): no signature section on this chip; asserts and returns NULL.
static inline const char *kgspGetSignatureSectionNamePrefix_80f438(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}
1462
// Prepare arguments for the GSP FMC (firmware microcode) from the firmware image (GH100+ HAL).
NV_STATUS kgspSetupGspFmcArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);

// HAL stub (_5baef9 variant): not supported; asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspSetupGspFmcArgs_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}
1468
// Copy `size` bytes from GSP EMEM at `offset` into pBuf (TU102+ HAL).
void kgspReadEmem_TU102(struct KernelGsp *pKernelGsp, NvU64 offset, NvU64 size, void *pBuf);

// HAL stub (_366c4c variant): EMEM access unsupported; asserts (void return).
static inline void kgspReadEmem_366c4c(struct KernelGsp *pKernelGsp, NvU64 offset, NvU64 size, void *pBuf) {
    NV_ASSERT(0);
}
1474
// Issue a notify operation (opCode + argc args in pArgs) to GSP (GH100+ HAL).
NV_STATUS kgspIssueNotifyOp_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pArgs, NvU32 argc);

// HAL stub (_5baef9 variant): not supported; asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspIssueNotifyOp_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pArgs, NvU32 argc) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}
1480
// Verify GSP-RM confidential-compute cleanup completed (GH100+ HAL).
NV_STATUS kgspCheckGspRmCcCleanup_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

// HAL stub (_5baef9 variant): not supported; asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspCheckGspRmCcCleanup_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}
1486
// Destructor for the KernelGsp engine object (invoked through the NVOC macro below).
void kgspDestruct_IMPL(struct KernelGsp *pKernelGsp);

#define __nvoc_kgspDestruct(pKernelGsp) kgspDestruct_IMPL(pKernelGsp)
// Fill in the GSP-RM init arguments used for suspend/resume initialization.
void kgspPopulateGspRmInitArgs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_SR_INIT_ARGUMENTS *pGspSrInitArgs);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: any call asserts.
static inline void kgspPopulateGspRmInitArgs(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_SR_INIT_ARGUMENTS *pGspSrInitArgs) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspPopulateGspRmInitArgs(pGpu, pKernelGsp, pGspSrInitArgs) kgspPopulateGspRmInitArgs_IMPL(pGpu, pKernelGsp, pGspSrInitArgs)
#endif //__nvoc_kernel_gsp_h_disabled
1499
// Initiate boot of RM on the GSP core (GSP-RM) with the given firmware image
// (see the file-header description of KernelGsp's role).
NV_STATUS kgspInitRm_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspInitRm(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspInitRm(pGpu, pKernelGsp, pGspFw) kgspInitRm_IMPL(pGpu, pKernelGsp, pGspFw)
#endif //__nvoc_kernel_gsp_h_disabled
1510
// Build a radix-3 page-table structure describing pData/pMemdescData for GSP
// consumption; returns the new descriptor through ppMemdescRadix3.
NV_STATUS kgspCreateRadix3_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, MEMORY_DESCRIPTOR **ppMemdescRadix3, MEMORY_DESCRIPTOR *pMemdescData, const void *pData, NvU64 sizeOfData);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspCreateRadix3(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, MEMORY_DESCRIPTOR **ppMemdescRadix3, MEMORY_DESCRIPTOR *pMemdescData, const void *pData, NvU64 sizeOfData) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspCreateRadix3(pGpu, pKernelGsp, ppMemdescRadix3, pMemdescData, pData, sizeOfData) kgspCreateRadix3_IMPL(pGpu, pKernelGsp, ppMemdescRadix3, pMemdescData, pData, sizeOfData)
#endif //__nvoc_kernel_gsp_h_disabled
1521
// Unload GSP-RM from the GSP core; counterpart to kgspInitRm.
NV_STATUS kgspUnloadRm_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspUnloadRm(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspUnloadRm(pGpu, pKernelGsp) kgspUnloadRm_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled
1532
// Prepare the GSP boot binary image prior to loading it onto the GSP core.
NV_STATUS kgspPrepareBootBinaryImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspPrepareBootBinaryImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspPrepareBootBinaryImage(pGpu, pKernelGsp) kgspPrepareBootBinaryImage_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled
1543
// Compute the GSP-RM firmware heap size; posteriorFbSize is the FB size input
// to the calculation. Disabled stub returns 0.
NvU64 kgspGetFwHeapSize_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU64 posteriorFbSize);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and returns 0.
static inline NvU64 kgspGetFwHeapSize(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU64 posteriorFbSize) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return 0;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspGetFwHeapSize(pGpu, pKernelGsp, posteriorFbSize) kgspGetFwHeapSize_IMPL(pGpu, pKernelGsp, posteriorFbSize)
#endif //__nvoc_kernel_gsp_h_disabled
1554
// Return the margin reserved at the end of the WPR region. Disabled stub returns 0.
NvU64 kgspGetWprEndMargin_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and returns 0.
static inline NvU64 kgspGetWprEndMargin(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return 0;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspGetWprEndMargin(pGpu, pKernelGsp) kgspGetWprEndMargin_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled
1565
// Populate the LibOS initialization arguments passed to GSP-RM at boot.
void kgspSetupLibosInitArgs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: any call asserts.
static inline void kgspSetupLibosInitArgs(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspSetupLibosInitArgs(pGpu, pKernelGsp) kgspSetupLibosInitArgs_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled
1575
// Queue the asynchronous initialization RPCs sent to GSP-RM after boot.
NV_STATUS kgspQueueAsyncInitRpcs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspQueueAsyncInitRpcs(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspQueueAsyncInitRpcs(pGpu, pKernelGsp) kgspQueueAsyncInitRpcs_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled
1586
// Drain and dispatch RPC events received from GSP-RM.
void kgspRpcRecvEvents_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: any call asserts.
static inline void kgspRpcRecvEvents(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspRpcRecvEvents(pGpu, pKernelGsp) kgspRpcRecvEvents_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled
1596
// Block until GSP-RM signals that its initialization has completed.
NV_STATUS kgspWaitForRmInitDone_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspWaitForRmInitDone(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspWaitForRmInitDone(pGpu, pKernelGsp) kgspWaitForRmInitDone_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled
1607
// Start periodic polling of the GSP-RM log buffers.
NV_STATUS kgspStartLogPolling_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspStartLogPolling(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspStartLogPolling(pGpu, pKernelGsp) kgspStartLogPolling_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled
1618
// Dump GSP-RM logs; arg2 is a flag whose semantics are defined by the
// implementation (NOTE(review): likely "force/sync" — confirm in kernel_gsp.c).
void kgspDumpGspLogs_IMPL(struct KernelGsp *pKernelGsp, NvBool arg2);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: any call asserts.
static inline void kgspDumpGspLogs(struct KernelGsp *pKernelGsp, NvBool arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspDumpGspLogs(pKernelGsp, arg2) kgspDumpGspLogs_IMPL(pKernelGsp, arg2)
#endif //__nvoc_kernel_gsp_h_disabled
1628
// Variant of kgspDumpGspLogs for callers that already hold the required lock
// (per the "Unlocked" naming convention).
void kgspDumpGspLogsUnlocked_IMPL(struct KernelGsp *pKernelGsp, NvBool arg2);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: any call asserts.
static inline void kgspDumpGspLogsUnlocked(struct KernelGsp *pKernelGsp, NvBool arg2) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspDumpGspLogsUnlocked(pKernelGsp, arg2) kgspDumpGspLogsUnlocked_IMPL(pKernelGsp, arg2)
#endif //__nvoc_kernel_gsp_h_disabled
1638
// Execute a GSP sequencer command buffer on the CPU (params are the opaque
// run-CPU-sequencer arguments supplied by GSP-RM).
NV_STATUS kgspExecuteSequencerBuffer_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, void *pRunCpuSeqParams);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspExecuteSequencerBuffer(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, void *pRunCpuSeqParams) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspExecuteSequencerBuffer(pGpu, pKernelGsp, pRunCpuSeqParams) kgspExecuteSequencerBuffer_IMPL(pGpu, pKernelGsp, pRunCpuSeqParams)
#endif //__nvoc_kernel_gsp_h_disabled
1649
// Extract the FWSEC ucode from a VBIOS image. On success returns the ucode
// through ppFwsecUcode (caller frees with kgspFreeFlcnUcode) and the combined
// VBIOS version through pVbiosVersionCombined.
NV_STATUS kgspParseFwsecUcodeFromVbiosImg_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const KernelGspVbiosImg *const pVbiosImg, KernelGspFlcnUcode **ppFwsecUcode, NvU64 *pVbiosVersionCombined);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspParseFwsecUcodeFromVbiosImg(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const KernelGspVbiosImg *const pVbiosImg, KernelGspFlcnUcode **ppFwsecUcode, NvU64 *pVbiosVersionCombined) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspParseFwsecUcodeFromVbiosImg(pGpu, pKernelGsp, pVbiosImg, ppFwsecUcode, pVbiosVersionCombined) kgspParseFwsecUcodeFromVbiosImg_IMPL(pGpu, pKernelGsp, pVbiosImg, ppFwsecUcode, pVbiosVersionCombined)
#endif //__nvoc_kernel_gsp_h_disabled
1660
// Allocate the scrubber ucode image; caller frees *ppScrubberUcode with kgspFreeFlcnUcode.
NV_STATUS kgspAllocateScrubberUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppScrubberUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspAllocateScrubberUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppScrubberUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspAllocateScrubberUcodeImage(pGpu, pKernelGsp, ppScrubberUcode) kgspAllocateScrubberUcodeImage_IMPL(pGpu, pKernelGsp, ppScrubberUcode)
#endif //__nvoc_kernel_gsp_h_disabled
1671
// Allocate the Booter "load" ucode image; caller frees *ppBooterLoadUcode with kgspFreeFlcnUcode.
NV_STATUS kgspAllocateBooterLoadUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterLoadUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspAllocateBooterLoadUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterLoadUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspAllocateBooterLoadUcodeImage(pGpu, pKernelGsp, ppBooterLoadUcode) kgspAllocateBooterLoadUcodeImage_IMPL(pGpu, pKernelGsp, ppBooterLoadUcode)
#endif //__nvoc_kernel_gsp_h_disabled
1682
// Allocate the Booter "unload" ucode image; caller frees *ppBooterUnloadUcode with kgspFreeFlcnUcode.
NV_STATUS kgspAllocateBooterUnloadUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterUnloadUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspAllocateBooterUnloadUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterUnloadUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspAllocateBooterUnloadUcodeImage(pGpu, pKernelGsp, ppBooterUnloadUcode) kgspAllocateBooterUnloadUcodeImage_IMPL(pGpu, pKernelGsp, ppBooterUnloadUcode)
#endif //__nvoc_kernel_gsp_h_disabled
1693
// RC (robust-channel) all user channels with the given exception type and
// notify their clients.
void kgspRcAndNotifyAllUserChannels_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 exceptType);

#ifdef __nvoc_kernel_gsp_h_disabled
// Stub used when KernelGsp is compiled out: any call asserts.
static inline void kgspRcAndNotifyAllUserChannels(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 exceptType) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspRcAndNotifyAllUserChannels(pGpu, pKernelGsp, exceptType) kgspRcAndNotifyAllUserChannels_IMPL(pGpu, pKernelGsp, exceptType)
#endif //__nvoc_kernel_gsp_h_disabled
1703
#undef PRIVATE_FIELD


// RPC-backed RM API entry points: each forwards the corresponding RM API
// operation (control/alloc/dup/free) to GSP-RM over RPC.
NV_STATUS rpcRmApiControl_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hObject,
                              NvU32 cmd, void *pParamStructPtr, NvU32 paramsSize);
NV_STATUS rpcRmApiAlloc_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hParent,
                            NvHandle hObject, NvU32 hClass, void *pAllocParams, NvU32 allocParamsSize);
NV_STATUS rpcRmApiDupObject_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hParent, NvHandle *phObject,
                                NvHandle hClientSrc, NvHandle hObjectSrc, NvU32 flags);
NV_STATUS rpcRmApiFree_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hObject);

/* Free a KernelGspVbiosImg structure */
void kgspFreeVbiosImg(KernelGspVbiosImg *pVbiosImg);
/* Free a KernelGspFlcnUcode structure */
void kgspFreeFlcnUcode(KernelGspFlcnUcode *pFlcnUcode);

// Log diagnostic details for an RPC failure (errorNum) on the given RPC object.
void kgspLogRpcDebugInfo(struct OBJGPU *pGpu, OBJRPC *pRpc, NvU32 errorNum, NvBool bPollingForRpcResponse);
1721
1722 #endif // KERNEL_GSP_H
1723
1724 #ifdef __cplusplus
1725 } // extern "C"
1726 #endif
1727
1728 #endif // _G_KERNEL_GSP_NVOC_H_
1729