#ifndef _G_KERNEL_GSP_NVOC_H_
#define _G_KERNEL_GSP_NVOC_H_
#include "nvoc/runtime.h"

// C linkage so this NVOC-generated header can be consumed from C++ translation units.
#ifdef __cplusplus
extern "C" {
#endif

/*
 * SPDX-FileCopyrightText: Copyright (c) 2017-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "g_kernel_gsp_nvoc.h"

#ifndef KERNEL_GSP_H
#define KERNEL_GSP_H

/*!
 * This file provides definitions for all KernelGsp data structures
 * and interfaces. KernelGsp is responsible for initiating the boot
 * of RM on the GSP core (GSP-RM) and helps facilitate communication
 * between Kernel RM and GSP-RM.
 *
 * NOTE: This header is generated by NVOC. Do not edit declarations by hand;
 * field and vtable-entry order is part of the generated ABI.
 */

#include "core/core.h"
#include "core/bin_data.h"
#include "gpu/eng_state.h"
#include "gpu/intr/intr_service.h"
#include "gpu/falcon/kernel_falcon.h"
#include "gpu/gsp/gsp_static_config.h"
#include "gpu/gsp/gsp_init_args.h"
#include "gpu/gsp/gsp_fw_heap.h"
#include "nv-firmware.h"
#include "nv_sriov_defines.h"
#include "rmRiscvUcode.h"

#include "libos_init_args.h"
#include "gsp_fw_wpr_meta.h"
#include "gsp_fw_sr_meta.h"
#include "liblogdecode.h"

/*!
 * Forward declarations (pointers only are used below, so full definitions
 * are not required here).
 */
typedef struct SimAccessBuffer SimAccessBuffer;
typedef struct GSP_FMC_BOOT_PARAMS GSP_FMC_BOOT_PARAMS;

/*!
 * Structure for VBIOS image for early FRTS.
 */
typedef struct KernelGspVbiosImg
{
    NvU8 *pImage;              // VBIOS image contents
    NvU32 biosSize;            // total size in bytes of pImage
    NvU32 expansionRomOffset;  // byte offset of the expansion ROM within pImage
} KernelGspVbiosImg;

/*!
 * Variant of KernelGspFlcnUcode representing a non-Boot-from-HS ucode that
 * loads directly without the generic falcon bootloader.
 */
typedef struct KernelGspFlcnUcodeBootDirect
{
    NvU8 *pImage;        // ucode image contents
    NvU32 size;          // total size in bytes of pImage

    // IMEM (code) layout: non-secure ("Ns") and secure ("Sec") regions
    NvU32 imemSize;
    NvU32 imemNsSize;    // size of the non-secure IMEM region
    NvU32 imemNsPa;      // physical address of the non-secure IMEM region
    NvU32 imemSecSize;   // size of the secure IMEM region
    NvU32 imemSecPa;     // physical address of the secure IMEM region

    // DMEM (data) layout
    NvU32 dataOffset;    // byte offset of the data segment within pImage
    NvU32 dmemSize;
    NvU32 dmemPa;        // physical DMEM load address
} KernelGspFlcnUcodeBootDirect;

/*!
 * Variant of KernelGspFlcnUcode representing a non-Boot-from-HS ucode that
 * loads via the generic falcon bootloader.
 */
typedef struct KernelGspFlcnUcodeBootWithLoader
{
    // Code and data live in separate memory descriptors for this variant.
    MEMORY_DESCRIPTOR *pCodeMemDesc;
    MEMORY_DESCRIPTOR *pDataMemDesc;

    // IMEM (code) layout
    NvU32 codeOffset;    // byte offset of code within pCodeMemDesc's memory
    NvU32 imemSize;
    NvU32 imemNsSize;    // size of the non-secure IMEM region
    NvU32 imemNsPa;      // physical address of the non-secure IMEM region
    NvU32 imemSecSize;   // size of the secure IMEM region
    NvU32 imemSecPa;     // physical address of the secure IMEM region
    NvU32 codeEntry;     // entry point passed to the bootloader

    // DMEM (data) layout
    NvU32 dataOffset;    // byte offset of data within pDataMemDesc's memory
    NvU32 dmemSize;
    NvU32 dmemPa;        // physical DMEM load address

    // Extra fields used for falcon ucodes from VBIOS
    NvU32 interfaceOffset;
} KernelGspFlcnUcodeBootWithLoader;

/*!
 * Variant of KernelGspFlcnUcode representing a Boot-from-HS ucode.
 */
typedef struct KernelGspFlcnUcodeBootFromHs
{
    MEMORY_DESCRIPTOR *pUcodeMemDesc;  // single descriptor holding the whole ucode
    NvU32 size;                        // total size in bytes of the ucode

    // IMEM (code) layout: physical and virtual load addresses
    NvU32 codeOffset;
    NvU32 imemSize;
    NvU32 imemPa;
    NvU32 imemVa;

    // DMEM (data) layout: physical and virtual load addresses
    NvU32 dataOffset;
    NvU32 dmemSize;
    NvU32 dmemPa;
    NvU32 dmemVa;

    // HS (heavy-secure) signature / identification fields
    NvU32 hsSigDmemAddr;  // DMEM address at which the HS signature is placed
    NvU32 ucodeId;
    NvU32 engineIdMask;

    // Extra fields used for falcon ucodes from VBIOS
    NvU32 *pSignatures;
    NvU32 signaturesTotalSize;  // size of buffer pointed by pSignatures
    NvU32 sigSize;              // size of one signature
    NvU32 sigCount;             // number of signatures in pSignatures

    NvU32 vbiosSigVersions;
    NvU32 interfaceOffset;
} KernelGspFlcnUcodeBootFromHs;

/*!
 * Type of KernelGspFlcnUcode. Used as tag in tagged union KernelGspFlcnUcode.
 * Affects how the ucode is loaded/booted.
 */
typedef enum KernelGspFlcnUcodeBootType
{
    KGSP_FLCN_UCODE_BOOT_DIRECT,       // selects ucodeBootDirect
    KGSP_FLCN_UCODE_BOOT_WITH_LOADER,  // selects ucodeBootWithLoader
    KGSP_FLCN_UCODE_BOOT_FROM_HS       // selects ucodeBootFromHs
} KernelGspFlcnUcodeBootType;

/*!
 * Tagged union of falcon ucode variants used by early FRTS and GSP-RM boot.
 * bootType selects which (anonymous) union member is valid.
 */
typedef struct KernelGspFlcnUcode
{
    KernelGspFlcnUcodeBootType bootType;
    union
    {
        KernelGspFlcnUcodeBootDirect ucodeBootDirect;
        KernelGspFlcnUcodeBootWithLoader ucodeBootWithLoader;
        KernelGspFlcnUcodeBootFromHs ucodeBootFromHs;
    };
} KernelGspFlcnUcode;

/*!
 * GSP-RM, when running in an emulated/simulated RISCV environment, is
 * extremely slow, so timeouts are scaled up by this factor.
 */
#define GSP_SCALE_TIMEOUT_EMU_SIM 2500

/*!
 * Size of libos init arguments packet.
 */
#define LIBOS_INIT_ARGUMENTS_SIZE 0x1000

/*!
 * Structure for passing GSP-RM firmware data.
 * pImageData/pSignatureData/pLogElf all point into (or alongside) pBuf;
 * no ownership is transferred by this struct itself.
 */
typedef struct GSP_FIRMWARE
{
    const void *pBuf;           // buffer holding the firmware (ucode)
    NvU32 size;                 // size of the firmware
    const void *pImageData;     // points to the GSP FW image start inside the pBuf buffer
    NvU64 imageSize;            // GSP FW image size inside the pBuf buffer
    const void *pSignatureData; // points to the GSP FW signature start inside the pBuf buffer
    NvU64 signatureSize;        // GSP FW signature size inside the pBuf buffer
    const void *pLogElf;        // firmware logging section and symbol information to decode logs
    NvU32 logElfSize;           // size of the gsp log elf binary
} GSP_FIRMWARE;

/*!
 * Known ELF section names (or name prefixes) of gsp_*.bin or gsp_log_*.bin.
 */
#define GSP_VERSION_SECTION_NAME           ".fwversion"
#define GSP_IMAGE_SECTION_NAME             ".fwimage"
#define GSP_LOGGING_SECTION_NAME           ".fwlogging"
#define GSP_SIGNATURE_SECTION_NAME_PREFIX  ".fwsignature_"
#define GSP_CC_SIGNATURE_SECTION_NAME_PREFIX ".fwsignature_cc_"

/*!
 * Index into libosLogDecode array.
 */
enum
{
    LOGIDX_INIT,  // init task log
    LOGIDX_INTR,  // interrupt task log
    LOGIDX_RM,    // RM task log
    LOGIDX_SIZE   // number of log indices (array size)
};

/*!
 * LIBOS task logging.
 */
typedef struct
{
    /* Memory for task logging */
    MEMORY_DESCRIPTOR *pTaskLogDescriptor;  // descriptor for the log buffer memory
    NvU64 *pTaskLogBuffer;                  // CPU pointer to the log buffer
    NvP64 pTaskLogMappingPriv;              // mapping-private data for the CPU mapping (presumably from memdescMap — confirm)
    NvU64 id8;                              // 8-byte identifier tag for this log
} RM_LIBOS_LOG_MEM;

/*!
 * KernelGsp object definition
 */
// NVOC privacy gate: translation units that define
// NVOC_KERNEL_GSP_H_PRIVATE_ACCESS_ALLOWED see private fields by their real
// names; all others get the NVOC_PRIVATE_FIELD-mangled form.
#ifdef NVOC_KERNEL_GSP_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
struct MESSAGE_QUEUE_COLLECTION;


// NVOC-generated class. Member order is ABI: the RTTI pointer, the base-class
// subobjects, the per-object vtable entries, and the data members must stay
// exactly as emitted by the generator.
struct KernelGsp {
    // Run-time type info and base-class subobjects (OBJENGSTATE, IntrService,
    // KernelFalcon), followed by cached "pbase" pointers used by staticCast.
    const struct NVOC_RTTI *__nvoc_rtti;
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct IntrService __nvoc_base_IntrService;
    struct KernelFalcon __nvoc_base_KernelFalcon;
    struct Object *__nvoc_pbase_Object;
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
    struct IntrService *__nvoc_pbase_IntrService;
    struct KernelFalcon *__nvoc_pbase_KernelFalcon;
    struct KernelGsp *__nvoc_pbase_KernelGsp;

    // Per-object virtual/HAL function pointers. Calls go through the
    // kgsp*_DISPATCH macros defined later in this header.
    NV_STATUS (*__kgspConstructEngine__)(struct OBJGPU *, struct KernelGsp *, ENGDESCRIPTOR);
    void (*__kgspRegisterIntrService__)(struct OBJGPU *, struct KernelGsp *, IntrServiceRecord *);
    NvU32 (*__kgspServiceInterrupt__)(struct OBJGPU *, struct KernelGsp *, IntrServiceServiceInterruptArguments *);
    void (*__kgspConfigureFalcon__)(struct OBJGPU *, struct KernelGsp *);
    NvBool (*__kgspIsDebugModeEnabled__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspAllocBootArgs__)(struct OBJGPU *, struct KernelGsp *);
    void (*__kgspFreeBootArgs__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspBootstrapRiscvOSEarly__)(struct OBJGPU *, struct KernelGsp *, GSP_FIRMWARE *);
    void (*__kgspGetGspRmBootUcodeStorage__)(struct OBJGPU *, struct KernelGsp *, BINDATA_STORAGE **, BINDATA_STORAGE **);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmBoot__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveConcatenatedFMCDesc__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveConcatenatedFMC__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmFmcGfwDebugSigned__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmFmcGfwProdSigned__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveGspRmCcFmcGfwProdSigned__)(struct KernelGsp *);
    NV_STATUS (*__kgspCalculateFbLayout__)(struct OBJGPU *, struct KernelGsp *, GSP_FIRMWARE *);
    NvU32 (*__kgspGetNonWprHeapSize__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspExecuteSequencerCommand__)(struct OBJGPU *, struct KernelGsp *, NvU32, NvU32 *, NvU32);
    NvU32 (*__kgspReadUcodeFuseVersion__)(struct OBJGPU *, struct KernelGsp *, NvU32);
    NV_STATUS (*__kgspResetHw__)(struct OBJGPU *, struct KernelGsp *);
    NvBool (*__kgspIsWpr2Up__)(struct OBJGPU *, struct KernelGsp *);
    NvU32 (*__kgspGetFrtsSize__)(struct OBJGPU *, struct KernelGsp *);
    NvU64 (*__kgspGetPrescrubbedTopFbSize__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspExtractVbiosFromRom__)(struct OBJGPU *, struct KernelGsp *, KernelGspVbiosImg **);
    NV_STATUS (*__kgspExecuteFwsecFrts__)(struct OBJGPU *, struct KernelGsp *, KernelGspFlcnUcode *, const NvU64);
    NV_STATUS (*__kgspExecuteFwsecSb__)(struct OBJGPU *, struct KernelGsp *, KernelGspFlcnUcode *);
    NV_STATUS (*__kgspExecuteScrubberIfNeeded__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspExecuteBooterLoad__)(struct OBJGPU *, struct KernelGsp *, const NvU64);
    NV_STATUS (*__kgspExecuteBooterUnloadIfNeeded__)(struct OBJGPU *, struct KernelGsp *, const NvU64);
    NV_STATUS (*__kgspExecuteHsFalcon__)(struct OBJGPU *, struct KernelGsp *, KernelGspFlcnUcode *, struct KernelFalcon *, NvU32 *, NvU32 *);
    NV_STATUS (*__kgspWaitForGfwBootOk__)(struct OBJGPU *, struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveBooterLoadUcode__)(struct KernelGsp *);
    const BINDATA_ARCHIVE *(*__kgspGetBinArchiveBooterUnloadUcode__)(struct KernelGsp *);
    NvU64 (*__kgspGetMinWprHeapSizeMB__)(struct OBJGPU *, struct KernelGsp *);
    NvU64 (*__kgspGetMaxWprHeapSizeMB__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspInitVgpuPartitionLogging__)(struct OBJGPU *, struct KernelGsp *, NvU32, NvU64, NvU64, NvU64, NvU64);
    NV_STATUS (*__kgspFreeVgpuPartitionLogging__)(struct OBJGPU *, struct KernelGsp *, NvU32);
    const char *(*__kgspGetSignatureSectionNamePrefix__)(struct OBJGPU *, struct KernelGsp *);
    NV_STATUS (*__kgspSetupGspFmcArgs__)(struct OBJGPU *, struct KernelGsp *, GSP_FIRMWARE *);

    // Inherited engine-state / interrupt-service virtuals (overridable).
    NV_STATUS (*__kgspStateLoad__)(POBJGPU, struct KernelGsp *, NvU32);
    NV_STATUS (*__kgspStateUnload__)(POBJGPU, struct KernelGsp *, NvU32);
    NV_STATUS (*__kgspServiceNotificationInterrupt__)(struct OBJGPU *, struct KernelGsp *, IntrServiceServiceNotificationInterruptArguments *);
    NV_STATUS (*__kgspStateInitLocked__)(POBJGPU, struct KernelGsp *);
    NV_STATUS (*__kgspStatePreLoad__)(POBJGPU, struct KernelGsp *, NvU32);
    NV_STATUS (*__kgspStatePostUnload__)(POBJGPU, struct KernelGsp *, NvU32);
    void (*__kgspStateDestroy__)(POBJGPU, struct KernelGsp *);
    NV_STATUS (*__kgspStatePreUnload__)(POBJGPU, struct KernelGsp *, NvU32);
    NV_STATUS (*__kgspStateInitUnlocked__)(POBJGPU, struct KernelGsp *);
    void (*__kgspInitMissing__)(POBJGPU, struct KernelGsp *);
    NV_STATUS (*__kgspStatePreInitLocked__)(POBJGPU, struct KernelGsp *);
    NV_STATUS (*__kgspStatePreInitUnlocked__)(POBJGPU, struct KernelGsp *);
    NvBool (*__kgspClearInterrupt__)(struct OBJGPU *, struct KernelGsp *, IntrServiceClearInterruptArguments *);
    NV_STATUS (*__kgspStatePostLoad__)(POBJGPU, struct KernelGsp *, NvU32);
    NvBool (*__kgspIsPresent__)(POBJGPU, struct KernelGsp *);

    // RPC / message-queue plumbing between Kernel RM and GSP-RM.
    struct MESSAGE_QUEUE_COLLECTION *pMQCollection;
    struct OBJRPC *pRpc;
    struct OBJRPC *pLocklessRpc;
    char vbiosVersionStr[16];

    // Falcon ucodes used during boot/teardown (FWSEC, scrubber, Booter).
    KernelGspFlcnUcode *pFwsecUcode;
    KernelGspFlcnUcode *pScrubberUcode;
    KernelGspFlcnUcode *pBooterLoadUcode;
    KernelGspFlcnUcode *pBooterUnloadUcode;

    // WPR metadata: descriptor, cached CPU view, and mapping-private data.
    MEMORY_DESCRIPTOR *pWprMetaDescriptor;
    GspFwWprMeta *pWprMeta;
    NvP64 pWprMetaMappingPriv;

    // Suspend/resume (SR) metadata and radix3 page tables.
    MEMORY_DESCRIPTOR *pSRMetaDescriptor;
    MEMORY_DESCRIPTOR *pSRRadix3Descriptor;

    // GSP FMC boot arguments: descriptor, cached CPU view, mapping-private data.
    MEMORY_DESCRIPTOR *pGspFmcArgumentsDescriptor;
    GSP_FMC_BOOT_PARAMS *pGspFmcArgumentsCached;
    NvP64 pGspFmcArgumentsMappingPriv;

    // LibOS init arguments (same descriptor/cached-view/priv triple).
    MEMORY_DESCRIPTOR *pLibosInitArgumentsDescriptor;
    LibosMemoryRegionInitArgument *pLibosInitArgumentsCached;
    NvP64 pLibosInitArgumentsMappingPriv;

    // GSP-RM arguments (same descriptor/cached-view/priv triple).
    MEMORY_DESCRIPTOR *pGspArgumentsDescriptor;
    GSP_ARGUMENTS_CACHED *pGspArgumentsCached;
    NvP64 pGspArgumentsMappingPriv;

    // GSP-RM boot ucode image and its RISCV descriptor.
    MEMORY_DESCRIPTOR *pGspRmBootUcodeMemdesc;
    NvP64 pGspRmBootUcodeMemdescPriv;
    NvU32 gspRmBootUcodeSize;
    NvU8 *pGspRmBootUcodeImage;
    RM_RISCV_UCODE_DESC *pGspRmBootUcodeDesc;

    // GSP-RM image radix3 tables and firmware signature.
    MEMORY_DESCRIPTOR *pGspUCodeRadix3Descriptor;
    MEMORY_DESCRIPTOR *pSignatureMemdesc;

    // LibOS log decoding state: main decoder, per-vGPU-partition decoders,
    // and the backing log buffers (RM tasks plus per-partition plugin tasks).
    LIBOS_LOG_DECODE logDecode;
    LIBOS_LOG_DECODE logDecodeVgpuPartition[32];
    RM_LIBOS_LOG_MEM rmLibosLogMem[3];  // indexed by LOGIDX_* (LOGIDX_SIZE == 3)
    RM_LIBOS_LOG_MEM gspPluginInitTaskLogMem[32];
    RM_LIBOS_LOG_MEM gspPluginVgpuTaskLogMem[32];
    void *pLogElf;        // copy of the firmware log ELF used by the decoder
    NvU64 logElfDataSize;

    // Boot / runtime state flags.
    NvBool bLibosLogsPollingEnabled;
    NvBool bInInit;
    NvBool bInLockdown;
    NvBool bPollingForRpcResponse;

    // Simulation access buffer (simulation/emulation support).
    MEMORY_DESCRIPTOR *pMemDesc_simAccessBuf;
    SimAccessBuffer *pSimAccessBuf;
    NvP64 pSimAccessBufPriv;

    // Profiler sample buffer shared with GSP-RM.
    MEMORY_DESCRIPTOR *pProfilerSamplesMD;
    void *pProfilerSamplesMDPriv;
    void *pProfilerSamples;

    // Static configuration reported by GSP-RM, plus feature/capability flags
    // and firmware-heap sizing parameters.
    GspStaticConfigInfo gspStaticInfo;
    NvBool bIsTaskIsrQueueRequired;
    NvBool bPartitionedFmc;
    NvBool bScrubberUcodeSupported;
    NvU32 fwHeapParamBaseSize;
    NvU32 fwHeapParamOsCarveoutSize;
};

#ifndef __NVOC_CLASS_KernelGsp_TYPEDEF__
#define __NVOC_CLASS_KernelGsp_TYPEDEF__
typedef struct KernelGsp KernelGsp;
#endif /* __NVOC_CLASS_KernelGsp_TYPEDEF__ */

// NVOC class id for KernelGsp (stable identifier used by the runtime).
#ifndef __nvoc_class_id_KernelGsp
#define __nvoc_class_id_KernelGsp 0x311d4e
#endif /* __nvoc_class_id_KernelGsp */

extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelGsp;

// Upcast/identity cast: returns the cached KernelGsp "pbase" pointer.
#define __staticCast_KernelGsp(pThis) \
    ((pThis)->__nvoc_pbase_KernelGsp)
// Downcast via NVOC RTTI; yields NULL when the KernelGsp class is
// compile-time disabled.
#ifdef __nvoc_kernel_gsp_h_disabled
#define __dynamicCast_KernelGsp(pThis) ((KernelGsp*)NULL)
#else //__nvoc_kernel_gsp_h_disabled
#define __dynamicCast_KernelGsp(pThis) \
    ((KernelGsp*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelGsp)))
#endif //__nvoc_kernel_gsp_h_disabled

// PDB property indirection: KGSP's IS_MISSING property resolves to the
// OBJENGSTATE base-class property.
#define PDB_PROP_KGSP_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KGSP_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING

NV_STATUS __nvoc_objCreateDynamic_KernelGsp(KernelGsp**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelGsp(KernelGsp**, Dynamic*, NvU32);
#define __objCreate_KernelGsp(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelGsp((ppNewObj), staticCast((pParent), Dynamic), (createFlags))

// Dispatch macros: each kgspX(...) call routes through the per-object
// function pointer set up by NVOC. The _HAL alias expands to the exact same
// dispatcher; it exists so HAL-qualified call sites compile uniformly.
#define kgspConstructEngine(pGpu, pKernelGsp, arg0) kgspConstructEngine_DISPATCH(pGpu, pKernelGsp, arg0)
#define kgspRegisterIntrService(pGpu, pKernelGsp, pRecords) kgspRegisterIntrService_DISPATCH(pGpu, pKernelGsp, pRecords)
#define kgspServiceInterrupt(pGpu, pKernelGsp, pParams) kgspServiceInterrupt_DISPATCH(pGpu, pKernelGsp, pParams)
#define kgspConfigureFalcon(pGpu, pKernelGsp) kgspConfigureFalcon_DISPATCH(pGpu, pKernelGsp)
#define kgspConfigureFalcon_HAL(pGpu, pKernelGsp) kgspConfigureFalcon_DISPATCH(pGpu, pKernelGsp)
#define kgspIsDebugModeEnabled(pGpu, pKernelGsp) kgspIsDebugModeEnabled_DISPATCH(pGpu, pKernelGsp)
#define kgspIsDebugModeEnabled_HAL(pGpu, pKernelGsp) kgspIsDebugModeEnabled_DISPATCH(pGpu, pKernelGsp)
#define kgspAllocBootArgs(pGpu, pKernelGsp) kgspAllocBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspAllocBootArgs_HAL(pGpu, pKernelGsp) kgspAllocBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspFreeBootArgs(pGpu, pKernelGsp) kgspFreeBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspFreeBootArgs_HAL(pGpu, pKernelGsp) kgspFreeBootArgs_DISPATCH(pGpu, pKernelGsp)
#define kgspBootstrapRiscvOSEarly(pGpu, pKernelGsp, pGspFw) kgspBootstrapRiscvOSEarly_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspBootstrapRiscvOSEarly_HAL(pGpu, pKernelGsp, pGspFw) kgspBootstrapRiscvOSEarly_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspGetGspRmBootUcodeStorage(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc) kgspGetGspRmBootUcodeStorage_DISPATCH(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc)
#define kgspGetGspRmBootUcodeStorage_HAL(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc) kgspGetGspRmBootUcodeStorage_DISPATCH(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc)
#define kgspGetBinArchiveGspRmBoot(pKernelGsp) kgspGetBinArchiveGspRmBoot_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmBoot_HAL(pKernelGsp) kgspGetBinArchiveGspRmBoot_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMCDesc(pKernelGsp) kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMCDesc_HAL(pKernelGsp) kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMC(pKernelGsp) kgspGetBinArchiveConcatenatedFMC_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveConcatenatedFMC_HAL(pKernelGsp) kgspGetBinArchiveConcatenatedFMC_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwDebugSigned(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwDebugSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwProdSigned(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmFmcGfwProdSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmCcFmcGfwProdSigned(pKernelGsp) kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveGspRmCcFmcGfwProdSigned_HAL(pKernelGsp) kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(pKernelGsp)
#define kgspCalculateFbLayout(pGpu, pKernelGsp, pGspFw) kgspCalculateFbLayout_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspCalculateFbLayout_HAL(pGpu, pKernelGsp, pGspFw) kgspCalculateFbLayout_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspGetNonWprHeapSize(pGpu, pKernelGsp) kgspGetNonWprHeapSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetNonWprHeapSize_HAL(pGpu, pKernelGsp) kgspGetNonWprHeapSize_DISPATCH(pGpu, pKernelGsp)
#define kgspExecuteSequencerCommand(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize) kgspExecuteSequencerCommand_DISPATCH(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize)
#define kgspExecuteSequencerCommand_HAL(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize) kgspExecuteSequencerCommand_DISPATCH(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize)
#define kgspReadUcodeFuseVersion(pGpu, pKernelGsp, ucodeId) kgspReadUcodeFuseVersion_DISPATCH(pGpu, pKernelGsp, ucodeId)
#define kgspReadUcodeFuseVersion_HAL(pGpu, pKernelGsp, ucodeId) kgspReadUcodeFuseVersion_DISPATCH(pGpu, pKernelGsp, ucodeId)
#define kgspResetHw(pGpu, pKernelGsp) kgspResetHw_DISPATCH(pGpu, pKernelGsp)
#define kgspResetHw_HAL(pGpu, pKernelGsp) kgspResetHw_DISPATCH(pGpu, pKernelGsp)
#define kgspIsWpr2Up(pGpu, pKernelGsp) kgspIsWpr2Up_DISPATCH(pGpu, pKernelGsp)
#define kgspIsWpr2Up_HAL(pGpu, pKernelGsp) kgspIsWpr2Up_DISPATCH(pGpu, pKernelGsp)
#define kgspGetFrtsSize(pGpu, pKernelGsp) kgspGetFrtsSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetFrtsSize_HAL(pGpu, pKernelGsp) kgspGetFrtsSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetPrescrubbedTopFbSize(pGpu, pKernelGsp) kgspGetPrescrubbedTopFbSize_DISPATCH(pGpu, pKernelGsp)
#define kgspGetPrescrubbedTopFbSize_HAL(pGpu, pKernelGsp) kgspGetPrescrubbedTopFbSize_DISPATCH(pGpu, pKernelGsp)
#define kgspExtractVbiosFromRom(pGpu, pKernelGsp, ppVbiosImg) kgspExtractVbiosFromRom_DISPATCH(pGpu, pKernelGsp, ppVbiosImg)
#define kgspExtractVbiosFromRom_HAL(pGpu, pKernelGsp, ppVbiosImg) kgspExtractVbiosFromRom_DISPATCH(pGpu, pKernelGsp, ppVbiosImg)
#define kgspExecuteFwsecFrts(pGpu, pKernelGsp, pFwsecUcode, frtsOffset) kgspExecuteFwsecFrts_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, frtsOffset)
#define kgspExecuteFwsecFrts_HAL(pGpu, pKernelGsp, pFwsecUcode, frtsOffset) kgspExecuteFwsecFrts_DISPATCH(pGpu, pKernelGsp, pFwsecUcode, frtsOffset)
#define kgspExecuteFwsecSb(pGpu, pKernelGsp, pFwsecUcode) kgspExecuteFwsecSb_DISPATCH(pGpu, pKernelGsp, pFwsecUcode)
#define kgspExecuteFwsecSb_HAL(pGpu, pKernelGsp, pFwsecUcode) kgspExecuteFwsecSb_DISPATCH(pGpu, pKernelGsp, pFwsecUcode)
#define kgspExecuteScrubberIfNeeded(pGpu, pKernelGsp) kgspExecuteScrubberIfNeeded_DISPATCH(pGpu, pKernelGsp)
#define kgspExecuteScrubberIfNeeded_HAL(pGpu, pKernelGsp) kgspExecuteScrubberIfNeeded_DISPATCH(pGpu, pKernelGsp)
#define kgspExecuteBooterLoad(pGpu, pKernelGsp, sysmemAddrOfData) kgspExecuteBooterLoad_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfData)
#define kgspExecuteBooterLoad_HAL(pGpu, pKernelGsp, sysmemAddrOfData) kgspExecuteBooterLoad_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfData)
#define kgspExecuteBooterUnloadIfNeeded(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData) kgspExecuteBooterUnloadIfNeeded_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData)
#define kgspExecuteBooterUnloadIfNeeded_HAL(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData) kgspExecuteBooterUnloadIfNeeded_DISPATCH(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData)
#define kgspExecuteHsFalcon(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1) kgspExecuteHsFalcon_DISPATCH(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1)
#define kgspExecuteHsFalcon_HAL(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1) kgspExecuteHsFalcon_DISPATCH(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1)
#define kgspWaitForGfwBootOk(pGpu, pKernelGsp) kgspWaitForGfwBootOk_DISPATCH(pGpu, pKernelGsp)
#define kgspWaitForGfwBootOk_HAL(pGpu, pKernelGsp) kgspWaitForGfwBootOk_DISPATCH(pGpu, pKernelGsp)
#define kgspGetBinArchiveBooterLoadUcode(pKernelGsp) kgspGetBinArchiveBooterLoadUcode_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveBooterLoadUcode_HAL(pKernelGsp) kgspGetBinArchiveBooterLoadUcode_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveBooterUnloadUcode(pKernelGsp) kgspGetBinArchiveBooterUnloadUcode_DISPATCH(pKernelGsp)
#define kgspGetBinArchiveBooterUnloadUcode_HAL(pKernelGsp) kgspGetBinArchiveBooterUnloadUcode_DISPATCH(pKernelGsp)
#define kgspGetMinWprHeapSizeMB(pGpu, pKernelGsp) kgspGetMinWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
#define kgspGetMinWprHeapSizeMB_HAL(pGpu, pKernelGsp) kgspGetMinWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
#define kgspGetMaxWprHeapSizeMB(pGpu, pKernelGsp) kgspGetMaxWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
#define kgspGetMaxWprHeapSizeMB_HAL(pGpu, pKernelGsp) kgspGetMaxWprHeapSizeMB_DISPATCH(pGpu, pKernelGsp)
#define kgspInitVgpuPartitionLogging(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize) kgspInitVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize)
#define kgspInitVgpuPartitionLogging_HAL(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize) kgspInitVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize)
#define kgspFreeVgpuPartitionLogging(pGpu, pKernelGsp, gfid) kgspFreeVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid)
#define kgspFreeVgpuPartitionLogging_HAL(pGpu, pKernelGsp, gfid) kgspFreeVgpuPartitionLogging_DISPATCH(pGpu, pKernelGsp, gfid)
#define kgspGetSignatureSectionNamePrefix(pGpu, pKernelGsp) kgspGetSignatureSectionNamePrefix_DISPATCH(pGpu, pKernelGsp)
#define kgspGetSignatureSectionNamePrefix_HAL(pGpu, pKernelGsp) kgspGetSignatureSectionNamePrefix_DISPATCH(pGpu, pKernelGsp)
#define kgspSetupGspFmcArgs(pGpu, pKernelGsp, pGspFw) kgspSetupGspFmcArgs_DISPATCH(pGpu, pKernelGsp, pGspFw)
#define kgspSetupGspFmcArgs_HAL(pGpu, pKernelGsp, pGspFw) kgspSetupGspFmcArgs_DISPATCH(pGpu, pKernelGsp, pGspFw)
// Inherited OBJENGSTATE / IntrService virtuals (no _HAL aliases generated).
#define kgspStateLoad(pGpu, pEngstate, arg0) kgspStateLoad_DISPATCH(pGpu, pEngstate, arg0)
#define kgspStateUnload(pGpu, pEngstate, arg0) kgspStateUnload_DISPATCH(pGpu, pEngstate, arg0)
#define kgspServiceNotificationInterrupt(pGpu, pIntrService, pParams) kgspServiceNotificationInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define kgspStateInitLocked(pGpu, pEngstate) kgspStateInitLocked_DISPATCH(pGpu, pEngstate)
#define kgspStatePreLoad(pGpu, pEngstate, arg0) kgspStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define kgspStatePostUnload(pGpu, pEngstate, arg0) kgspStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define kgspStateDestroy(pGpu, pEngstate) kgspStateDestroy_DISPATCH(pGpu, pEngstate)
#define kgspStatePreUnload(pGpu, pEngstate, arg0) kgspStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define kgspStateInitUnlocked(pGpu, pEngstate) kgspStateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kgspInitMissing(pGpu, pEngstate) kgspInitMissing_DISPATCH(pGpu, pEngstate)
#define kgspStatePreInitLocked(pGpu, pEngstate) kgspStatePreInitLocked_DISPATCH(pGpu, pEngstate)
#define kgspStatePreInitUnlocked(pGpu, pEngstate) kgspStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define kgspClearInterrupt(pGpu, pIntrService, pParams) kgspClearInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define kgspStatePostLoad(pGpu, pEngstate, arg0) kgspStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
#define kgspIsPresent(pGpu, pEngstate) kgspIsPresent_DISPATCH(pGpu, pEngstate)

// Non-dispatched HAL functions. Pattern for each: a _TU102 implementation
// declaration; when KernelGsp is compile-time disabled
// (__nvoc_kernel_gsp_h_disabled) the unqualified name becomes an asserting
// stub, otherwise it binds directly to the TU102 implementation; the _HAL
// alias forwards to the unqualified name.
void kgspProgramLibosBootArgsAddr_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);


#ifdef __nvoc_kernel_gsp_h_disabled
static inline void kgspProgramLibosBootArgsAddr(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspProgramLibosBootArgsAddr(pGpu, pKernelGsp) kgspProgramLibosBootArgsAddr_TU102(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

#define kgspProgramLibosBootArgsAddr_HAL(pGpu, pKernelGsp) kgspProgramLibosBootArgsAddr(pGpu, pKernelGsp)

NV_STATUS kgspSetCmdQueueHead_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 queueIdx, NvU32 value);


#ifdef __nvoc_kernel_gsp_h_disabled
static inline NV_STATUS kgspSetCmdQueueHead(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 queueIdx, NvU32 value) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspSetCmdQueueHead(pGpu, pKernelGsp, queueIdx, value) kgspSetCmdQueueHead_TU102(pGpu, pKernelGsp, queueIdx, value)
#endif //__nvoc_kernel_gsp_h_disabled

#define kgspSetCmdQueueHead_HAL(pGpu, pKernelGsp, queueIdx, value) kgspSetCmdQueueHead(pGpu, pKernelGsp, queueIdx, value)

void kgspHealthCheck_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);


#ifdef __nvoc_kernel_gsp_h_disabled
static inline void kgspHealthCheck(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspHealthCheck(pGpu, pKernelGsp) kgspHealthCheck_TU102(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

#define kgspHealthCheck_HAL(pGpu, pKernelGsp) kgspHealthCheck(pGpu, pKernelGsp)

NvU32 kgspService_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);


#ifdef __nvoc_kernel_gsp_h_disabled 530 static inline NvU32 kgspService(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 531 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 532 return 0; 533 } 534 #else //__nvoc_kernel_gsp_h_disabled 535 #define kgspService(pGpu, pKernelGsp) kgspService_TU102(pGpu, pKernelGsp) 536 #endif //__nvoc_kernel_gsp_h_disabled 537 538 #define kgspService_HAL(pGpu, pKernelGsp) kgspService(pGpu, pKernelGsp) 539 540 NV_STATUS kgspWaitForProcessorSuspend_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 541 542 543 #ifdef __nvoc_kernel_gsp_h_disabled 544 static inline NV_STATUS kgspWaitForProcessorSuspend(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 545 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 546 return NV_ERR_NOT_SUPPORTED; 547 } 548 #else //__nvoc_kernel_gsp_h_disabled 549 #define kgspWaitForProcessorSuspend(pGpu, pKernelGsp) kgspWaitForProcessorSuspend_TU102(pGpu, pKernelGsp) 550 #endif //__nvoc_kernel_gsp_h_disabled 551 552 #define kgspWaitForProcessorSuspend_HAL(pGpu, pKernelGsp) kgspWaitForProcessorSuspend(pGpu, pKernelGsp) 553 554 NV_STATUS kgspSavePowerMgmtState_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 555 556 557 #ifdef __nvoc_kernel_gsp_h_disabled 558 static inline NV_STATUS kgspSavePowerMgmtState(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 559 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 560 return NV_ERR_NOT_SUPPORTED; 561 } 562 #else //__nvoc_kernel_gsp_h_disabled 563 #define kgspSavePowerMgmtState(pGpu, pKernelGsp) kgspSavePowerMgmtState_TU102(pGpu, pKernelGsp) 564 #endif //__nvoc_kernel_gsp_h_disabled 565 566 #define kgspSavePowerMgmtState_HAL(pGpu, pKernelGsp) kgspSavePowerMgmtState(pGpu, pKernelGsp) 567 568 NV_STATUS kgspRestorePowerMgmtState_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 569 570 571 #ifdef __nvoc_kernel_gsp_h_disabled 572 static inline NV_STATUS kgspRestorePowerMgmtState(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 573 
NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 574 return NV_ERR_NOT_SUPPORTED; 575 } 576 #else //__nvoc_kernel_gsp_h_disabled 577 #define kgspRestorePowerMgmtState(pGpu, pKernelGsp) kgspRestorePowerMgmtState_TU102(pGpu, pKernelGsp) 578 #endif //__nvoc_kernel_gsp_h_disabled 579 580 #define kgspRestorePowerMgmtState_HAL(pGpu, pKernelGsp) kgspRestorePowerMgmtState(pGpu, pKernelGsp) 581 582 void kgspFreeSuspendResumeData_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 583 584 585 #ifdef __nvoc_kernel_gsp_h_disabled 586 static inline void kgspFreeSuspendResumeData(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 587 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 588 } 589 #else //__nvoc_kernel_gsp_h_disabled 590 #define kgspFreeSuspendResumeData(pGpu, pKernelGsp) kgspFreeSuspendResumeData_TU102(pGpu, pKernelGsp) 591 #endif //__nvoc_kernel_gsp_h_disabled 592 593 #define kgspFreeSuspendResumeData_HAL(pGpu, pKernelGsp) kgspFreeSuspendResumeData(pGpu, pKernelGsp) 594 595 NV_STATUS kgspConstructEngine_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, ENGDESCRIPTOR arg0); 596 597 static inline NV_STATUS kgspConstructEngine_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, ENGDESCRIPTOR arg0) { 598 return pKernelGsp->__kgspConstructEngine__(pGpu, pKernelGsp, arg0); 599 } 600 601 void kgspRegisterIntrService_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceRecord pRecords[167]); 602 603 static inline void kgspRegisterIntrService_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceRecord pRecords[167]) { 604 pKernelGsp->__kgspRegisterIntrService__(pGpu, pKernelGsp, pRecords); 605 } 606 607 NvU32 kgspServiceInterrupt_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceServiceInterruptArguments *pParams); 608 609 static inline NvU32 kgspServiceInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, IntrServiceServiceInterruptArguments *pParams) { 610 return 
pKernelGsp->__kgspServiceInterrupt__(pGpu, pKernelGsp, pParams); 611 } 612 613 void kgspConfigureFalcon_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 614 615 void kgspConfigureFalcon_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 616 617 static inline void kgspConfigureFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 618 pKernelGsp->__kgspConfigureFalcon__(pGpu, pKernelGsp); 619 } 620 621 NvBool kgspIsDebugModeEnabled_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 622 623 NvBool kgspIsDebugModeEnabled_GA100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 624 625 static inline NvBool kgspIsDebugModeEnabled_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 626 return pKernelGsp->__kgspIsDebugModeEnabled__(pGpu, pKernelGsp); 627 } 628 629 NV_STATUS kgspAllocBootArgs_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 630 631 NV_STATUS kgspAllocBootArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 632 633 static inline NV_STATUS kgspAllocBootArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 634 return pKernelGsp->__kgspAllocBootArgs__(pGpu, pKernelGsp); 635 } 636 637 void kgspFreeBootArgs_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 638 639 void kgspFreeBootArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 640 641 static inline void kgspFreeBootArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 642 pKernelGsp->__kgspFreeBootArgs__(pGpu, pKernelGsp); 643 } 644 645 NV_STATUS kgspBootstrapRiscvOSEarly_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw); 646 647 NV_STATUS kgspBootstrapRiscvOSEarly_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw); 648 649 NV_STATUS kgspBootstrapRiscvOSEarly_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw); 650 651 static inline NV_STATUS kgspBootstrapRiscvOSEarly_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp 
*pKernelGsp, GSP_FIRMWARE *pGspFw) { 652 return pKernelGsp->__kgspBootstrapRiscvOSEarly__(pGpu, pKernelGsp, pGspFw); 653 } 654 655 void kgspGetGspRmBootUcodeStorage_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc); 656 657 void kgspGetGspRmBootUcodeStorage_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc); 658 659 void kgspGetGspRmBootUcodeStorage_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc); 660 661 static inline void kgspGetGspRmBootUcodeStorage_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, BINDATA_STORAGE **ppBinStorageImage, BINDATA_STORAGE **ppBinStorageDesc) { 662 pKernelGsp->__kgspGetGspRmBootUcodeStorage__(pGpu, pKernelGsp, ppBinStorageImage, ppBinStorageDesc); 663 } 664 665 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_TU102(struct KernelGsp *pKernelGsp); 666 667 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GA100(struct KernelGsp *pKernelGsp); 668 669 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GA102(struct KernelGsp *pKernelGsp); 670 671 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_GH100(struct KernelGsp *pKernelGsp); 672 673 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_AD102(struct KernelGsp *pKernelGsp); 674 675 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmBoot_DISPATCH(struct KernelGsp *pKernelGsp) { 676 return pKernelGsp->__kgspGetBinArchiveGspRmBoot__(pKernelGsp); 677 } 678 679 const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_GH100(struct KernelGsp *pKernelGsp); 680 681 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_80f438(struct KernelGsp *pKernelGsp) { 682 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0)); 683 } 684 685 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMCDesc_DISPATCH(struct KernelGsp 
*pKernelGsp) { 686 return pKernelGsp->__kgspGetBinArchiveConcatenatedFMCDesc__(pKernelGsp); 687 } 688 689 const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_GH100(struct KernelGsp *pKernelGsp); 690 691 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_80f438(struct KernelGsp *pKernelGsp) { 692 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0)); 693 } 694 695 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveConcatenatedFMC_DISPATCH(struct KernelGsp *pKernelGsp) { 696 return pKernelGsp->__kgspGetBinArchiveConcatenatedFMC__(pKernelGsp); 697 } 698 699 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_GH100(struct KernelGsp *pKernelGsp); 700 701 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_80f438(struct KernelGsp *pKernelGsp) { 702 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0)); 703 } 704 705 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwDebugSigned_DISPATCH(struct KernelGsp *pKernelGsp) { 706 return pKernelGsp->__kgspGetBinArchiveGspRmFmcGfwDebugSigned__(pKernelGsp); 707 } 708 709 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_GH100(struct KernelGsp *pKernelGsp); 710 711 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_80f438(struct KernelGsp *pKernelGsp) { 712 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0)); 713 } 714 715 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmFmcGfwProdSigned_DISPATCH(struct KernelGsp *pKernelGsp) { 716 return pKernelGsp->__kgspGetBinArchiveGspRmFmcGfwProdSigned__(pKernelGsp); 717 } 718 719 const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_GH100(struct KernelGsp *pKernelGsp); 720 721 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_80f438(struct KernelGsp *pKernelGsp) { 722 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0)); 723 } 724 725 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveGspRmCcFmcGfwProdSigned_DISPATCH(struct KernelGsp *pKernelGsp) { 726 return 
pKernelGsp->__kgspGetBinArchiveGspRmCcFmcGfwProdSigned__(pKernelGsp); 727 } 728 729 NV_STATUS kgspCalculateFbLayout_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw); 730 731 NV_STATUS kgspCalculateFbLayout_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw); 732 733 static inline NV_STATUS kgspCalculateFbLayout_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) { 734 return pKernelGsp->__kgspCalculateFbLayout__(pGpu, pKernelGsp, pGspFw); 735 } 736 737 static inline NvU32 kgspGetNonWprHeapSize_ed6b8b(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 738 return 1048576; 739 } 740 741 static inline NvU32 kgspGetNonWprHeapSize_d505ea(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 742 return 2097152; 743 } 744 745 static inline NvU32 kgspGetNonWprHeapSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 746 return pKernelGsp->__kgspGetNonWprHeapSize__(pGpu, pKernelGsp); 747 } 748 749 NV_STATUS kgspExecuteSequencerCommand_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize); 750 751 NV_STATUS kgspExecuteSequencerCommand_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize); 752 753 static inline NV_STATUS kgspExecuteSequencerCommand_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 opCode, NvU32 *pPayLoad, NvU32 payloadSize) { 754 return pKernelGsp->__kgspExecuteSequencerCommand__(pGpu, pKernelGsp, opCode, pPayLoad, payloadSize); 755 } 756 757 static inline NvU32 kgspReadUcodeFuseVersion_b2b553(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId) { 758 return 0; 759 } 760 761 NvU32 kgspReadUcodeFuseVersion_GA100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId); 762 763 static inline NvU32 kgspReadUcodeFuseVersion_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 ucodeId) { 764 return 
pKernelGsp->__kgspReadUcodeFuseVersion__(pGpu, pKernelGsp, ucodeId); 765 } 766 767 NV_STATUS kgspResetHw_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 768 769 NV_STATUS kgspResetHw_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 770 771 static inline NV_STATUS kgspResetHw_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 772 return pKernelGsp->__kgspResetHw__(pGpu, pKernelGsp); 773 } 774 775 NvBool kgspIsWpr2Up_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 776 777 NvBool kgspIsWpr2Up_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 778 779 static inline NvBool kgspIsWpr2Up_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 780 return pKernelGsp->__kgspIsWpr2Up__(pGpu, pKernelGsp); 781 } 782 783 NvU32 kgspGetFrtsSize_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 784 785 static inline NvU32 kgspGetFrtsSize_4a4dee(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 786 return 0; 787 } 788 789 static inline NvU32 kgspGetFrtsSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 790 return pKernelGsp->__kgspGetFrtsSize__(pGpu, pKernelGsp); 791 } 792 793 static inline NvU64 kgspGetPrescrubbedTopFbSize_e1e623(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 794 return 256 * 1024 * 1024; 795 } 796 797 static inline NvU64 kgspGetPrescrubbedTopFbSize_604eb7(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 798 return (+18446744073709551615ULL); 799 } 800 801 static inline NvU64 kgspGetPrescrubbedTopFbSize_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 802 return pKernelGsp->__kgspGetPrescrubbedTopFbSize__(pGpu, pKernelGsp); 803 } 804 805 NV_STATUS kgspExtractVbiosFromRom_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg); 806 807 static inline NV_STATUS kgspExtractVbiosFromRom_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg) { 808 return NV_ERR_NOT_SUPPORTED; 809 } 810 811 static inline 
NV_STATUS kgspExtractVbiosFromRom_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspVbiosImg **ppVbiosImg) { 812 return pKernelGsp->__kgspExtractVbiosFromRom__(pGpu, pKernelGsp, ppVbiosImg); 813 } 814 815 NV_STATUS kgspExecuteFwsecFrts_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset); 816 817 static inline NV_STATUS kgspExecuteFwsecFrts_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset) { 818 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED); 819 } 820 821 static inline NV_STATUS kgspExecuteFwsecFrts_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode, const NvU64 frtsOffset) { 822 return pKernelGsp->__kgspExecuteFwsecFrts__(pGpu, pKernelGsp, pFwsecUcode, frtsOffset); 823 } 824 825 NV_STATUS kgspExecuteFwsecSb_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode); 826 827 static inline NV_STATUS kgspExecuteFwsecSb_ac1694(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode) { 828 return NV_OK; 829 } 830 831 static inline NV_STATUS kgspExecuteFwsecSb_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFwsecUcode) { 832 return pKernelGsp->__kgspExecuteFwsecSb__(pGpu, pKernelGsp, pFwsecUcode); 833 } 834 835 NV_STATUS kgspExecuteScrubberIfNeeded_AD102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 836 837 static inline NV_STATUS kgspExecuteScrubberIfNeeded_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 838 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED); 839 } 840 841 static inline NV_STATUS kgspExecuteScrubberIfNeeded_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 842 return pKernelGsp->__kgspExecuteScrubberIfNeeded__(pGpu, pKernelGsp); 843 } 844 845 NV_STATUS kgspExecuteBooterLoad_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 
sysmemAddrOfData); 846 847 static inline NV_STATUS kgspExecuteBooterLoad_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData) { 848 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED); 849 } 850 851 static inline NV_STATUS kgspExecuteBooterLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfData) { 852 return pKernelGsp->__kgspExecuteBooterLoad__(pGpu, pKernelGsp, sysmemAddrOfData); 853 } 854 855 NV_STATUS kgspExecuteBooterUnloadIfNeeded_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData); 856 857 static inline NV_STATUS kgspExecuteBooterUnloadIfNeeded_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData) { 858 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED); 859 } 860 861 static inline NV_STATUS kgspExecuteBooterUnloadIfNeeded_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const NvU64 sysmemAddrOfSuspendResumeData) { 862 return pKernelGsp->__kgspExecuteBooterUnloadIfNeeded__(pGpu, pKernelGsp, sysmemAddrOfSuspendResumeData); 863 } 864 865 NV_STATUS kgspExecuteHsFalcon_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1); 866 867 NV_STATUS kgspExecuteHsFalcon_GA102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1); 868 869 static inline NV_STATUS kgspExecuteHsFalcon_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 *pMailbox0, NvU32 *pMailbox1) { 870 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED); 871 } 872 873 static inline NV_STATUS kgspExecuteHsFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode *pFlcnUcode, struct KernelFalcon *pKernelFlcn, NvU32 
*pMailbox0, NvU32 *pMailbox1) { 874 return pKernelGsp->__kgspExecuteHsFalcon__(pGpu, pKernelGsp, pFlcnUcode, pKernelFlcn, pMailbox0, pMailbox1); 875 } 876 877 NV_STATUS kgspWaitForGfwBootOk_TU102(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 878 879 NV_STATUS kgspWaitForGfwBootOk_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 880 881 static inline NV_STATUS kgspWaitForGfwBootOk_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 882 return pKernelGsp->__kgspWaitForGfwBootOk__(pGpu, pKernelGsp); 883 } 884 885 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_TU102(struct KernelGsp *pKernelGsp); 886 887 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_TU116(struct KernelGsp *pKernelGsp); 888 889 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_GA100(struct KernelGsp *pKernelGsp); 890 891 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_GA102(struct KernelGsp *pKernelGsp); 892 893 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_AD102(struct KernelGsp *pKernelGsp); 894 895 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_80f438(struct KernelGsp *pKernelGsp) { 896 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0)); 897 } 898 899 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterLoadUcode_DISPATCH(struct KernelGsp *pKernelGsp) { 900 return pKernelGsp->__kgspGetBinArchiveBooterLoadUcode__(pKernelGsp); 901 } 902 903 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_TU102(struct KernelGsp *pKernelGsp); 904 905 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_TU116(struct KernelGsp *pKernelGsp); 906 907 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_GA100(struct KernelGsp *pKernelGsp); 908 909 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_GA102(struct KernelGsp *pKernelGsp); 910 911 const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_AD102(struct KernelGsp *pKernelGsp); 912 913 static inline const BINDATA_ARCHIVE 
*kgspGetBinArchiveBooterUnloadUcode_80f438(struct KernelGsp *pKernelGsp) { 914 NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0)); 915 } 916 917 static inline const BINDATA_ARCHIVE *kgspGetBinArchiveBooterUnloadUcode_DISPATCH(struct KernelGsp *pKernelGsp) { 918 return pKernelGsp->__kgspGetBinArchiveBooterUnloadUcode__(pKernelGsp); 919 } 920 921 static inline NvU64 kgspGetMinWprHeapSizeMB_7185bf(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 922 return (64U); 923 } 924 925 static inline NvU64 kgspGetMinWprHeapSizeMB_907c84(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 926 return pGpu->bVgpuGspPluginOffloadEnabled ? (549U) : (84U); 927 } 928 929 static inline NvU64 kgspGetMinWprHeapSizeMB_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 930 return pKernelGsp->__kgspGetMinWprHeapSizeMB__(pGpu, pKernelGsp); 931 } 932 933 static inline NvU64 kgspGetMaxWprHeapSizeMB_ad4e6a(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 934 return (256U); 935 } 936 937 static inline NvU64 kgspGetMaxWprHeapSizeMB_5839e2(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 938 return pGpu->bVgpuGspPluginOffloadEnabled ? 
(1024U) : (276U); 939 } 940 941 static inline NvU64 kgspGetMaxWprHeapSizeMB_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 942 return pKernelGsp->__kgspGetMaxWprHeapSizeMB__(pGpu, pKernelGsp); 943 } 944 945 static inline NV_STATUS kgspInitVgpuPartitionLogging_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize) { 946 return NV_ERR_NOT_SUPPORTED; 947 } 948 949 NV_STATUS kgspInitVgpuPartitionLogging_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize); 950 951 static inline NV_STATUS kgspInitVgpuPartitionLogging_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid, NvU64 initTaskLogBUffOffset, NvU64 initTaskLogBUffSize, NvU64 vgpuTaskLogBUffOffset, NvU64 vgpuTaskLogBuffSize) { 952 return pKernelGsp->__kgspInitVgpuPartitionLogging__(pGpu, pKernelGsp, gfid, initTaskLogBUffOffset, initTaskLogBUffSize, vgpuTaskLogBUffOffset, vgpuTaskLogBuffSize); 953 } 954 955 static inline NV_STATUS kgspFreeVgpuPartitionLogging_395e98(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid) { 956 return NV_ERR_NOT_SUPPORTED; 957 } 958 959 NV_STATUS kgspFreeVgpuPartitionLogging_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid); 960 961 static inline NV_STATUS kgspFreeVgpuPartitionLogging_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU32 gfid) { 962 return pKernelGsp->__kgspFreeVgpuPartitionLogging__(pGpu, pKernelGsp, gfid); 963 } 964 965 const char *kgspGetSignatureSectionNamePrefix_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 966 967 static inline const char *kgspGetSignatureSectionNamePrefix_789efb(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 968 return ".fwsignature_"; 969 } 970 971 static inline const char 
*kgspGetSignatureSectionNamePrefix_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 972 return pKernelGsp->__kgspGetSignatureSectionNamePrefix__(pGpu, pKernelGsp); 973 } 974 975 NV_STATUS kgspSetupGspFmcArgs_GH100(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw); 976 977 static inline NV_STATUS kgspSetupGspFmcArgs_5baef9(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) { 978 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED); 979 } 980 981 static inline NV_STATUS kgspSetupGspFmcArgs_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) { 982 return pKernelGsp->__kgspSetupGspFmcArgs__(pGpu, pKernelGsp, pGspFw); 983 } 984 985 static inline NV_STATUS kgspStateLoad_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) { 986 return pEngstate->__kgspStateLoad__(pGpu, pEngstate, arg0); 987 } 988 989 static inline NV_STATUS kgspStateUnload_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) { 990 return pEngstate->__kgspStateUnload__(pGpu, pEngstate, arg0); 991 } 992 993 static inline NV_STATUS kgspServiceNotificationInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pIntrService, IntrServiceServiceNotificationInterruptArguments *pParams) { 994 return pIntrService->__kgspServiceNotificationInterrupt__(pGpu, pIntrService, pParams); 995 } 996 997 static inline NV_STATUS kgspStateInitLocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) { 998 return pEngstate->__kgspStateInitLocked__(pGpu, pEngstate); 999 } 1000 1001 static inline NV_STATUS kgspStatePreLoad_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) { 1002 return pEngstate->__kgspStatePreLoad__(pGpu, pEngstate, arg0); 1003 } 1004 1005 static inline NV_STATUS kgspStatePostUnload_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) { 1006 return pEngstate->__kgspStatePostUnload__(pGpu, pEngstate, arg0); 1007 } 1008 1009 static inline void kgspStateDestroy_DISPATCH(POBJGPU 
pGpu, struct KernelGsp *pEngstate) { 1010 pEngstate->__kgspStateDestroy__(pGpu, pEngstate); 1011 } 1012 1013 static inline NV_STATUS kgspStatePreUnload_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) { 1014 return pEngstate->__kgspStatePreUnload__(pGpu, pEngstate, arg0); 1015 } 1016 1017 static inline NV_STATUS kgspStateInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) { 1018 return pEngstate->__kgspStateInitUnlocked__(pGpu, pEngstate); 1019 } 1020 1021 static inline void kgspInitMissing_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) { 1022 pEngstate->__kgspInitMissing__(pGpu, pEngstate); 1023 } 1024 1025 static inline NV_STATUS kgspStatePreInitLocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) { 1026 return pEngstate->__kgspStatePreInitLocked__(pGpu, pEngstate); 1027 } 1028 1029 static inline NV_STATUS kgspStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) { 1030 return pEngstate->__kgspStatePreInitUnlocked__(pGpu, pEngstate); 1031 } 1032 1033 static inline NvBool kgspClearInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelGsp *pIntrService, IntrServiceClearInterruptArguments *pParams) { 1034 return pIntrService->__kgspClearInterrupt__(pGpu, pIntrService, pParams); 1035 } 1036 1037 static inline NV_STATUS kgspStatePostLoad_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate, NvU32 arg0) { 1038 return pEngstate->__kgspStatePostLoad__(pGpu, pEngstate, arg0); 1039 } 1040 1041 static inline NvBool kgspIsPresent_DISPATCH(POBJGPU pGpu, struct KernelGsp *pEngstate) { 1042 return pEngstate->__kgspIsPresent__(pGpu, pEngstate); 1043 } 1044 1045 void kgspDestruct_IMPL(struct KernelGsp *pKernelGsp); 1046 1047 #define __nvoc_kgspDestruct(pKernelGsp) kgspDestruct_IMPL(pKernelGsp) 1048 void kgspPopulateGspRmInitArgs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_SR_INIT_ARGUMENTS *pGspSrInitArgs); 1049 1050 #ifdef __nvoc_kernel_gsp_h_disabled 1051 static inline void kgspPopulateGspRmInitArgs(struct 
OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_SR_INIT_ARGUMENTS *pGspSrInitArgs) { 1052 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 1053 } 1054 #else //__nvoc_kernel_gsp_h_disabled 1055 #define kgspPopulateGspRmInitArgs(pGpu, pKernelGsp, pGspSrInitArgs) kgspPopulateGspRmInitArgs_IMPL(pGpu, pKernelGsp, pGspSrInitArgs) 1056 #endif //__nvoc_kernel_gsp_h_disabled 1057 1058 NV_STATUS kgspInitRm_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw); 1059 1060 #ifdef __nvoc_kernel_gsp_h_disabled 1061 static inline NV_STATUS kgspInitRm(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, GSP_FIRMWARE *pGspFw) { 1062 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 1063 return NV_ERR_NOT_SUPPORTED; 1064 } 1065 #else //__nvoc_kernel_gsp_h_disabled 1066 #define kgspInitRm(pGpu, pKernelGsp, pGspFw) kgspInitRm_IMPL(pGpu, pKernelGsp, pGspFw) 1067 #endif //__nvoc_kernel_gsp_h_disabled 1068 1069 NV_STATUS kgspCreateRadix3_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, MEMORY_DESCRIPTOR **ppMemdescRadix3, MEMORY_DESCRIPTOR *pMemdescData, const void *pData, NvU64 sizeOfData); 1070 1071 #ifdef __nvoc_kernel_gsp_h_disabled 1072 static inline NV_STATUS kgspCreateRadix3(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, MEMORY_DESCRIPTOR **ppMemdescRadix3, MEMORY_DESCRIPTOR *pMemdescData, const void *pData, NvU64 sizeOfData) { 1073 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 1074 return NV_ERR_NOT_SUPPORTED; 1075 } 1076 #else //__nvoc_kernel_gsp_h_disabled 1077 #define kgspCreateRadix3(pGpu, pKernelGsp, ppMemdescRadix3, pMemdescData, pData, sizeOfData) kgspCreateRadix3_IMPL(pGpu, pKernelGsp, ppMemdescRadix3, pMemdescData, pData, sizeOfData) 1078 #endif //__nvoc_kernel_gsp_h_disabled 1079 1080 NV_STATUS kgspUnloadRm_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 1081 1082 #ifdef __nvoc_kernel_gsp_h_disabled 1083 static inline NV_STATUS kgspUnloadRm(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 1084 
NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 1085 return NV_ERR_NOT_SUPPORTED; 1086 } 1087 #else //__nvoc_kernel_gsp_h_disabled 1088 #define kgspUnloadRm(pGpu, pKernelGsp) kgspUnloadRm_IMPL(pGpu, pKernelGsp) 1089 #endif //__nvoc_kernel_gsp_h_disabled 1090 1091 NV_STATUS kgspPrepareBootBinaryImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 1092 1093 #ifdef __nvoc_kernel_gsp_h_disabled 1094 static inline NV_STATUS kgspPrepareBootBinaryImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 1095 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 1096 return NV_ERR_NOT_SUPPORTED; 1097 } 1098 #else //__nvoc_kernel_gsp_h_disabled 1099 #define kgspPrepareBootBinaryImage(pGpu, pKernelGsp) kgspPrepareBootBinaryImage_IMPL(pGpu, pKernelGsp) 1100 #endif //__nvoc_kernel_gsp_h_disabled 1101 1102 NvU64 kgspGetFwHeapSize_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU64 posteriorFbSize); 1103 1104 #ifdef __nvoc_kernel_gsp_h_disabled 1105 static inline NvU64 kgspGetFwHeapSize(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvU64 posteriorFbSize) { 1106 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 1107 return 0; 1108 } 1109 #else //__nvoc_kernel_gsp_h_disabled 1110 #define kgspGetFwHeapSize(pGpu, pKernelGsp, posteriorFbSize) kgspGetFwHeapSize_IMPL(pGpu, pKernelGsp, posteriorFbSize) 1111 #endif //__nvoc_kernel_gsp_h_disabled 1112 1113 void kgspSetupLibosInitArgs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 1114 1115 #ifdef __nvoc_kernel_gsp_h_disabled 1116 static inline void kgspSetupLibosInitArgs(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) { 1117 NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!"); 1118 } 1119 #else //__nvoc_kernel_gsp_h_disabled 1120 #define kgspSetupLibosInitArgs(pGpu, pKernelGsp) kgspSetupLibosInitArgs_IMPL(pGpu, pKernelGsp) 1121 #endif //__nvoc_kernel_gsp_h_disabled 1122 1123 void kgspRpcRecvEvents_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp); 1124 1125 #ifdef 
__nvoc_kernel_gsp_h_disabled
// Disabled-build stub: KernelGsp was compiled out, so reaching this call is a
// precompile-configuration error (asserts and does nothing).
static inline void kgspRpcRecvEvents(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
// Enabled build: forward directly to the real implementation.
#define kgspRpcRecvEvents(pGpu, pKernelGsp) kgspRpcRecvEvents_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

// Wait until GSP-RM reports init done (implementation lives outside this
// generated header).
NV_STATUS kgspWaitForRmInitDone_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
// Disabled-build stub: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspWaitForRmInitDone(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspWaitForRmInitDone(pGpu, pKernelGsp) kgspWaitForRmInitDone_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

NV_STATUS kgspStartLogPolling_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp);

#ifdef __nvoc_kernel_gsp_h_disabled
// Disabled-build stub: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspStartLogPolling(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspStartLogPolling(pGpu, pKernelGsp) kgspStartLogPolling_IMPL(pGpu, pKernelGsp)
#endif //__nvoc_kernel_gsp_h_disabled

// Dump GSP-RM logs; arg0 is a boolean flag whose meaning is defined by the
// implementation (NOTE(review): not documented here — confirm at _IMPL).
void kgspDumpGspLogs_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvBool arg0);

#ifdef __nvoc_kernel_gsp_h_disabled
// Disabled-build stub: asserts and does nothing (void return).
static inline void kgspDumpGspLogs(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, NvBool arg0) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspDumpGspLogs(pGpu, pKernelGsp, arg0) kgspDumpGspLogs_IMPL(pGpu, pKernelGsp, arg0)
#endif //__nvoc_kernel_gsp_h_disabled

// Execute a GSP sequencer buffer on the CPU; pRunCpuSeqParams is an opaque
// parameter struct (type resolved by the implementation).
NV_STATUS kgspExecuteSequencerBuffer_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, void *pRunCpuSeqParams);

#ifdef __nvoc_kernel_gsp_h_disabled
// Disabled-build stub: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspExecuteSequencerBuffer(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, void *pRunCpuSeqParams) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspExecuteSequencerBuffer(pGpu, pKernelGsp, pRunCpuSeqParams) kgspExecuteSequencerBuffer_IMPL(pGpu, pKernelGsp, pRunCpuSeqParams)
#endif //__nvoc_kernel_gsp_h_disabled

// Parse the FWSEC falcon ucode out of a VBIOS image. On success
// *ppFwsecUcode receives a newly allocated KernelGspFlcnUcode (caller frees
// with kgspFreeFlcnUcode, declared below) and *pVbiosVersionCombined receives
// the VBIOS version.
NV_STATUS kgspParseFwsecUcodeFromVbiosImg_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const KernelGspVbiosImg *const pVbiosImg, KernelGspFlcnUcode **ppFwsecUcode, NvU64 *pVbiosVersionCombined);

#ifdef __nvoc_kernel_gsp_h_disabled
// Disabled-build stub: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspParseFwsecUcodeFromVbiosImg(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, const KernelGspVbiosImg *const pVbiosImg, KernelGspFlcnUcode **ppFwsecUcode, NvU64 *pVbiosVersionCombined) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspParseFwsecUcodeFromVbiosImg(pGpu, pKernelGsp, pVbiosImg, ppFwsecUcode, pVbiosVersionCombined) kgspParseFwsecUcodeFromVbiosImg_IMPL(pGpu, pKernelGsp, pVbiosImg, ppFwsecUcode, pVbiosVersionCombined)
#endif //__nvoc_kernel_gsp_h_disabled

// Allocate the Scrubber ucode image; *ppScrubberUcode receives the new
// KernelGspFlcnUcode (caller frees with kgspFreeFlcnUcode).
NV_STATUS kgspAllocateScrubberUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppScrubberUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
// Disabled-build stub: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspAllocateScrubberUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppScrubberUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspAllocateScrubberUcodeImage(pGpu, pKernelGsp, ppScrubberUcode) kgspAllocateScrubberUcodeImage_IMPL(pGpu, pKernelGsp, ppScrubberUcode)
#endif //__nvoc_kernel_gsp_h_disabled

// Allocate the Booter Load ucode image; same ownership convention as above.
NV_STATUS kgspAllocateBooterLoadUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterLoadUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
// Disabled-build stub: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspAllocateBooterLoadUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterLoadUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspAllocateBooterLoadUcodeImage(pGpu, pKernelGsp, ppBooterLoadUcode) kgspAllocateBooterLoadUcodeImage_IMPL(pGpu, pKernelGsp, ppBooterLoadUcode)
#endif //__nvoc_kernel_gsp_h_disabled

// Allocate the Booter Unload ucode image; same ownership convention as above.
NV_STATUS kgspAllocateBooterUnloadUcodeImage_IMPL(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterUnloadUcode);

#ifdef __nvoc_kernel_gsp_h_disabled
// Disabled-build stub: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS kgspAllocateBooterUnloadUcodeImage(struct OBJGPU *pGpu, struct KernelGsp *pKernelGsp, KernelGspFlcnUcode **ppBooterUnloadUcode) {
    NV_ASSERT_FAILED_PRECOMP("KernelGsp was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_kernel_gsp_h_disabled
#define kgspAllocateBooterUnloadUcodeImage(pGpu, pKernelGsp, ppBooterUnloadUcode) kgspAllocateBooterUnloadUcodeImage_IMPL(pGpu, pKernelGsp, ppBooterUnloadUcode)
#endif //__nvoc_kernel_gsp_h_disabled

#undef PRIVATE_FIELD


// RPC transports for the RM API when RM runs on the GSP: each routes the
// corresponding RM API verb (control / alloc / dup / free) over the GSP RPC
// channel.
NV_STATUS rpcRmApiControl_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hObject,
                              NvU32 cmd, void *pParamStructPtr, NvU32 paramsSize);
NV_STATUS rpcRmApiAlloc_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hParent,
                            NvHandle hObject, NvU32 hClass, void *pAllocParams, NvU32 allocParamsSize);
NV_STATUS rpcRmApiDupObject_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hParent, NvHandle *phObject,
                                NvHandle hClientSrc, NvHandle hObjectSrc, NvU32 flags);
NV_STATUS rpcRmApiFree_GSP(RM_API *pRmApi, NvHandle hClient, NvHandle hObject);

/* Free a KernelGspVbiosImg structure */
void kgspFreeVbiosImg(KernelGspVbiosImg *pVbiosImg);
/* Free a KernelGspFlcnUcode structure */
void kgspFreeFlcnUcode(KernelGspFlcnUcode *pFlcnUcode);

#endif // KERNEL_GSP_H

#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_KERNEL_GSP_NVOC_H_