/*
 * NVOC metadata for the KernelSec2 class.
 *
 * NOTE(review): the naming (__nvoc_* symbols, g_kernel_sec2_nvoc.h) strongly
 * suggests this file is emitted by the NVOC code generator — confirm before
 * hand-editing; changes normally belong in the class definition it was
 * generated from.
 *
 * Contents, in order:
 *   - RTTI entries and cast info for KernelSec2 and each of its bases
 *   - the NVOC_CLASS_DEF class descriptor
 *   - thunks that adjust the object pointer between KernelSec2 and its bases
 *     when a virtual call crosses the class boundary
 *   - generated constructor/destructor, func-table (vtable) initialization
 *     with per-chip HAL dispatch, and the dynamic object-creation entry points
 */
#define NVOC_KERNEL_SEC2_H_PRIVATE_ACCESS_ALLOWED
#include "nvoc/runtime.h"
#include "nvoc/rtti.h"
#include "nvtypes.h"
#include "nvport/nvport.h"
#include "nvport/inline/util_valist.h"
#include "utils/nvassert.h"
#include "g_kernel_sec2_nvoc.h"

#ifdef DEBUG
/*
 * Link-time uniqueness check: if another class were generated with the same
 * class id 0x2f36c9, both files would define this symbol and the build
 * (or debug link) would flag the collision.
 */
char __nvoc_class_id_uniqueness_check_0x2f36c9 = 1;
#endif

/* Class descriptors for KernelSec2 and every ancestor it can be cast to. */
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelSec2;

extern const struct NVOC_CLASS_DEF __nvoc_class_def_Object;

extern const struct NVOC_CLASS_DEF __nvoc_class_def_OBJENGSTATE;

extern const struct NVOC_CLASS_DEF __nvoc_class_def_IntrService;

extern const struct NVOC_CLASS_DEF __nvoc_class_def_CrashCatEngine;

extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelCrashCatEngine;

extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelFalcon;

/* Forward declarations of the generated lifecycle functions defined below. */
void __nvoc_init_KernelSec2(KernelSec2*, RmHalspecOwner* );
void __nvoc_init_funcTable_KernelSec2(KernelSec2*, RmHalspecOwner* );
NV_STATUS __nvoc_ctor_KernelSec2(KernelSec2*, RmHalspecOwner* );
void __nvoc_init_dataField_KernelSec2(KernelSec2*, RmHalspecOwner* );
void __nvoc_dtor_KernelSec2(KernelSec2*);
extern const struct NVOC_EXPORT_INFO __nvoc_export_info_KernelSec2;

/*
 * Per-ancestor RTTI records. Each record stores the byte offset of that base
 * within KernelSec2; the thunks below add/subtract these offsets to convert
 * between a base pointer and the derived KernelSec2 pointer.
 */
static const struct NVOC_RTTI __nvoc_rtti_KernelSec2_KernelSec2 = {
    /*pClassDef=*/          &__nvoc_class_def_KernelSec2,
    /*dtor=*/               (NVOC_DYNAMIC_DTOR) &__nvoc_dtor_KernelSec2,
    /*offset=*/             0,
};

static const struct NVOC_RTTI __nvoc_rtti_KernelSec2_Object = {
    /*pClassDef=*/          &__nvoc_class_def_Object,
    /*dtor=*/               &__nvoc_destructFromBase,
    /*offset=*/             NV_OFFSETOF(KernelSec2, __nvoc_base_OBJENGSTATE.__nvoc_base_Object),
};

static const struct NVOC_RTTI __nvoc_rtti_KernelSec2_OBJENGSTATE = {
    /*pClassDef=*/          &__nvoc_class_def_OBJENGSTATE,
    /*dtor=*/               &__nvoc_destructFromBase,
    /*offset=*/             NV_OFFSETOF(KernelSec2, __nvoc_base_OBJENGSTATE),
};

static const struct NVOC_RTTI __nvoc_rtti_KernelSec2_IntrService = {
    /*pClassDef=*/          &__nvoc_class_def_IntrService,
    /*dtor=*/               &__nvoc_destructFromBase,
    /*offset=*/             NV_OFFSETOF(KernelSec2, __nvoc_base_IntrService),
};

static const struct NVOC_RTTI __nvoc_rtti_KernelSec2_CrashCatEngine = {
    /*pClassDef=*/          &__nvoc_class_def_CrashCatEngine,
    /*dtor=*/               &__nvoc_destructFromBase,
    /*offset=*/             NV_OFFSETOF(KernelSec2, __nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__nvoc_base_CrashCatEngine),
};

static const struct NVOC_RTTI __nvoc_rtti_KernelSec2_KernelCrashCatEngine = {
    /*pClassDef=*/          &__nvoc_class_def_KernelCrashCatEngine,
    /*dtor=*/               &__nvoc_destructFromBase,
    /*offset=*/             NV_OFFSETOF(KernelSec2, __nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine),
};

static const struct NVOC_RTTI __nvoc_rtti_KernelSec2_KernelFalcon = {
    /*pClassDef=*/          &__nvoc_class_def_KernelFalcon,
    /*dtor=*/               &__nvoc_destructFromBase,
    /*offset=*/             NV_OFFSETOF(KernelSec2, __nvoc_base_KernelFalcon),
};

/*
 * Cast table used by dynamicCast: KernelSec2 itself plus its 6 ancestors
 * (numRelatives must match the number of entries below).
 */
static const struct NVOC_CASTINFO __nvoc_castinfo_KernelSec2 = {
    /*numRelatives=*/       7,
    /*relatives=*/ {
        &__nvoc_rtti_KernelSec2_KernelSec2,
        &__nvoc_rtti_KernelSec2_KernelFalcon,
        &__nvoc_rtti_KernelSec2_KernelCrashCatEngine,
        &__nvoc_rtti_KernelSec2_CrashCatEngine,
        &__nvoc_rtti_KernelSec2_IntrService,
        &__nvoc_rtti_KernelSec2_OBJENGSTATE,
        &__nvoc_rtti_KernelSec2_Object,
    },
};

/* The class descriptor: size, id, human-readable name, creation hook,
 * cast table and (empty, see below) RM API export table. */
const struct NVOC_CLASS_DEF __nvoc_class_def_KernelSec2 =
{
    /*classInfo=*/ {
        /*size=*/               sizeof(KernelSec2),
        /*classId=*/            classId(KernelSec2),
        /*providerId=*/         &__nvoc_rtti_provider,
#if NV_PRINTF_STRINGS_ALLOWED
        /*name=*/               "KernelSec2",
#endif
    },
    /*objCreatefn=*/        (NVOC_DYNAMIC_OBJ_CREATE) &__nvoc_objCreateDynamic_KernelSec2,
    /*pCastInfo=*/          &__nvoc_castinfo_KernelSec2,
    /*pExportInfo=*/        &__nvoc_export_info_KernelSec2
};

/*
 * Downcast thunks (base -> KernelSec2): installed into a base class's
 * func table so a virtual call made on the base reaches the KernelSec2
 * override. They subtract the base's offset to recover the derived pointer.
 */
static NV_STATUS __nvoc_thunk_KernelSec2_engstateConstructEngine(struct OBJGPU *pGpu, struct OBJENGSTATE *pKernelSec2, ENGDESCRIPTOR arg0) {
    return ksec2ConstructEngine(pGpu, (struct KernelSec2 *)(((unsigned char *)pKernelSec2) - __nvoc_rtti_KernelSec2_OBJENGSTATE.offset), arg0);
}

static void __nvoc_thunk_KernelSec2_intrservRegisterIntrService(struct OBJGPU *pGpu, struct IntrService *pKernelSec2, IntrServiceRecord pRecords[168]) {
    ksec2RegisterIntrService(pGpu, (struct KernelSec2 *)(((unsigned char *)pKernelSec2) - __nvoc_rtti_KernelSec2_IntrService.offset), pRecords);
}

static NV_STATUS __nvoc_thunk_KernelSec2_intrservServiceNotificationInterrupt(struct OBJGPU *arg0, struct IntrService *arg1, IntrServiceServiceNotificationInterruptArguments *arg2) {
    return ksec2ServiceNotificationInterrupt(arg0, (struct KernelSec2 *)(((unsigned char *)arg1) - __nvoc_rtti_KernelSec2_IntrService.offset), arg2);
}

static NV_STATUS __nvoc_thunk_KernelSec2_kflcnResetHw(struct OBJGPU *pGpu, struct KernelFalcon *pKernelSec2) {
    return ksec2ResetHw(pGpu, (struct KernelSec2 *)(((unsigned char *)pKernelSec2) - __nvoc_rtti_KernelSec2_KernelFalcon.offset));
}

static NV_STATUS __nvoc_thunk_KernelSec2_engstateStateLoad(struct OBJGPU *pGpu, struct OBJENGSTATE *pKernelSec2, NvU32 arg0) {
    return ksec2StateLoad(pGpu, (struct KernelSec2 *)(((unsigned char *)pKernelSec2) - __nvoc_rtti_KernelSec2_OBJENGSTATE.offset), arg0);
}

/*
 * Upcast thunks (KernelSec2 -> base): installed into KernelSec2's own func
 * table for virtuals it does NOT override, forwarding to the inherited base
 * implementation. They add the base's offset to reach the base subobject.
 */
static NvBool __nvoc_thunk_KernelCrashCatEngine_ksec2Configured(struct KernelSec2 *arg0) {
    return kcrashcatEngineConfigured((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset));
}

static NvU32 __nvoc_thunk_KernelCrashCatEngine_ksec2PriRead(struct KernelSec2 *arg0, NvU32 offset) {
    return kcrashcatEnginePriRead((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset), offset);
}

static void __nvoc_thunk_KernelFalcon_ksec2RegWrite(struct OBJGPU *pGpu, struct KernelSec2 *pKernelFlcn, NvU32 offset, NvU32 data) {
    kflcnRegWrite(pGpu, (struct KernelFalcon *)(((unsigned char *)pKernelFlcn) + __nvoc_rtti_KernelSec2_KernelFalcon.offset), offset, data);
}

static NvU32 __nvoc_thunk_KernelFalcon_ksec2MaskDmemAddr(struct OBJGPU *pGpu, struct KernelSec2 *pKernelFlcn, NvU32 addr) {
    return kflcnMaskDmemAddr(pGpu, (struct KernelFalcon *)(((unsigned char *)pKernelFlcn) + __nvoc_rtti_KernelSec2_KernelFalcon.offset), addr);
}

static void __nvoc_thunk_OBJENGSTATE_ksec2StateDestroy(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    engstateStateDestroy(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset));
}

static void __nvoc_thunk_KernelCrashCatEngine_ksec2Vprintf(struct KernelSec2 *arg0, NvBool bReportStart, const char *fmt, va_list args) {
    kcrashcatEngineVprintf((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset), bReportStart, fmt, args);
}

static NvBool __nvoc_thunk_IntrService_ksec2ClearInterrupt(struct OBJGPU *pGpu, struct KernelSec2 *pIntrService, IntrServiceClearInterruptArguments *pParams) {
    return intrservClearInterrupt(pGpu, (struct IntrService *)(((unsigned char *)pIntrService) + __nvoc_rtti_KernelSec2_IntrService.offset), pParams);
}

static void __nvoc_thunk_KernelCrashCatEngine_ksec2PriWrite(struct KernelSec2 *arg0, NvU32 offset, NvU32 data) {
    kcrashcatEnginePriWrite((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset), offset, data);
}

static void *__nvoc_thunk_KernelCrashCatEngine_ksec2MapBufferDescriptor(struct KernelSec2 *arg0, CrashCatBufferDescriptor *pBufDesc) {
    return kcrashcatEngineMapBufferDescriptor((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset), pBufDesc);
}

static void __nvoc_thunk_KernelCrashCatEngine_ksec2SyncBufferDescriptor(struct KernelSec2 *arg0, CrashCatBufferDescriptor *pBufDesc, NvU32 offset, NvU32 size) {
    kcrashcatEngineSyncBufferDescriptor((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset), pBufDesc, offset, size);
}

static NvU32 __nvoc_thunk_KernelFalcon_ksec2RegRead(struct OBJGPU *pGpu, struct KernelSec2 *pKernelFlcn, NvU32 offset) {
    return kflcnRegRead(pGpu, (struct KernelFalcon *)(((unsigned char *)pKernelFlcn) + __nvoc_rtti_KernelSec2_KernelFalcon.offset), offset);
}

static NvBool __nvoc_thunk_OBJENGSTATE_ksec2IsPresent(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return engstateIsPresent(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset));
}

static NvU32 __nvoc_thunk_IntrService_ksec2ServiceInterrupt(struct OBJGPU *pGpu, struct KernelSec2 *pIntrService, IntrServiceServiceInterruptArguments *pParams) {
    return intrservServiceInterrupt(pGpu, (struct IntrService *)(((unsigned char *)pIntrService) + __nvoc_rtti_KernelSec2_IntrService.offset), pParams);
}

static void __nvoc_thunk_KernelCrashCatEngine_ksec2ReadEmem(struct KernelSec2 *arg0, NvU64 offset, NvU64 size, void *pBuf) {
    kcrashcatEngineReadEmem((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset), offset, size, pBuf);
}

static const NvU32 *__nvoc_thunk_KernelCrashCatEngine_ksec2GetScratchOffsets(struct KernelSec2 *arg0, NV_CRASHCAT_SCRATCH_GROUP_ID scratchGroupId) {
    return kcrashcatEngineGetScratchOffsets((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset), scratchGroupId);
}

static void __nvoc_thunk_KernelCrashCatEngine_ksec2Unload(struct KernelSec2 *arg0) {
    kcrashcatEngineUnload((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset));
}

static NV_STATUS __nvoc_thunk_OBJENGSTATE_ksec2StateUnload(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return engstateStateUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset), arg0);
}

static NvU32 __nvoc_thunk_KernelCrashCatEngine_ksec2GetWFL0Offset(struct KernelSec2 *arg0) {
    return kcrashcatEngineGetWFL0Offset((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset));
}

static NV_STATUS __nvoc_thunk_OBJENGSTATE_ksec2StateInitLocked(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return engstateStateInitLocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset));
}

static NV_STATUS __nvoc_thunk_OBJENGSTATE_ksec2StatePreLoad(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return engstateStatePreLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset), arg0);
}

static NV_STATUS __nvoc_thunk_OBJENGSTATE_ksec2StatePostUnload(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return engstateStatePostUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset), arg0);
}

static NV_STATUS __nvoc_thunk_OBJENGSTATE_ksec2StatePreUnload(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return engstateStatePreUnload(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset), arg0);
}

static NV_STATUS __nvoc_thunk_OBJENGSTATE_ksec2StateInitUnlocked(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return engstateStateInitUnlocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset));
}

static void __nvoc_thunk_OBJENGSTATE_ksec2InitMissing(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    engstateInitMissing(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset));
}

static NV_STATUS __nvoc_thunk_OBJENGSTATE_ksec2StatePreInitLocked(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return engstateStatePreInitLocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset));
}

static NV_STATUS __nvoc_thunk_OBJENGSTATE_ksec2StatePreInitUnlocked(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return engstateStatePreInitUnlocked(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset));
}

static NV_STATUS __nvoc_thunk_OBJENGSTATE_ksec2StatePostLoad(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return engstateStatePostLoad(pGpu, (struct OBJENGSTATE *)(((unsigned char *)pEngstate) + __nvoc_rtti_KernelSec2_OBJENGSTATE.offset), arg0);
}

static void __nvoc_thunk_KernelCrashCatEngine_ksec2UnmapBufferDescriptor(struct KernelSec2 *arg0, CrashCatBufferDescriptor *pBufDesc) {
    kcrashcatEngineUnmapBufferDescriptor((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset), pBufDesc);
}

static void __nvoc_thunk_KernelCrashCatEngine_ksec2ReadDmem(struct KernelSec2 *arg0, NvU32 offset, NvU32 size, void *pBuf) {
    kcrashcatEngineReadDmem((struct KernelCrashCatEngine *)(((unsigned char *)arg0) + __nvoc_rtti_KernelSec2_KernelCrashCatEngine.offset), offset, size, pBuf);
}

/* KernelSec2 exports no RM control methods; the table is empty. */
const struct NVOC_EXPORT_INFO __nvoc_export_info_KernelSec2 =
{
    /*numEntries=*/     0,
    /*pExportEntries=*/  0
};

void __nvoc_dtor_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_dtor_IntrService(IntrService*);
void __nvoc_dtor_KernelFalcon(KernelFalcon*);
/*
 * Destructor: runs the user-supplied destruct hook first, then destroys
 * each base subobject.
 */
void __nvoc_dtor_KernelSec2(KernelSec2 *pThis) {
    __nvoc_ksec2Destruct(pThis);
    __nvoc_dtor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
    __nvoc_dtor_IntrService(&pThis->__nvoc_base_IntrService);
    __nvoc_dtor_KernelFalcon(&pThis->__nvoc_base_KernelFalcon);
    PORT_UNREFERENCED_VARIABLE(pThis);
}

/*
 * Data-field initialization. KernelSec2 currently has no hal-dependent data
 * fields, so all the hal lookups below are unused (hence the
 * PORT_UNREFERENCED_VARIABLE markers); the generator keeps the scaffolding.
 */
void __nvoc_init_dataField_KernelSec2(KernelSec2 *pThis, RmHalspecOwner *pRmhalspecowner) {
    RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
    const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
    ChipHal *chipHal = &pRmhalspecowner->chipHal;
    const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
    PORT_UNREFERENCED_VARIABLE(pThis);
    PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
    PORT_UNREFERENCED_VARIABLE(rmVariantHal);
    PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
    PORT_UNREFERENCED_VARIABLE(chipHal);
    PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);
}

NV_STATUS __nvoc_ctor_OBJENGSTATE(OBJENGSTATE* );
NV_STATUS __nvoc_ctor_IntrService(IntrService* );
NV_STATUS __nvoc_ctor_KernelFalcon(KernelFalcon* , RmHalspecOwner* );
/*
 * Constructor: constructs each base in declaration order, then initializes
 * data fields. On failure, the goto labels unwind the already-constructed
 * bases in reverse order before returning the error.
 */
NV_STATUS __nvoc_ctor_KernelSec2(KernelSec2 *pThis, RmHalspecOwner *pRmhalspecowner) {
    NV_STATUS status = NV_OK;
    status = __nvoc_ctor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
    if (status != NV_OK) goto __nvoc_ctor_KernelSec2_fail_OBJENGSTATE;
    status = __nvoc_ctor_IntrService(&pThis->__nvoc_base_IntrService);
    if (status != NV_OK) goto __nvoc_ctor_KernelSec2_fail_IntrService;
    status = __nvoc_ctor_KernelFalcon(&pThis->__nvoc_base_KernelFalcon, pRmhalspecowner);
    if (status != NV_OK) goto __nvoc_ctor_KernelSec2_fail_KernelFalcon;
    __nvoc_init_dataField_KernelSec2(pThis, pRmhalspecowner);
    goto __nvoc_ctor_KernelSec2_exit; // Success

__nvoc_ctor_KernelSec2_fail_KernelFalcon:
    __nvoc_dtor_IntrService(&pThis->__nvoc_base_IntrService);
__nvoc_ctor_KernelSec2_fail_IntrService:
    __nvoc_dtor_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
__nvoc_ctor_KernelSec2_fail_OBJENGSTATE:
__nvoc_ctor_KernelSec2_exit:

    return status;
}

/*
 * Func-table (vtable) initialization. Hal-dependent entries are chosen by
 * testing chipHal_HalVarIdx: the index is split into a 32-bit group
 * (HalVarIdx >> 5) and a bit within that group (1 << (HalVarIdx & 0x1f)),
 * which is then matched against a per-implementation chip mask (the chips
 * in each mask are named in the trailing comment on the condition).
 */
static void __nvoc_init_funcTable_KernelSec2_1(KernelSec2 *pThis, RmHalspecOwner *pRmhalspecowner) {
    RmVariantHal *rmVariantHal = &pRmhalspecowner->rmVariantHal;
    const unsigned long rmVariantHal_HalVarIdx = (unsigned long)rmVariantHal->__nvoc_HalVarIdx;
    ChipHal *chipHal = &pRmhalspecowner->chipHal;
    const unsigned long chipHal_HalVarIdx = (unsigned long)chipHal->__nvoc_HalVarIdx;
    PORT_UNREFERENCED_VARIABLE(pThis);
    PORT_UNREFERENCED_VARIABLE(pRmhalspecowner);
    PORT_UNREFERENCED_VARIABLE(rmVariantHal);
    PORT_UNREFERENCED_VARIABLE(rmVariantHal_HalVarIdx);
    PORT_UNREFERENCED_VARIABLE(chipHal);
    PORT_UNREFERENCED_VARIABLE(chipHal_HalVarIdx);

    // Hal function -- ksec2ConstructEngine
    pThis->__ksec2ConstructEngine__ = &ksec2ConstructEngine_IMPL;

    // Hal function -- ksec2RegisterIntrService
    pThis->__ksec2RegisterIntrService__ = &ksec2RegisterIntrService_IMPL;

    // Hal function -- ksec2ServiceNotificationInterrupt
    pThis->__ksec2ServiceNotificationInterrupt__ = &ksec2ServiceNotificationInterrupt_IMPL;

    // Hal function -- ksec2ConfigureFalcon
    if (( ((chipHal_HalVarIdx >> 5) == 1UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x00000400UL) )) /* ChipHal: GA100 */
    {
        pThis->__ksec2ConfigureFalcon__ = &ksec2ConfigureFalcon_GA100;
    }
    else if (( ((chipHal_HalVarIdx >> 5) == 1UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x000003e0UL) )) /* ChipHal: TU102 | TU104 | TU106 | TU116 | TU117 */
    {
        pThis->__ksec2ConfigureFalcon__ = &ksec2ConfigureFalcon_TU102;
    }
    else
    {
        pThis->__ksec2ConfigureFalcon__ = &ksec2ConfigureFalcon_GA102;
    }

    // Hal function -- ksec2ResetHw
    pThis->__ksec2ResetHw__ = &ksec2ResetHw_TU102;

    // Hal function -- ksec2StateLoad
    if (( ((chipHal_HalVarIdx >> 5) == 1UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x10000000UL) )) /* ChipHal: GH100 */
    {
        pThis->__ksec2StateLoad__ = &ksec2StateLoad_GH100;
    }
    // default
    else
    {
        pThis->__ksec2StateLoad__ = &ksec2StateLoad_56cd7a;
    }

    // Hal function -- ksec2ReadUcodeFuseVersion
    if (( ((chipHal_HalVarIdx >> 5) == 1UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x000003e0UL) )) /* ChipHal: TU102 | TU104 | TU106 | TU116 | TU117 */
    {
        pThis->__ksec2ReadUcodeFuseVersion__ = &ksec2ReadUcodeFuseVersion_b2b553;
    }
    else
    {
        pThis->__ksec2ReadUcodeFuseVersion__ = &ksec2ReadUcodeFuseVersion_GA100;
    }

    // Hal function -- ksec2GetBinArchiveBlUcode
    if (( ((chipHal_HalVarIdx >> 5) == 1UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x11f00000UL) )) /* ChipHal: AD102 | AD103 | AD104 | AD106 | AD107 | GH100 */
    {
        pThis->__ksec2GetBinArchiveBlUcode__ = &ksec2GetBinArchiveBlUcode_80f438;
    }
    else
    {
        pThis->__ksec2GetBinArchiveBlUcode__ = &ksec2GetBinArchiveBlUcode_TU102;
    }

    // Hal function -- ksec2GetGenericBlUcode
    if (( ((chipHal_HalVarIdx >> 5) == 1UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x11f00000UL) )) /* ChipHal: AD102 | AD103 | AD104 | AD106 | AD107 | GH100 */
    {
        pThis->__ksec2GetGenericBlUcode__ = &ksec2GetGenericBlUcode_5baef9;
    }
    else
    {
        pThis->__ksec2GetGenericBlUcode__ = &ksec2GetGenericBlUcode_TU102;
    }

    // Hal function -- ksec2GetBinArchiveSecurescrubUcode
    if (( ((chipHal_HalVarIdx >> 5) == 1UL) && ((1UL << (chipHal_HalVarIdx & 0x1f)) & 0x01f00000UL) )) /* ChipHal: AD102 | AD103 | AD104 | AD106 | AD107 */
    {
        pThis->__ksec2GetBinArchiveSecurescrubUcode__ = &ksec2GetBinArchiveSecurescrubUcode_AD10X;
    }
    else
    {
        pThis->__ksec2GetBinArchiveSecurescrubUcode__ = &ksec2GetBinArchiveSecurescrubUcode_80f438;
    }

    // Overrides: route base-class virtuals to the KernelSec2 implementations.
    pThis->__nvoc_base_OBJENGSTATE.__engstateConstructEngine__ = &__nvoc_thunk_KernelSec2_engstateConstructEngine;

    pThis->__nvoc_base_IntrService.__intrservRegisterIntrService__ = &__nvoc_thunk_KernelSec2_intrservRegisterIntrService;

    pThis->__nvoc_base_IntrService.__intrservServiceNotificationInterrupt__ = &__nvoc_thunk_KernelSec2_intrservServiceNotificationInterrupt;

    pThis->__nvoc_base_KernelFalcon.__kflcnResetHw__ = &__nvoc_thunk_KernelSec2_kflcnResetHw;

    pThis->__nvoc_base_OBJENGSTATE.__engstateStateLoad__ = &__nvoc_thunk_KernelSec2_engstateStateLoad;

    // Inherited virtuals: forward to the base-class implementations.
    pThis->__ksec2Configured__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2Configured;

    pThis->__ksec2PriRead__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2PriRead;

    pThis->__ksec2RegWrite__ = &__nvoc_thunk_KernelFalcon_ksec2RegWrite;

    pThis->__ksec2MaskDmemAddr__ = &__nvoc_thunk_KernelFalcon_ksec2MaskDmemAddr;

    pThis->__ksec2StateDestroy__ = &__nvoc_thunk_OBJENGSTATE_ksec2StateDestroy;

    pThis->__ksec2Vprintf__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2Vprintf;

    pThis->__ksec2ClearInterrupt__ = &__nvoc_thunk_IntrService_ksec2ClearInterrupt;

    pThis->__ksec2PriWrite__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2PriWrite;

    pThis->__ksec2MapBufferDescriptor__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2MapBufferDescriptor;

    pThis->__ksec2SyncBufferDescriptor__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2SyncBufferDescriptor;

    pThis->__ksec2RegRead__ = &__nvoc_thunk_KernelFalcon_ksec2RegRead;

    pThis->__ksec2IsPresent__ = &__nvoc_thunk_OBJENGSTATE_ksec2IsPresent;

    pThis->__ksec2ServiceInterrupt__ = &__nvoc_thunk_IntrService_ksec2ServiceInterrupt;

    pThis->__ksec2ReadEmem__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2ReadEmem;

    pThis->__ksec2GetScratchOffsets__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2GetScratchOffsets;

    pThis->__ksec2Unload__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2Unload;

    pThis->__ksec2StateUnload__ = &__nvoc_thunk_OBJENGSTATE_ksec2StateUnload;

    pThis->__ksec2GetWFL0Offset__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2GetWFL0Offset;

    pThis->__ksec2StateInitLocked__ = &__nvoc_thunk_OBJENGSTATE_ksec2StateInitLocked;

    pThis->__ksec2StatePreLoad__ = &__nvoc_thunk_OBJENGSTATE_ksec2StatePreLoad;

    pThis->__ksec2StatePostUnload__ = &__nvoc_thunk_OBJENGSTATE_ksec2StatePostUnload;

    pThis->__ksec2StatePreUnload__ = &__nvoc_thunk_OBJENGSTATE_ksec2StatePreUnload;

    pThis->__ksec2StateInitUnlocked__ = &__nvoc_thunk_OBJENGSTATE_ksec2StateInitUnlocked;

    pThis->__ksec2InitMissing__ = &__nvoc_thunk_OBJENGSTATE_ksec2InitMissing;

    pThis->__ksec2StatePreInitLocked__ = &__nvoc_thunk_OBJENGSTATE_ksec2StatePreInitLocked;

    pThis->__ksec2StatePreInitUnlocked__ = &__nvoc_thunk_OBJENGSTATE_ksec2StatePreInitUnlocked;

    pThis->__ksec2StatePostLoad__ = &__nvoc_thunk_OBJENGSTATE_ksec2StatePostLoad;

    pThis->__ksec2UnmapBufferDescriptor__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2UnmapBufferDescriptor;

    pThis->__ksec2ReadDmem__ = &__nvoc_thunk_KernelCrashCatEngine_ksec2ReadDmem;
}

/* Public func-table entry point (single generated section). */
void __nvoc_init_funcTable_KernelSec2(KernelSec2 *pThis, RmHalspecOwner *pRmhalspecowner) {
    __nvoc_init_funcTable_KernelSec2_1(pThis, pRmhalspecowner);
}

void __nvoc_init_OBJENGSTATE(OBJENGSTATE*);
void __nvoc_init_IntrService(IntrService*);
void __nvoc_init_KernelFalcon(KernelFalcon*, RmHalspecOwner* );
/*
 * Init: wires up the cached base-class pointers (__nvoc_pbase_*), runs each
 * base's init, then installs this class's func table.
 */
void __nvoc_init_KernelSec2(KernelSec2 *pThis, RmHalspecOwner *pRmhalspecowner) {
    pThis->__nvoc_pbase_KernelSec2 = pThis;
    pThis->__nvoc_pbase_Object = &pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object;
    pThis->__nvoc_pbase_OBJENGSTATE = &pThis->__nvoc_base_OBJENGSTATE;
    pThis->__nvoc_pbase_IntrService = &pThis->__nvoc_base_IntrService;
    pThis->__nvoc_pbase_CrashCatEngine = &pThis->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine.__nvoc_base_CrashCatEngine;
    pThis->__nvoc_pbase_KernelCrashCatEngine = &pThis->__nvoc_base_KernelFalcon.__nvoc_base_KernelCrashCatEngine;
    pThis->__nvoc_pbase_KernelFalcon = &pThis->__nvoc_base_KernelFalcon;
    __nvoc_init_OBJENGSTATE(&pThis->__nvoc_base_OBJENGSTATE);
    __nvoc_init_IntrService(&pThis->__nvoc_base_IntrService);
    __nvoc_init_KernelFalcon(&pThis->__nvoc_base_KernelFalcon, pRmhalspecowner);
    __nvoc_init_funcTable_KernelSec2(pThis, pRmhalspecowner);
}

/*
 * Allocate + construct a KernelSec2: zero the storage, set up RTTI, attach
 * to the parent Object (unless the parent is used for halspec lookup only),
 * locate the RmHalspecOwner ancestor, then init and construct.
 */
NV_STATUS __nvoc_objCreate_KernelSec2(KernelSec2 **ppThis, Dynamic *pParent, NvU32 createFlags) {
    NV_STATUS status;
    Object *pParentObj;
    KernelSec2 *pThis;
    RmHalspecOwner *pRmhalspecowner;

    status = __nvoc_handleObjCreateMemAlloc(createFlags, sizeof(KernelSec2), (void**)&pThis, (void**)ppThis);
    if (status != NV_OK)
        return status;

    portMemSet(pThis, 0, sizeof(KernelSec2));

    __nvoc_initRtti(staticCast(pThis, Dynamic), &__nvoc_class_def_KernelSec2);

    pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object.createFlags = createFlags;

    if (pParent != NULL && !(createFlags & NVOC_OBJ_CREATE_FLAGS_PARENT_HALSPEC_ONLY))
    {
        pParentObj = dynamicCast(pParent, Object);
        objAddChild(pParentObj, &pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object);
    }
    else
    {
        pThis->__nvoc_base_OBJENGSTATE.__nvoc_base_Object.pParent = NULL;
    }

    // The halspec owner may be the parent itself or any ancestor of it.
    if ((pRmhalspecowner = dynamicCast(pParent, RmHalspecOwner)) == NULL)
        pRmhalspecowner = objFindAncestorOfType(RmHalspecOwner, pParent);
    NV_ASSERT_OR_RETURN(pRmhalspecowner != NULL, NV_ERR_INVALID_ARGUMENT);

    __nvoc_init_KernelSec2(pThis, pRmhalspecowner);
    status = __nvoc_ctor_KernelSec2(pThis, pRmhalspecowner);
    if (status != NV_OK) goto __nvoc_objCreate_KernelSec2_cleanup;

    *ppThis = pThis;

    return NV_OK;

__nvoc_objCreate_KernelSec2_cleanup:
    // do not call destructors here since the constructor already called them
    if (createFlags & NVOC_OBJ_CREATE_FLAGS_IN_PLACE_CONSTRUCT)
        portMemSet(pThis, 0, sizeof(KernelSec2));
    else
        portMemFree(pThis);

    // coverity[leaked_storage:FALSE]
    return status;
}

/*
 * va_list variant used by the generic object-creation machinery; KernelSec2
 * takes no construction arguments, so `args` is ignored.
 */
NV_STATUS __nvoc_objCreateDynamic_KernelSec2(KernelSec2 **ppThis, Dynamic *pParent, NvU32 createFlags, va_list args) {
    NV_STATUS status;

    status = __nvoc_objCreate_KernelSec2(ppThis, pParent, createFlags);

    return status;
}