1 #ifndef _G_KERNEL_SEC2_NVOC_H_
2 #define _G_KERNEL_SEC2_NVOC_H_
3 #include "nvoc/runtime.h"
4 
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8 
9 /*
10  * SPDX-FileCopyrightText: Copyright (c) 2021-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11  * SPDX-License-Identifier: MIT
12  *
13  * Permission is hereby granted, free of charge, to any person obtaining a
14  * copy of this software and associated documentation files (the "Software"),
15  * to deal in the Software without restriction, including without limitation
16  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17  * and/or sell copies of the Software, and to permit persons to whom the
18  * Software is furnished to do so, subject to the following conditions:
19  *
20  * The above copyright notice and this permission notice shall be included in
21  * all copies or substantial portions of the Software.
22  *
23  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29  * DEALINGS IN THE SOFTWARE.
30  */
31 
32 #include "g_kernel_sec2_nvoc.h"
33 
34 #ifndef KERNEL_SEC2_H
35 #define KERNEL_SEC2_H
36 
37 #include "core/bin_data.h"
38 #include "core/core.h"
39 #include "gpu/eng_state.h"
40 #include "gpu/falcon/kernel_falcon.h"
41 #include "gpu/gpu.h"
42 
43 // forward declaration of RM_FLCN_BL_DESC from rmflcnbl.h
44 struct _def_rm_flcn_bl_desc;
45 typedef struct _def_rm_flcn_bl_desc RM_FLCN_BL_DESC;
46 
47 #ifdef NVOC_KERNEL_SEC2_H_PRIVATE_ACCESS_ALLOWED
48 #define PRIVATE_FIELD(x) x
49 #else
50 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
51 #endif
struct KernelSec2 {
    // NVOC run-time type information; enables __nvoc_dynamicCast on this object.
    const struct NVOC_RTTI *__nvoc_rtti;
    // Base classes embedded by value — NVOC models (multiple) inheritance by composition.
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct IntrService __nvoc_base_IntrService;
    struct KernelFalcon __nvoc_base_KernelFalcon;
    // Cached pointers to every ancestor sub-object, used by the staticCast machinery
    // (see __staticCast_KernelSec2 below).
    struct Object *__nvoc_pbase_Object;
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
    struct IntrService *__nvoc_pbase_IntrService;
    struct CrashCatEngine *__nvoc_pbase_CrashCatEngine;
    struct KernelCrashCatEngine *__nvoc_pbase_KernelCrashCatEngine;
    struct KernelFalcon *__nvoc_pbase_KernelFalcon;
    struct KernelSec2 *__nvoc_pbase_KernelSec2;
    // Per-object virtual method table.  Each pointer is invoked through the matching
    // ksec2*_DISPATCH inline defined later in this header; the generated object-creation
    // code assigns the chip-appropriate implementation (e.g. _TU102/_GA100 HAL variants).
    NV_STATUS (*__ksec2ConstructEngine__)(struct OBJGPU *, struct KernelSec2 *, ENGDESCRIPTOR);
    void (*__ksec2RegisterIntrService__)(struct OBJGPU *, struct KernelSec2 *, IntrServiceRecord *);
    NV_STATUS (*__ksec2ServiceNotificationInterrupt__)(struct OBJGPU *, struct KernelSec2 *, IntrServiceServiceNotificationInterruptArguments *);
    void (*__ksec2ConfigureFalcon__)(struct OBJGPU *, struct KernelSec2 *);
    NV_STATUS (*__ksec2ResetHw__)(struct OBJGPU *, struct KernelSec2 *);
    NV_STATUS (*__ksec2StateLoad__)(struct OBJGPU *, struct KernelSec2 *, NvU32);
    NvU32 (*__ksec2ReadUcodeFuseVersion__)(struct OBJGPU *, struct KernelSec2 *, NvU32);
    const BINDATA_ARCHIVE *(*__ksec2GetBinArchiveBlUcode__)(struct OBJGPU *, struct KernelSec2 *);
    NV_STATUS (*__ksec2GetGenericBlUcode__)(struct OBJGPU *, struct KernelSec2 *, const RM_FLCN_BL_DESC **, const NvU8 **);
    const BINDATA_ARCHIVE *(*__ksec2GetBinArchiveSecurescrubUcode__)(struct OBJGPU *, struct KernelSec2 *);
    // Virtuals inherited from KernelFalcon / KernelCrashCatEngine / CrashCatEngine.
    NvBool (*__ksec2Configured__)(struct KernelSec2 *);
    NvU32 (*__ksec2PriRead__)(struct KernelSec2 *, NvU32);
    void (*__ksec2RegWrite__)(struct OBJGPU *, struct KernelSec2 *, NvU32, NvU32);
    NvU32 (*__ksec2MaskDmemAddr__)(struct OBJGPU *, struct KernelSec2 *, NvU32);
    void (*__ksec2StateDestroy__)(POBJGPU, struct KernelSec2 *);
    void (*__ksec2Vprintf__)(struct KernelSec2 *, NvBool, const char *, va_list);
    NvBool (*__ksec2ClearInterrupt__)(struct OBJGPU *, struct KernelSec2 *, IntrServiceClearInterruptArguments *);
    void (*__ksec2PriWrite__)(struct KernelSec2 *, NvU32, NvU32);
    void *(*__ksec2MapBufferDescriptor__)(struct KernelSec2 *, CrashCatBufferDescriptor *);
    void (*__ksec2SyncBufferDescriptor__)(struct KernelSec2 *, CrashCatBufferDescriptor *, NvU32, NvU32);
    NvU32 (*__ksec2RegRead__)(struct OBJGPU *, struct KernelSec2 *, NvU32);
    NvBool (*__ksec2IsPresent__)(POBJGPU, struct KernelSec2 *);
    NvU32 (*__ksec2ServiceInterrupt__)(struct OBJGPU *, struct KernelSec2 *, IntrServiceServiceInterruptArguments *);
    void (*__ksec2ReadEmem__)(struct KernelSec2 *, NvU64, NvU64, void *);
    const NvU32 *(*__ksec2GetScratchOffsets__)(struct KernelSec2 *, NV_CRASHCAT_SCRATCH_GROUP_ID);
    void (*__ksec2Unload__)(struct KernelSec2 *);
    // OBJENGSTATE lifecycle virtuals (init/load/unload/destroy phases).
    NV_STATUS (*__ksec2StateUnload__)(POBJGPU, struct KernelSec2 *, NvU32);
    NvU32 (*__ksec2GetWFL0Offset__)(struct KernelSec2 *);
    NV_STATUS (*__ksec2StateInitLocked__)(POBJGPU, struct KernelSec2 *);
    NV_STATUS (*__ksec2StatePreLoad__)(POBJGPU, struct KernelSec2 *, NvU32);
    NV_STATUS (*__ksec2StatePostUnload__)(POBJGPU, struct KernelSec2 *, NvU32);
    NV_STATUS (*__ksec2StatePreUnload__)(POBJGPU, struct KernelSec2 *, NvU32);
    NV_STATUS (*__ksec2StateInitUnlocked__)(POBJGPU, struct KernelSec2 *);
    void (*__ksec2InitMissing__)(POBJGPU, struct KernelSec2 *);
    NV_STATUS (*__ksec2StatePreInitLocked__)(POBJGPU, struct KernelSec2 *);
    NV_STATUS (*__ksec2StatePreInitUnlocked__)(POBJGPU, struct KernelSec2 *);
    NV_STATUS (*__ksec2StatePostLoad__)(POBJGPU, struct KernelSec2 *, NvU32);
    void (*__ksec2UnmapBufferDescriptor__)(struct KernelSec2 *, CrashCatBufferDescriptor *);
    void (*__ksec2ReadDmem__)(struct KernelSec2 *, NvU32, NvU32, void *);
    // Data members: cached pointers to the generic falcon bootloader descriptor and
    // image (see ksec2GetGenericBlUcode).  NOTE(review): ownership/lifetime is managed
    // outside this header — presumably released in ksec2Destruct; confirm in the IMPL.
    const RM_FLCN_BL_DESC *pGenericBlUcodeDesc;
    const NvU8 *pGenericBlUcodeImg;
};
106 
// Public typedef and NVOC class id for KernelSec2 (guarded so other generated
// headers may emit the same forward definitions).
#ifndef __NVOC_CLASS_KernelSec2_TYPEDEF__
#define __NVOC_CLASS_KernelSec2_TYPEDEF__
typedef struct KernelSec2 KernelSec2;
#endif /* __NVOC_CLASS_KernelSec2_TYPEDEF__ */

#ifndef __nvoc_class_id_KernelSec2
#define __nvoc_class_id_KernelSec2 0x2f36c9
#endif /* __nvoc_class_id_KernelSec2 */

// Class descriptor, defined in the generated g_kernel_sec2_nvoc.c.
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelSec2;

// Compile-time upcast: fetch the cached KernelSec2 ancestor pointer.
#define __staticCast_KernelSec2(pThis) \
    ((pThis)->__nvoc_pbase_KernelSec2)

// Run-time downcast via NVOC RTTI; compiles to NULL when the class is disabled.
#ifdef __nvoc_kernel_sec2_h_disabled
#define __dynamicCast_KernelSec2(pThis) ((KernelSec2*)NULL)
#else //__nvoc_kernel_sec2_h_disabled
#define __dynamicCast_KernelSec2(pThis) \
    ((KernelSec2*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelSec2)))
#endif //__nvoc_kernel_sec2_h_disabled

// Maps the KSEC2 "is missing" property onto the base OBJENGSTATE property.
#define PDB_PROP_KSEC2_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KSEC2_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING

// Object construction entry points (implemented in the generated .c file).
NV_STATUS __nvoc_objCreateDynamic_KernelSec2(KernelSec2**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelSec2(KernelSec2**, Dynamic*, NvU32);
#define __objCreate_KernelSec2(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelSec2((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
136 
// Virtual-call wrapper macros.  ksec2Foo(...) routes through the per-object
// vtable via the ksec2Foo_DISPATCH inline defined later in this header.  The
// _HAL variants expand to the same dispatch: per-chip selection happens when
// the vtable is populated at object-construction time, not at the call site.
#define ksec2ConstructEngine(pGpu, pKernelSec2, arg0) ksec2ConstructEngine_DISPATCH(pGpu, pKernelSec2, arg0)
#define ksec2ConstructEngine_HAL(pGpu, pKernelSec2, arg0) ksec2ConstructEngine_DISPATCH(pGpu, pKernelSec2, arg0)
#define ksec2RegisterIntrService(pGpu, pKernelSec2, pRecords) ksec2RegisterIntrService_DISPATCH(pGpu, pKernelSec2, pRecords)
#define ksec2RegisterIntrService_HAL(pGpu, pKernelSec2, pRecords) ksec2RegisterIntrService_DISPATCH(pGpu, pKernelSec2, pRecords)
#define ksec2ServiceNotificationInterrupt(arg0, arg1, arg2) ksec2ServiceNotificationInterrupt_DISPATCH(arg0, arg1, arg2)
#define ksec2ServiceNotificationInterrupt_HAL(arg0, arg1, arg2) ksec2ServiceNotificationInterrupt_DISPATCH(arg0, arg1, arg2)
#define ksec2ConfigureFalcon(pGpu, pKernelSec2) ksec2ConfigureFalcon_DISPATCH(pGpu, pKernelSec2)
#define ksec2ConfigureFalcon_HAL(pGpu, pKernelSec2) ksec2ConfigureFalcon_DISPATCH(pGpu, pKernelSec2)
#define ksec2ResetHw(pGpu, pKernelSec2) ksec2ResetHw_DISPATCH(pGpu, pKernelSec2)
#define ksec2ResetHw_HAL(pGpu, pKernelSec2) ksec2ResetHw_DISPATCH(pGpu, pKernelSec2)
#define ksec2StateLoad(pGpu, pKernelSec2, arg0) ksec2StateLoad_DISPATCH(pGpu, pKernelSec2, arg0)
#define ksec2StateLoad_HAL(pGpu, pKernelSec2, arg0) ksec2StateLoad_DISPATCH(pGpu, pKernelSec2, arg0)
#define ksec2ReadUcodeFuseVersion(pGpu, pKernelSec2, ucodeId) ksec2ReadUcodeFuseVersion_DISPATCH(pGpu, pKernelSec2, ucodeId)
#define ksec2ReadUcodeFuseVersion_HAL(pGpu, pKernelSec2, ucodeId) ksec2ReadUcodeFuseVersion_DISPATCH(pGpu, pKernelSec2, ucodeId)
#define ksec2GetBinArchiveBlUcode(pGpu, pKernelSec2) ksec2GetBinArchiveBlUcode_DISPATCH(pGpu, pKernelSec2)
#define ksec2GetBinArchiveBlUcode_HAL(pGpu, pKernelSec2) ksec2GetBinArchiveBlUcode_DISPATCH(pGpu, pKernelSec2)
#define ksec2GetGenericBlUcode(pGpu, pKernelSec2, ppDesc, ppImg) ksec2GetGenericBlUcode_DISPATCH(pGpu, pKernelSec2, ppDesc, ppImg)
#define ksec2GetGenericBlUcode_HAL(pGpu, pKernelSec2, ppDesc, ppImg) ksec2GetGenericBlUcode_DISPATCH(pGpu, pKernelSec2, ppDesc, ppImg)
#define ksec2GetBinArchiveSecurescrubUcode(pGpu, pKernelSec2) ksec2GetBinArchiveSecurescrubUcode_DISPATCH(pGpu, pKernelSec2)
#define ksec2GetBinArchiveSecurescrubUcode_HAL(pGpu, pKernelSec2) ksec2GetBinArchiveSecurescrubUcode_DISPATCH(pGpu, pKernelSec2)
// Wrappers for virtuals inherited from KernelFalcon / KernelCrashCatEngine /
// IntrService (no _HAL variants are generated for these).
#define ksec2Configured(arg0) ksec2Configured_DISPATCH(arg0)
#define ksec2PriRead(arg0, offset) ksec2PriRead_DISPATCH(arg0, offset)
#define ksec2RegWrite(pGpu, pKernelFlcn, offset, data) ksec2RegWrite_DISPATCH(pGpu, pKernelFlcn, offset, data)
#define ksec2MaskDmemAddr(pGpu, pKernelFlcn, addr) ksec2MaskDmemAddr_DISPATCH(pGpu, pKernelFlcn, addr)
#define ksec2StateDestroy(pGpu, pEngstate) ksec2StateDestroy_DISPATCH(pGpu, pEngstate)
#define ksec2Vprintf(arg0, bReportStart, fmt, args) ksec2Vprintf_DISPATCH(arg0, bReportStart, fmt, args)
#define ksec2ClearInterrupt(pGpu, pIntrService, pParams) ksec2ClearInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define ksec2PriWrite(arg0, offset, data) ksec2PriWrite_DISPATCH(arg0, offset, data)
#define ksec2MapBufferDescriptor(arg0, pBufDesc) ksec2MapBufferDescriptor_DISPATCH(arg0, pBufDesc)
#define ksec2SyncBufferDescriptor(arg0, pBufDesc, offset, size) ksec2SyncBufferDescriptor_DISPATCH(arg0, pBufDesc, offset, size)
#define ksec2RegRead(pGpu, pKernelFlcn, offset) ksec2RegRead_DISPATCH(pGpu, pKernelFlcn, offset)
#define ksec2IsPresent(pGpu, pEngstate) ksec2IsPresent_DISPATCH(pGpu, pEngstate)
#define ksec2ServiceInterrupt(pGpu, pIntrService, pParams) ksec2ServiceInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define ksec2ReadEmem(arg0, offset, size, pBuf) ksec2ReadEmem_DISPATCH(arg0, offset, size, pBuf)
#define ksec2GetScratchOffsets(arg0, scratchGroupId) ksec2GetScratchOffsets_DISPATCH(arg0, scratchGroupId)
#define ksec2Unload(arg0) ksec2Unload_DISPATCH(arg0)
// Wrappers for OBJENGSTATE lifecycle virtuals.
#define ksec2StateUnload(pGpu, pEngstate, arg0) ksec2StateUnload_DISPATCH(pGpu, pEngstate, arg0)
#define ksec2GetWFL0Offset(arg0) ksec2GetWFL0Offset_DISPATCH(arg0)
#define ksec2StateInitLocked(pGpu, pEngstate) ksec2StateInitLocked_DISPATCH(pGpu, pEngstate)
#define ksec2StatePreLoad(pGpu, pEngstate, arg0) ksec2StatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define ksec2StatePostUnload(pGpu, pEngstate, arg0) ksec2StatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define ksec2StatePreUnload(pGpu, pEngstate, arg0) ksec2StatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define ksec2StateInitUnlocked(pGpu, pEngstate) ksec2StateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define ksec2InitMissing(pGpu, pEngstate) ksec2InitMissing_DISPATCH(pGpu, pEngstate)
#define ksec2StatePreInitLocked(pGpu, pEngstate) ksec2StatePreInitLocked_DISPATCH(pGpu, pEngstate)
#define ksec2StatePreInitUnlocked(pGpu, pEngstate) ksec2StatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define ksec2StatePostLoad(pGpu, pEngstate, arg0) ksec2StatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
#define ksec2UnmapBufferDescriptor(arg0, pBufDesc) ksec2UnmapBufferDescriptor_DISPATCH(arg0, pBufDesc)
#define ksec2ReadDmem(arg0, offset, size, pBuf) ksec2ReadDmem_DISPATCH(arg0, offset, size, pBuf)
// Engine construction: _IMPL is the single (non-HAL) implementation, selected
// into the vtable at object creation.
NV_STATUS ksec2ConstructEngine_IMPL(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, ENGDESCRIPTOR arg0);

static inline NV_STATUS ksec2ConstructEngine_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, ENGDESCRIPTOR arg0) {
    return pKernelSec2->__ksec2ConstructEngine__(pGpu, pKernelSec2, arg0);
}

// Registers SEC2's interrupt handlers into the caller's record table.
// NOTE(review): the 168-entry bound is emitted by NVOC (likely the engine-index
// table size); the array parameter still decays to a pointer — confirm against
// the IntrServiceRecord table definition.
void ksec2RegisterIntrService_IMPL(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, IntrServiceRecord pRecords[168]);

static inline void ksec2RegisterIntrService_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, IntrServiceRecord pRecords[168]) {
    pKernelSec2->__ksec2RegisterIntrService__(pGpu, pKernelSec2, pRecords);
}

// Services a notification interrupt reported for this engine.
NV_STATUS ksec2ServiceNotificationInterrupt_IMPL(struct OBJGPU *arg0, struct KernelSec2 *arg1, IntrServiceServiceNotificationInterruptArguments *arg2);

static inline NV_STATUS ksec2ServiceNotificationInterrupt_DISPATCH(struct OBJGPU *arg0, struct KernelSec2 *arg1, IntrServiceServiceNotificationInterruptArguments *arg2) {
    return arg1->__ksec2ServiceNotificationInterrupt__(arg0, arg1, arg2);
}
203 
// Chip-specific HAL implementations of ksec2ConfigureFalcon; the suffix names
// the first GPU architecture the variant applies to (NVOC/RM convention).
void ksec2ConfigureFalcon_TU102(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

void ksec2ConfigureFalcon_GA100(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

void ksec2ConfigureFalcon_GA102(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

static inline void ksec2ConfigureFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    pKernelSec2->__ksec2ConfigureFalcon__(pGpu, pKernelSec2);
}

// Falcon reset; single HAL implementation (Turing+).
NV_STATUS ksec2ResetHw_TU102(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

static inline NV_STATUS ksec2ResetHw_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    return pKernelSec2->__ksec2ResetHw__(pGpu, pKernelSec2);
}

// State-load phase: real implementation on GH100; the hash-suffixed stub below
// is the generated no-op used on chips where SEC2 state load does nothing.
NV_STATUS ksec2StateLoad_GH100(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 arg0);

static inline NV_STATUS ksec2StateLoad_56cd7a(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 arg0) {
    return NV_OK;
}

static inline NV_STATUS ksec2StateLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 arg0) {
    return pKernelSec2->__ksec2StateLoad__(pGpu, pKernelSec2, arg0);
}
229 
// Ucode fuse-version query: generated stub returns 0 where fuses are not read;
// GA100+ has a real implementation.
static inline NvU32 ksec2ReadUcodeFuseVersion_b2b553(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 ucodeId) {
    return 0;
}

NvU32 ksec2ReadUcodeFuseVersion_GA100(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 ucodeId);

static inline NvU32 ksec2ReadUcodeFuseVersion_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 ucodeId) {
    return pKernelSec2->__ksec2ReadUcodeFuseVersion__(pGpu, pKernelSec2, ucodeId);
}

// Bootloader ucode archive lookup; the _80f438 stub asserts and returns NULL
// on configurations where no BL archive exists.
const BINDATA_ARCHIVE *ksec2GetBinArchiveBlUcode_TU102(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

static inline const BINDATA_ARCHIVE *ksec2GetBinArchiveBlUcode_80f438(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}

static inline const BINDATA_ARCHIVE *ksec2GetBinArchiveBlUcode_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    return pKernelSec2->__ksec2GetBinArchiveBlUcode__(pGpu, pKernelSec2);
}

// Returns (via out-params) the generic falcon bootloader descriptor and image;
// the _5baef9 stub asserts and reports NV_ERR_NOT_SUPPORTED.
NV_STATUS ksec2GetGenericBlUcode_TU102(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, const RM_FLCN_BL_DESC **ppDesc, const NvU8 **ppImg);

static inline NV_STATUS ksec2GetGenericBlUcode_5baef9(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, const RM_FLCN_BL_DESC **ppDesc, const NvU8 **ppImg) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

static inline NV_STATUS ksec2GetGenericBlUcode_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, const RM_FLCN_BL_DESC **ppDesc, const NvU8 **ppImg) {
    return pKernelSec2->__ksec2GetGenericBlUcode__(pGpu, pKernelSec2, ppDesc, ppImg);
}

// Secure-scrub ucode archive: real implementation on AD10X; stub returns NULL
// elsewhere (same generated stub body as the BL archive case above).
const BINDATA_ARCHIVE *ksec2GetBinArchiveSecurescrubUcode_AD10X(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

static inline const BINDATA_ARCHIVE *ksec2GetBinArchiveSecurescrubUcode_80f438(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}

static inline const BINDATA_ARCHIVE *ksec2GetBinArchiveSecurescrubUcode_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    return pKernelSec2->__ksec2GetBinArchiveSecurescrubUcode__(pGpu, pKernelSec2);
}
269 
// Thin forwarding dispatchers for virtuals inherited from KernelFalcon,
// KernelCrashCatEngine, CrashCatEngine, and IntrService.  Each simply invokes
// the corresponding vtable slot; no logic lives here.
static inline NvBool ksec2Configured_DISPATCH(struct KernelSec2 *arg0) {
    return arg0->__ksec2Configured__(arg0);
}

static inline NvU32 ksec2PriRead_DISPATCH(struct KernelSec2 *arg0, NvU32 offset) {
    return arg0->__ksec2PriRead__(arg0, offset);
}

static inline void ksec2RegWrite_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelFlcn, NvU32 offset, NvU32 data) {
    pKernelFlcn->__ksec2RegWrite__(pGpu, pKernelFlcn, offset, data);
}

static inline NvU32 ksec2MaskDmemAddr_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelFlcn, NvU32 addr) {
    return pKernelFlcn->__ksec2MaskDmemAddr__(pGpu, pKernelFlcn, addr);
}

static inline void ksec2StateDestroy_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    pEngstate->__ksec2StateDestroy__(pGpu, pEngstate);
}

// Crash-report printf hook (va_list form).
static inline void ksec2Vprintf_DISPATCH(struct KernelSec2 *arg0, NvBool bReportStart, const char *fmt, va_list args) {
    arg0->__ksec2Vprintf__(arg0, bReportStart, fmt, args);
}

static inline NvBool ksec2ClearInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pIntrService, IntrServiceClearInterruptArguments *pParams) {
    return pIntrService->__ksec2ClearInterrupt__(pGpu, pIntrService, pParams);
}

static inline void ksec2PriWrite_DISPATCH(struct KernelSec2 *arg0, NvU32 offset, NvU32 data) {
    arg0->__ksec2PriWrite__(arg0, offset, data);
}

static inline void *ksec2MapBufferDescriptor_DISPATCH(struct KernelSec2 *arg0, CrashCatBufferDescriptor *pBufDesc) {
    return arg0->__ksec2MapBufferDescriptor__(arg0, pBufDesc);
}

static inline void ksec2SyncBufferDescriptor_DISPATCH(struct KernelSec2 *arg0, CrashCatBufferDescriptor *pBufDesc, NvU32 offset, NvU32 size) {
    arg0->__ksec2SyncBufferDescriptor__(arg0, pBufDesc, offset, size);
}

static inline NvU32 ksec2RegRead_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelFlcn, NvU32 offset) {
    return pKernelFlcn->__ksec2RegRead__(pGpu, pKernelFlcn, offset);
}

static inline NvBool ksec2IsPresent_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return pEngstate->__ksec2IsPresent__(pGpu, pEngstate);
}

static inline NvU32 ksec2ServiceInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pIntrService, IntrServiceServiceInterruptArguments *pParams) {
    return pIntrService->__ksec2ServiceInterrupt__(pGpu, pIntrService, pParams);
}

static inline void ksec2ReadEmem_DISPATCH(struct KernelSec2 *arg0, NvU64 offset, NvU64 size, void *pBuf) {
    arg0->__ksec2ReadEmem__(arg0, offset, size, pBuf);
}

static inline const NvU32 *ksec2GetScratchOffsets_DISPATCH(struct KernelSec2 *arg0, NV_CRASHCAT_SCRATCH_GROUP_ID scratchGroupId) {
    return arg0->__ksec2GetScratchOffsets__(arg0, scratchGroupId);
}

static inline void ksec2Unload_DISPATCH(struct KernelSec2 *arg0) {
    arg0->__ksec2Unload__(arg0);
}
333 
// Forwarding dispatchers for the OBJENGSTATE lifecycle virtuals
// (pre-init / init / pre-load / post-load / unload phases) plus the remaining
// crashcat buffer/DMEM accessors.  All are pure vtable forwards.
static inline NV_STATUS ksec2StateUnload_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return pEngstate->__ksec2StateUnload__(pGpu, pEngstate, arg0);
}

static inline NvU32 ksec2GetWFL0Offset_DISPATCH(struct KernelSec2 *arg0) {
    return arg0->__ksec2GetWFL0Offset__(arg0);
}

static inline NV_STATUS ksec2StateInitLocked_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return pEngstate->__ksec2StateInitLocked__(pGpu, pEngstate);
}

static inline NV_STATUS ksec2StatePreLoad_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return pEngstate->__ksec2StatePreLoad__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS ksec2StatePostUnload_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return pEngstate->__ksec2StatePostUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS ksec2StatePreUnload_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return pEngstate->__ksec2StatePreUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS ksec2StateInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return pEngstate->__ksec2StateInitUnlocked__(pGpu, pEngstate);
}

static inline void ksec2InitMissing_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    pEngstate->__ksec2InitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS ksec2StatePreInitLocked_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return pEngstate->__ksec2StatePreInitLocked__(pGpu, pEngstate);
}

static inline NV_STATUS ksec2StatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return pEngstate->__ksec2StatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS ksec2StatePostLoad_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return pEngstate->__ksec2StatePostLoad__(pGpu, pEngstate, arg0);
}

static inline void ksec2UnmapBufferDescriptor_DISPATCH(struct KernelSec2 *arg0, CrashCatBufferDescriptor *pBufDesc) {
    arg0->__ksec2UnmapBufferDescriptor__(arg0, pBufDesc);
}

static inline void ksec2ReadDmem_DISPATCH(struct KernelSec2 *arg0, NvU32 offset, NvU32 size, void *pBuf) {
    arg0->__ksec2ReadDmem__(arg0, offset, size, pBuf);
}
385 
// Destructor hook invoked by the generated object-teardown code.
void ksec2Destruct_IMPL(struct KernelSec2 *pKernelSec2);

#define __nvoc_ksec2Destruct(pKernelSec2) ksec2Destruct_IMPL(pKernelSec2)
389 #undef PRIVATE_FIELD
390 
391 
392 #endif  // KERNEL_SEC2_H
393 
394 #ifdef __cplusplus
395 } // extern "C"
396 #endif
397 
398 #endif // _G_KERNEL_SEC2_NVOC_H_
399