#ifndef _G_KERNEL_SEC2_NVOC_H_
#define _G_KERNEL_SEC2_NVOC_H_
#include "nvoc/runtime.h"

#ifdef __cplusplus
extern "C" {
#endif

/*
 * SPDX-FileCopyrightText: Copyright (c) 2021-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

//
// NOTE(review): the g_*_nvoc.h naming and the __nvoc_* machinery below
// indicate this header is emitted by the NVOC code generator; prefer
// regenerating over hand-editing (confirm against the build's NVOC step).
//

#include "g_kernel_sec2_nvoc.h"

#ifndef KERNEL_SEC2_H
#define KERNEL_SEC2_H

#include "core/bin_data.h"
#include "core/core.h"
#include "gpu/eng_state.h"
#include "gpu/falcon/kernel_falcon.h"
#include "gpu/gpu.h"

// forward declaration of RM_FLCN_BL_DESC from rmflcnbl.h
struct _def_rm_flcn_bl_desc;
typedef struct _def_rm_flcn_bl_desc RM_FLCN_BL_DESC;

// Fields wrapped in PRIVATE_FIELD() are addressable by their real name only
// in translation units that define NVOC_KERNEL_SEC2_H_PRIVATE_ACCESS_ALLOWED;
// everywhere else the name is mangled through NVOC_PRIVATE_FIELD().
#ifdef NVOC_KERNEL_SEC2_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif

//
// KernelSec2: per-GPU object for the SEC2 engine.  NVOC embeds the base
// classes (OBJENGSTATE, IntrService, KernelFalcon) by value, caches a
// pointer to every ancestor sub-object, and stores a per-instance table of
// function pointers that the ksec2*_DISPATCH() inlines below call through.
//
struct KernelSec2 {
    // Run-time type information for this instance.
    const struct NVOC_RTTI *__nvoc_rtti;

    // Base-class instances embedded by value.
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct IntrService __nvoc_base_IntrService;
    struct KernelFalcon __nvoc_base_KernelFalcon;

    // Cached pointers to each ancestor sub-object (set up at object creation).
    struct Object *__nvoc_pbase_Object;
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
    struct IntrService *__nvoc_pbase_IntrService;
    struct KernelFalcon *__nvoc_pbase_KernelFalcon;
    struct KernelSec2 *__nvoc_pbase_KernelSec2;

    // Virtual / HAL method slots (per-instance vtable).
    NV_STATUS (*__ksec2ConstructEngine__)(struct OBJGPU *, struct KernelSec2 *, ENGDESCRIPTOR);
    void (*__ksec2RegisterIntrService__)(struct OBJGPU *, struct KernelSec2 *, IntrServiceRecord *);
    NV_STATUS (*__ksec2ServiceNotificationInterrupt__)(struct OBJGPU *, struct KernelSec2 *, IntrServiceServiceNotificationInterruptArguments *);
    void (*__ksec2ConfigureFalcon__)(struct OBJGPU *, struct KernelSec2 *);
    NV_STATUS (*__ksec2ResetHw__)(struct OBJGPU *, struct KernelSec2 *);
    NV_STATUS (*__ksec2StateLoad__)(struct OBJGPU *, struct KernelSec2 *, NvU32);
    NvU32 (*__ksec2ReadUcodeFuseVersion__)(struct OBJGPU *, struct KernelSec2 *, NvU32);
    const BINDATA_ARCHIVE *(*__ksec2GetBinArchiveBlUcode__)(struct OBJGPU *, struct KernelSec2 *);
    NV_STATUS (*__ksec2GetGenericBlUcode__)(struct OBJGPU *, struct KernelSec2 *, const RM_FLCN_BL_DESC **, const NvU8 **);
    const BINDATA_ARCHIVE *(*__ksec2GetBinArchiveSecurescrubUcode__)(struct OBJGPU *, struct KernelSec2 *);

    // Inherited OBJENGSTATE / IntrService virtuals.
    NV_STATUS (*__ksec2StateUnload__)(POBJGPU, struct KernelSec2 *, NvU32);
    NV_STATUS (*__ksec2StateInitLocked__)(POBJGPU, struct KernelSec2 *);
    NV_STATUS (*__ksec2StatePreLoad__)(POBJGPU, struct KernelSec2 *, NvU32);
    NV_STATUS (*__ksec2StatePostUnload__)(POBJGPU, struct KernelSec2 *, NvU32);
    void (*__ksec2StateDestroy__)(POBJGPU, struct KernelSec2 *);
    NV_STATUS (*__ksec2StatePreUnload__)(POBJGPU, struct KernelSec2 *, NvU32);
    NV_STATUS (*__ksec2StateInitUnlocked__)(POBJGPU, struct KernelSec2 *);
    void (*__ksec2InitMissing__)(POBJGPU, struct KernelSec2 *);
    NV_STATUS (*__ksec2StatePreInitLocked__)(POBJGPU, struct KernelSec2 *);
    NV_STATUS (*__ksec2StatePreInitUnlocked__)(POBJGPU, struct KernelSec2 *);
    NvBool (*__ksec2ClearInterrupt__)(struct OBJGPU *, struct KernelSec2 *, IntrServiceClearInterruptArguments *);
    NV_STATUS (*__ksec2StatePostLoad__)(POBJGPU, struct KernelSec2 *, NvU32);
    NvBool (*__ksec2IsPresent__)(POBJGPU, struct KernelSec2 *);
    NvU32 (*__ksec2ServiceInterrupt__)(struct OBJGPU *, struct KernelSec2 *, IntrServiceServiceInterruptArguments *);

    // Generic bootloader ucode descriptor and image
    // (returned by ksec2GetGenericBlUcode).
    const RM_FLCN_BL_DESC *pGenericBlUcodeDesc;
    const NvU8 *pGenericBlUcodeImg;
};

#ifndef __NVOC_CLASS_KernelSec2_TYPEDEF__
#define __NVOC_CLASS_KernelSec2_TYPEDEF__
typedef struct KernelSec2 KernelSec2;
#endif /* __NVOC_CLASS_KernelSec2_TYPEDEF__ */

// NVOC class id for KernelSec2 (unique per class).
#ifndef __nvoc_class_id_KernelSec2
#define __nvoc_class_id_KernelSec2 0x2f36c9
#endif /* __nvoc_class_id_KernelSec2 */

extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelSec2;

// Upcast to KernelSec2 via the cached ancestor pointer (no runtime check).
#define __staticCast_KernelSec2(pThis) \
    ((pThis)->__nvoc_pbase_KernelSec2)

// Downcast with a runtime RTTI check; NULL when the class is compiled out.
#ifdef __nvoc_kernel_sec2_h_disabled
#define __dynamicCast_KernelSec2(pThis) ((KernelSec2*)NULL)
#else //__nvoc_kernel_sec2_h_disabled
#define __dynamicCast_KernelSec2(pThis) \
    ((KernelSec2*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelSec2)))
#endif //__nvoc_kernel_sec2_h_disabled

// PDB property indirection: the IS_MISSING property lives on the OBJENGSTATE
// base.  _BASE_CAST supplies the member access path (the trailing '.' is
// intentional -- the property name is token-pasted after it) and _BASE_NAME
// supplies the base-class property name.
#define PDB_PROP_KSEC2_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_KSEC2_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING

NV_STATUS __nvoc_objCreateDynamic_KernelSec2(KernelSec2**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelSec2(KernelSec2**, Dynamic*, NvU32);
#define __objCreate_KernelSec2(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelSec2((ppNewObj), staticCast((pParent), Dynamic), (createFlags))

//
// Call-side wrappers: each ksec2Foo() macro expands to the matching
// ksec2Foo_DISPATCH() inline, which calls through the instance's vtable
// slot.  The _HAL aliases dispatch through the same slot.
//
#define ksec2ConstructEngine(pGpu, pKernelSec2, arg0) ksec2ConstructEngine_DISPATCH(pGpu, pKernelSec2, arg0)
#define ksec2ConstructEngine_HAL(pGpu, pKernelSec2, arg0) ksec2ConstructEngine_DISPATCH(pGpu, pKernelSec2, arg0)
#define ksec2RegisterIntrService(pGpu, pKernelSec2, pRecords) ksec2RegisterIntrService_DISPATCH(pGpu, pKernelSec2, pRecords)
#define ksec2RegisterIntrService_HAL(pGpu, pKernelSec2, pRecords) ksec2RegisterIntrService_DISPATCH(pGpu, pKernelSec2, pRecords)
#define ksec2ServiceNotificationInterrupt(arg0, arg1, arg2) ksec2ServiceNotificationInterrupt_DISPATCH(arg0, arg1, arg2)
#define ksec2ServiceNotificationInterrupt_HAL(arg0, arg1, arg2) ksec2ServiceNotificationInterrupt_DISPATCH(arg0, arg1, arg2)
#define ksec2ConfigureFalcon(pGpu, pKernelSec2) ksec2ConfigureFalcon_DISPATCH(pGpu, pKernelSec2)
#define ksec2ConfigureFalcon_HAL(pGpu, pKernelSec2) ksec2ConfigureFalcon_DISPATCH(pGpu, pKernelSec2)
#define ksec2ResetHw(pGpu, pKernelSec2) ksec2ResetHw_DISPATCH(pGpu, pKernelSec2)
#define ksec2ResetHw_HAL(pGpu, pKernelSec2) ksec2ResetHw_DISPATCH(pGpu, pKernelSec2)
#define ksec2StateLoad(pGpu, pKernelSec2, arg0) ksec2StateLoad_DISPATCH(pGpu, pKernelSec2, arg0)
#define ksec2StateLoad_HAL(pGpu, pKernelSec2, arg0) ksec2StateLoad_DISPATCH(pGpu, pKernelSec2, arg0)
#define ksec2ReadUcodeFuseVersion(pGpu, pKernelSec2, ucodeId) ksec2ReadUcodeFuseVersion_DISPATCH(pGpu, pKernelSec2, ucodeId)
#define ksec2ReadUcodeFuseVersion_HAL(pGpu, pKernelSec2, ucodeId) ksec2ReadUcodeFuseVersion_DISPATCH(pGpu, pKernelSec2, ucodeId)
#define ksec2GetBinArchiveBlUcode(pGpu, pKernelSec2) ksec2GetBinArchiveBlUcode_DISPATCH(pGpu, pKernelSec2)
#define ksec2GetBinArchiveBlUcode_HAL(pGpu, pKernelSec2) ksec2GetBinArchiveBlUcode_DISPATCH(pGpu, pKernelSec2)
#define ksec2GetGenericBlUcode(pGpu, pKernelSec2, ppDesc, ppImg) ksec2GetGenericBlUcode_DISPATCH(pGpu, pKernelSec2, ppDesc, ppImg)
#define ksec2GetGenericBlUcode_HAL(pGpu, pKernelSec2, ppDesc, ppImg) ksec2GetGenericBlUcode_DISPATCH(pGpu, pKernelSec2, ppDesc, ppImg)
#define ksec2GetBinArchiveSecurescrubUcode(pGpu, pKernelSec2) ksec2GetBinArchiveSecurescrubUcode_DISPATCH(pGpu, pKernelSec2)
#define ksec2GetBinArchiveSecurescrubUcode_HAL(pGpu, pKernelSec2) ksec2GetBinArchiveSecurescrubUcode_DISPATCH(pGpu, pKernelSec2)

// Inherited OBJENGSTATE / IntrService entry points (no _HAL aliases).
#define ksec2StateUnload(pGpu, pEngstate, arg0) ksec2StateUnload_DISPATCH(pGpu, pEngstate, arg0)
#define ksec2StateInitLocked(pGpu, pEngstate) ksec2StateInitLocked_DISPATCH(pGpu, pEngstate)
#define ksec2StatePreLoad(pGpu, pEngstate, arg0) ksec2StatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define ksec2StatePostUnload(pGpu, pEngstate, arg0) ksec2StatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define ksec2StateDestroy(pGpu, pEngstate) ksec2StateDestroy_DISPATCH(pGpu, pEngstate)
#define ksec2StatePreUnload(pGpu, pEngstate, arg0) ksec2StatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define ksec2StateInitUnlocked(pGpu, pEngstate) ksec2StateInitUnlocked_DISPATCH(pGpu, pEngstate)
#define ksec2InitMissing(pGpu, pEngstate) ksec2InitMissing_DISPATCH(pGpu, pEngstate)
#define ksec2StatePreInitLocked(pGpu, pEngstate) ksec2StatePreInitLocked_DISPATCH(pGpu, pEngstate)
#define ksec2StatePreInitUnlocked(pGpu, pEngstate) ksec2StatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define ksec2ClearInterrupt(pGpu, pIntrService, pParams) ksec2ClearInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define ksec2StatePostLoad(pGpu, pEngstate, arg0) ksec2StatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
#define ksec2IsPresent(pGpu, pEngstate) ksec2IsPresent_DISPATCH(pGpu, pEngstate)
#define ksec2ServiceInterrupt(pGpu, pIntrService, pParams) ksec2ServiceInterrupt_DISPATCH(pGpu, pIntrService, pParams)

// Engine construction hook (OBJENGSTATE override), implemented in C.
NV_STATUS ksec2ConstructEngine_IMPL(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, ENGDESCRIPTOR arg0);

static inline NV_STATUS ksec2ConstructEngine_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, ENGDESCRIPTOR arg0) {
    return pKernelSec2->__ksec2ConstructEngine__(pGpu, pKernelSec2, arg0);
}

// NOTE(review): the [167] bound presumably mirrors the IntrService record
// table size -- confirm against IntrServiceRecord's users.  As a parameter
// the array decays to a pointer, so the bound is documentation only.
void ksec2RegisterIntrService_IMPL(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, IntrServiceRecord pRecords[167]);

static inline void ksec2RegisterIntrService_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, IntrServiceRecord pRecords[167]) {
    pKernelSec2->__ksec2RegisterIntrService__(pGpu, pKernelSec2, pRecords);
}

NV_STATUS ksec2ServiceNotificationInterrupt_IMPL(struct OBJGPU *arg0, struct KernelSec2 *arg1, IntrServiceServiceNotificationInterruptArguments *arg2);

static inline NV_STATUS ksec2ServiceNotificationInterrupt_DISPATCH(struct OBJGPU *arg0, struct KernelSec2 *arg1, IntrServiceServiceNotificationInterruptArguments *arg2) {
    return arg1->__ksec2ServiceNotificationInterrupt__(arg0, arg1, arg2);
}

// Per-chip HAL implementations (TU102 / GA100 / GA102 variants).
void ksec2ConfigureFalcon_TU102(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

void ksec2ConfigureFalcon_GA100(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

void ksec2ConfigureFalcon_GA102(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

static inline void ksec2ConfigureFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    pKernelSec2->__ksec2ConfigureFalcon__(pGpu, pKernelSec2);
}

NV_STATUS ksec2ResetHw_TU102(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

static inline NV_STATUS ksec2ResetHw_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    return pKernelSec2->__ksec2ResetHw__(pGpu, pKernelSec2);
}

NV_STATUS ksec2StateLoad_GH100(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 arg0);

// NVOC-generated stub (hash-suffixed): no-op success for configurations
// that need no SEC2 state-load work.
static inline NV_STATUS ksec2StateLoad_56cd7a(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 arg0) {
    return NV_OK;
}

static inline NV_STATUS ksec2StateLoad_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 arg0) {
    return pKernelSec2->__ksec2StateLoad__(pGpu, pKernelSec2, arg0);
}

// NVOC-generated stub: fuse version reads back as 0 where unsupported.
static inline NvU32 ksec2ReadUcodeFuseVersion_b2b553(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 ucodeId) {
    return 0;
}

NvU32 ksec2ReadUcodeFuseVersion_GA100(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 ucodeId);

static inline NvU32 ksec2ReadUcodeFuseVersion_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, NvU32 ucodeId) {
    return pKernelSec2->__ksec2ReadUcodeFuseVersion__(pGpu, pKernelSec2, ucodeId);
}

const BINDATA_ARCHIVE *ksec2GetBinArchiveBlUcode_TU102(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

// NVOC-generated stub: asserts and returns NULL where this archive
// does not exist.
static inline const BINDATA_ARCHIVE *ksec2GetBinArchiveBlUcode_80f438(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}

static inline const BINDATA_ARCHIVE *ksec2GetBinArchiveBlUcode_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    return pKernelSec2->__ksec2GetBinArchiveBlUcode__(pGpu, pKernelSec2);
}

NV_STATUS ksec2GetGenericBlUcode_TU102(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, const RM_FLCN_BL_DESC **ppDesc, const NvU8 **ppImg);

// NVOC-generated stub: asserts and reports NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS ksec2GetGenericBlUcode_5baef9(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, const RM_FLCN_BL_DESC **ppDesc, const NvU8 **ppImg) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

static inline NV_STATUS ksec2GetGenericBlUcode_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2, const RM_FLCN_BL_DESC **ppDesc, const NvU8 **ppImg) {
    return pKernelSec2->__ksec2GetGenericBlUcode__(pGpu, pKernelSec2, ppDesc, ppImg);
}

const BINDATA_ARCHIVE *ksec2GetBinArchiveSecurescrubUcode_AD10X(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2);

// NVOC-generated stub: asserts and returns NULL where the secure-scrub
// ucode archive does not exist.
static inline const BINDATA_ARCHIVE *ksec2GetBinArchiveSecurescrubUcode_80f438(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, ((void *)0));
}

static inline const BINDATA_ARCHIVE *ksec2GetBinArchiveSecurescrubUcode_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pKernelSec2) {
    return pKernelSec2->__ksec2GetBinArchiveSecurescrubUcode__(pGpu, pKernelSec2);
}

//
// OBJENGSTATE / IntrService virtual overrides: thin dispatchers that call
// through the per-instance vtable slot.
//
static inline NV_STATUS ksec2StateUnload_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return pEngstate->__ksec2StateUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS ksec2StateInitLocked_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return pEngstate->__ksec2StateInitLocked__(pGpu, pEngstate);
}

static inline NV_STATUS ksec2StatePreLoad_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return pEngstate->__ksec2StatePreLoad__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS ksec2StatePostUnload_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return pEngstate->__ksec2StatePostUnload__(pGpu, pEngstate, arg0);
}

static inline void ksec2StateDestroy_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    pEngstate->__ksec2StateDestroy__(pGpu, pEngstate);
}

static inline NV_STATUS ksec2StatePreUnload_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return pEngstate->__ksec2StatePreUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS ksec2StateInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return pEngstate->__ksec2StateInitUnlocked__(pGpu, pEngstate);
}

static inline void ksec2InitMissing_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    pEngstate->__ksec2InitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS ksec2StatePreInitLocked_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return pEngstate->__ksec2StatePreInitLocked__(pGpu, pEngstate);
}

static inline NV_STATUS ksec2StatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return pEngstate->__ksec2StatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NvBool ksec2ClearInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pIntrService, IntrServiceClearInterruptArguments *pParams) {
    return pIntrService->__ksec2ClearInterrupt__(pGpu, pIntrService, pParams);
}

static inline NV_STATUS ksec2StatePostLoad_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate, NvU32 arg0) {
    return pEngstate->__ksec2StatePostLoad__(pGpu, pEngstate, arg0);
}

static inline NvBool ksec2IsPresent_DISPATCH(POBJGPU pGpu, struct KernelSec2 *pEngstate) {
    return pEngstate->__ksec2IsPresent__(pGpu, pEngstate);
}

static inline NvU32 ksec2ServiceInterrupt_DISPATCH(struct OBJGPU *pGpu, struct KernelSec2 *pIntrService, IntrServiceServiceInterruptArguments *pParams) {
    return pIntrService->__ksec2ServiceInterrupt__(pGpu, pIntrService, pParams);
}

// Destructor; generated teardown code reaches it via the
// __nvoc_ksec2Destruct alias below.
void ksec2Destruct_IMPL(struct KernelSec2 *pKernelSec2);

#define __nvoc_ksec2Destruct(pKernelSec2) ksec2Destruct_IMPL(pKernelSec2)
#undef PRIVATE_FIELD


#endif // KERNEL_SEC2_H

#ifdef __cplusplus
} // extern "C"
#endif
#endif // _G_KERNEL_SEC2_NVOC_H_