1 #ifndef _G_KERNEL_FALCON_NVOC_H_
2 #define _G_KERNEL_FALCON_NVOC_H_
3 #include "nvoc/runtime.h"
4
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8
9 /*
10 * SPDX-FileCopyrightText: Copyright (c) 1993-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11 * SPDX-License-Identifier: MIT
12 *
13 * Permission is hereby granted, free of charge, to any person obtaining a
14 * copy of this software and associated documentation files (the "Software"),
15 * to deal in the Software without restriction, including without limitation
16 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17 * and/or sell copies of the Software, and to permit persons to whom the
18 * Software is furnished to do so, subject to the following conditions:
19 *
20 * The above copyright notice and this permission notice shall be included in
21 * all copies or substantial portions of the Software.
22 *
23 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29 * DEALINGS IN THE SOFTWARE.
30 */
31
32 /*!
33 * Provides definitions for all KernelFalcon data structures and
34 * interfaces.
35 */
36
37 #include "g_kernel_falcon_nvoc.h"
38
39 #ifndef KERNEL_FALCON_H
40 #define KERNEL_FALCON_H
41
42 #include "core/core.h"
43 #include "gpu/falcon/falcon_common.h"
44 #include "gpu/falcon/kernel_crashcat_engine.h"
45 #include "gpu/intr/intr_service.h"
46
// Forward declaration only: this header passes KernelChannel pointers to the
// context alloc/free APIs below but never dereferences the type.
struct KernelChannel;

#ifndef __NVOC_CLASS_KernelChannel_TYPEDEF__
#define __NVOC_CLASS_KernelChannel_TYPEDEF__
typedef struct KernelChannel KernelChannel;
#endif /* __NVOC_CLASS_KernelChannel_TYPEDEF__ */

// NVOC class id for KernelChannel (generated; must match the class definition).
#ifndef __nvoc_class_id_KernelChannel
#define __nvoc_class_id_KernelChannel 0x5d8d70
#endif /* __nvoc_class_id_KernelChannel */
57
58
59
/*!
 * Static per-engine configuration consumed by kflcnConfigureEngine() to
 * initialize the matching fields of a KernelFalcon instance.
 */
typedef struct KernelFalconEngineConfig {
    NvU32 registerBase;       // i.e. NV_P{GSP,SEC,NVDEC}
    NvU32 riscvRegisterBase;  // i.e. NV_FALCON2_{GSP,SEC,NVDEC}_BASE
    NvU32 fbifBase;           // i.e. NV_P{GSP,SEC,NVDEC}_FBIF_BASE
    NvBool bBootFromHs;       // whether engine has Boot-from-HS (true for HS-capable engines GA10X+)
    NvU32 pmcEnableMask;      // engine's enable bitmask in PMC (or 0 if engine reset is not in PMC)
    NvU32 bIsPmcDeviceEngine; // whether engine's enable bit is in NV_PMC_DEVICE_ENABLE (vs. NV_PMC_ENABLE)
                              // NOTE(review): boolean by name ("b" prefix) but declared NvU32 —
                              // kept as-is; changing the type would alter struct layout/ABI.
    ENGDESCRIPTOR physEngDesc; // The engine descriptor for the falcon (e.g. ENG_SEC2)
    NvU32 ctxAttr;            // Memory attributes used for context buffers
    NvU32 ctxBufferSize;      // Context buffer size in bytes
    NvU32 addrSpaceList;      // index into ADDRLIST array in mem_desc.h

    KernelCrashCatEngineConfig crashcatEngConfig; // forwarded to the KernelCrashCatEngine base
} KernelFalconEngineConfig;
74
75 /*!
76 * Base class for booting Falcon cores (including RISC-V)
77 */
78
79 // Private field names are wrapped in PRIVATE_FIELD, which does nothing for
80 // the matching C source file, but causes diagnostics to be issued if another
81 // source file references the field.
82 #ifdef NVOC_KERNEL_FALCON_H_PRIVATE_ACCESS_ALLOWED
83 #define PRIVATE_FIELD(x) x
84 #else
85 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
86 #endif
87
// NVOC-generated class layout. Field order and types are ABI: the generated
// .c file and all _DISPATCH inlines below index into this struct directly.
struct KernelFalcon {
    // Run-time type info and base-class bookkeeping (NVOC boilerplate).
    const struct NVOC_RTTI *__nvoc_rtti;
    struct KernelCrashCatEngine __nvoc_base_KernelCrashCatEngine;
    struct CrashCatEngine *__nvoc_pbase_CrashCatEngine;
    struct KernelCrashCatEngine *__nvoc_pbase_KernelCrashCatEngine;
    struct KernelFalcon *__nvoc_pbase_KernelFalcon;

    // Per-object virtual function table; each pointer is invoked by the
    // corresponding kflcn*_DISPATCH inline defined later in this header.
    NvU32 (*__kflcnRegRead__)(struct OBJGPU *, struct KernelFalcon *, NvU32);
    void (*__kflcnRegWrite__)(struct OBJGPU *, struct KernelFalcon *, NvU32, NvU32);
    NvU32 (*__kflcnRiscvRegRead__)(struct OBJGPU *, struct KernelFalcon *, NvU32);
    void (*__kflcnRiscvRegWrite__)(struct OBJGPU *, struct KernelFalcon *, NvU32, NvU32);
    NvBool (*__kflcnIsRiscvCpuEnabled__)(struct OBJGPU *, struct KernelFalcon *);
    NvBool (*__kflcnIsRiscvActive__)(struct OBJGPU *, struct KernelFalcon *);
    void (*__kflcnRiscvProgramBcr__)(struct OBJGPU *, struct KernelFalcon *, NvBool);
    void (*__kflcnSwitchToFalcon__)(struct OBJGPU *, struct KernelFalcon *);
    NV_STATUS (*__kflcnResetHw__)(struct OBJGPU *, struct KernelFalcon *);
    void (*__kflcnReset__)(struct OBJGPU *, struct KernelFalcon *);
    void (*__kflcnResetIntoRiscv__)(struct OBJGPU *, struct KernelFalcon *);
    void (*__kflcnStartCpu__)(struct OBJGPU *, struct KernelFalcon *);
    void (*__kflcnDisableCtxReq__)(struct OBJGPU *, struct KernelFalcon *);
    NV_STATUS (*__kflcnPreResetWait__)(struct OBJGPU *, struct KernelFalcon *);
    NV_STATUS (*__kflcnWaitForResetToFinish__)(struct OBJGPU *, struct KernelFalcon *);
    NV_STATUS (*__kflcnWaitForHalt__)(struct OBJGPU *, struct KernelFalcon *, NvU32, NvU32);
    NvU32 (*__kflcnReadIntrStatus__)(struct OBJGPU *, struct KernelFalcon *);
    void (*__kflcnIntrRetrigger__)(struct OBJGPU *, struct KernelFalcon *);
    NvU32 (*__kflcnMaskImemAddr__)(struct OBJGPU *, struct KernelFalcon *, NvU32);
    NvU32 (*__kflcnMaskDmemAddr__)(struct OBJGPU *, struct KernelFalcon *, NvU32);

    // Overrides of KernelCrashCatEngine virtuals (no OBJGPU parameter).
    void (*__kflcnReadEmem__)(struct KernelFalcon *, NvU64, NvU64, void *);
    NvU32 (*__kflcnGetWFL0Offset__)(struct KernelFalcon *);
    const NvU32 *(*__kflcnGetScratchOffsets__)(struct KernelFalcon *, NV_CRASHCAT_SCRATCH_GROUP_ID);
    void (*__kflcnUnload__)(struct KernelFalcon *);
    NvBool (*__kflcnConfigured__)(struct KernelFalcon *);
    NvU32 (*__kflcnPriRead__)(struct KernelFalcon *, NvU32);
    void (*__kflcnVprintf__)(struct KernelFalcon *, NvBool, const char *, va_list);
    void (*__kflcnPriWrite__)(struct KernelFalcon *, NvU32, NvU32);
    void (*__kflcnSyncBufferDescriptor__)(struct KernelFalcon *, CrashCatBufferDescriptor *, NvU32, NvU32);
    void *(*__kflcnMapBufferDescriptor__)(struct KernelFalcon *, CrashCatBufferDescriptor *);
    void (*__kflcnUnmapBufferDescriptor__)(struct KernelFalcon *, CrashCatBufferDescriptor *);
    void (*__kflcnReadDmem__)(struct KernelFalcon *, NvU32, NvU32, void *);

    // Engine configuration; field-for-field mirror of KernelFalconEngineConfig
    // (see above), presumably filled in by kflcnConfigureEngine — IMPL not in view.
    NvU32 registerBase;
    NvU32 riscvRegisterBase;
    NvU32 fbifBase;
    NvBool bBootFromHs;
    NvU32 pmcEnableMask;
    NvU32 bIsPmcDeviceEngine;
    ENGDESCRIPTOR physEngDesc;
    NvU32 ctxAttr;
    NvU32 ctxBufferSize;
    NvU32 addrSpaceList;
};
137
#ifndef __NVOC_CLASS_KernelFalcon_TYPEDEF__
#define __NVOC_CLASS_KernelFalcon_TYPEDEF__
typedef struct KernelFalcon KernelFalcon;
#endif /* __NVOC_CLASS_KernelFalcon_TYPEDEF__ */

// NVOC class id for KernelFalcon (generated; must match the class definition).
#ifndef __nvoc_class_id_KernelFalcon
#define __nvoc_class_id_KernelFalcon 0xb6b1af
#endif /* __nvoc_class_id_KernelFalcon */

// Class descriptor, defined in the generated .c file.
extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelFalcon;

// Upcast: any NVOC object embedding KernelFalcon caches a pointer to it.
#define __staticCast_KernelFalcon(pThis) \
    ((pThis)->__nvoc_pbase_KernelFalcon)

// Downcast: RTTI-checked when the class is enabled, NULL otherwise.
#ifdef __nvoc_kernel_falcon_h_disabled
#define __dynamicCast_KernelFalcon(pThis) ((KernelFalcon*)NULL)
#else //__nvoc_kernel_falcon_h_disabled
#define __dynamicCast_KernelFalcon(pThis) \
    ((KernelFalcon*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelFalcon)))
#endif //__nvoc_kernel_falcon_h_disabled


// Object construction entry points (generated implementations).
NV_STATUS __nvoc_objCreateDynamic_KernelFalcon(KernelFalcon**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_KernelFalcon(KernelFalcon**, Dynamic*, NvU32);
#define __objCreate_KernelFalcon(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_KernelFalcon((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
165
// Public method macros: each call site routes through the _DISPATCH inline,
// which invokes the per-object vtable pointer. The _HAL variants are aliases
// kept for NVOC naming-convention compatibility.
//
// Fix: the kflcnReadIntrStatus macros spelled their parameter "pKerneFlcn"
// (missing 'l'); renamed to pKernelFlcn for consistency. Macro parameter
// names are purely internal, so the expansion is unchanged.
#define kflcnRegRead(pGpu, pKernelFlcn, offset) kflcnRegRead_DISPATCH(pGpu, pKernelFlcn, offset)
#define kflcnRegRead_HAL(pGpu, pKernelFlcn, offset) kflcnRegRead_DISPATCH(pGpu, pKernelFlcn, offset)
#define kflcnRegWrite(pGpu, pKernelFlcn, offset, data) kflcnRegWrite_DISPATCH(pGpu, pKernelFlcn, offset, data)
#define kflcnRegWrite_HAL(pGpu, pKernelFlcn, offset, data) kflcnRegWrite_DISPATCH(pGpu, pKernelFlcn, offset, data)
#define kflcnRiscvRegRead(pGpu, pKernelFlcn, offset) kflcnRiscvRegRead_DISPATCH(pGpu, pKernelFlcn, offset)
#define kflcnRiscvRegRead_HAL(pGpu, pKernelFlcn, offset) kflcnRiscvRegRead_DISPATCH(pGpu, pKernelFlcn, offset)
#define kflcnRiscvRegWrite(pGpu, pKernelFlcn, offset, data) kflcnRiscvRegWrite_DISPATCH(pGpu, pKernelFlcn, offset, data)
#define kflcnRiscvRegWrite_HAL(pGpu, pKernelFlcn, offset, data) kflcnRiscvRegWrite_DISPATCH(pGpu, pKernelFlcn, offset, data)
#define kflcnIsRiscvCpuEnabled(pGpu, pKernelFlcn) kflcnIsRiscvCpuEnabled_DISPATCH(pGpu, pKernelFlcn)
#define kflcnIsRiscvCpuEnabled_HAL(pGpu, pKernelFlcn) kflcnIsRiscvCpuEnabled_DISPATCH(pGpu, pKernelFlcn)
#define kflcnIsRiscvActive(pGpu, pKernelFlcn) kflcnIsRiscvActive_DISPATCH(pGpu, pKernelFlcn)
#define kflcnIsRiscvActive_HAL(pGpu, pKernelFlcn) kflcnIsRiscvActive_DISPATCH(pGpu, pKernelFlcn)
#define kflcnRiscvProgramBcr(pGpu, pKernelFlcn, bBRFetch) kflcnRiscvProgramBcr_DISPATCH(pGpu, pKernelFlcn, bBRFetch)
#define kflcnRiscvProgramBcr_HAL(pGpu, pKernelFlcn, bBRFetch) kflcnRiscvProgramBcr_DISPATCH(pGpu, pKernelFlcn, bBRFetch)
#define kflcnSwitchToFalcon(pGpu, pKernelFlcn) kflcnSwitchToFalcon_DISPATCH(pGpu, pKernelFlcn)
#define kflcnSwitchToFalcon_HAL(pGpu, pKernelFlcn) kflcnSwitchToFalcon_DISPATCH(pGpu, pKernelFlcn)
#define kflcnResetHw(pGpu, pKernelFlcn) kflcnResetHw_DISPATCH(pGpu, pKernelFlcn)
#define kflcnReset(pGpu, pKernelFlcn) kflcnReset_DISPATCH(pGpu, pKernelFlcn)
#define kflcnReset_HAL(pGpu, pKernelFlcn) kflcnReset_DISPATCH(pGpu, pKernelFlcn)
#define kflcnResetIntoRiscv(pGpu, pKernelFlcn) kflcnResetIntoRiscv_DISPATCH(pGpu, pKernelFlcn)
#define kflcnResetIntoRiscv_HAL(pGpu, pKernelFlcn) kflcnResetIntoRiscv_DISPATCH(pGpu, pKernelFlcn)
#define kflcnStartCpu(pGpu, pKernelFlcn) kflcnStartCpu_DISPATCH(pGpu, pKernelFlcn)
#define kflcnStartCpu_HAL(pGpu, pKernelFlcn) kflcnStartCpu_DISPATCH(pGpu, pKernelFlcn)
#define kflcnDisableCtxReq(pGpu, pKernelFlcn) kflcnDisableCtxReq_DISPATCH(pGpu, pKernelFlcn)
#define kflcnDisableCtxReq_HAL(pGpu, pKernelFlcn) kflcnDisableCtxReq_DISPATCH(pGpu, pKernelFlcn)
#define kflcnPreResetWait(pGpu, pKernelFlcn) kflcnPreResetWait_DISPATCH(pGpu, pKernelFlcn)
#define kflcnPreResetWait_HAL(pGpu, pKernelFlcn) kflcnPreResetWait_DISPATCH(pGpu, pKernelFlcn)
#define kflcnWaitForResetToFinish(pGpu, pKernelFlcn) kflcnWaitForResetToFinish_DISPATCH(pGpu, pKernelFlcn)
#define kflcnWaitForResetToFinish_HAL(pGpu, pKernelFlcn) kflcnWaitForResetToFinish_DISPATCH(pGpu, pKernelFlcn)
#define kflcnWaitForHalt(pGpu, pKernelFlcn, timeoutUs, flags) kflcnWaitForHalt_DISPATCH(pGpu, pKernelFlcn, timeoutUs, flags)
#define kflcnWaitForHalt_HAL(pGpu, pKernelFlcn, timeoutUs, flags) kflcnWaitForHalt_DISPATCH(pGpu, pKernelFlcn, timeoutUs, flags)
#define kflcnReadIntrStatus(pGpu, pKernelFlcn) kflcnReadIntrStatus_DISPATCH(pGpu, pKernelFlcn)
#define kflcnReadIntrStatus_HAL(pGpu, pKernelFlcn) kflcnReadIntrStatus_DISPATCH(pGpu, pKernelFlcn)
#define kflcnIntrRetrigger(pGpu, pKernelFlcn) kflcnIntrRetrigger_DISPATCH(pGpu, pKernelFlcn)
#define kflcnIntrRetrigger_HAL(pGpu, pKernelFlcn) kflcnIntrRetrigger_DISPATCH(pGpu, pKernelFlcn)
#define kflcnMaskImemAddr(pGpu, pKernelFlcn, addr) kflcnMaskImemAddr_DISPATCH(pGpu, pKernelFlcn, addr)
#define kflcnMaskImemAddr_HAL(pGpu, pKernelFlcn, addr) kflcnMaskImemAddr_DISPATCH(pGpu, pKernelFlcn, addr)
#define kflcnMaskDmemAddr(pGpu, pKernelFlcn, addr) kflcnMaskDmemAddr_DISPATCH(pGpu, pKernelFlcn, addr)
#define kflcnMaskDmemAddr_HAL(pGpu, pKernelFlcn, addr) kflcnMaskDmemAddr_DISPATCH(pGpu, pKernelFlcn, addr)

// Inherited KernelCrashCatEngine methods (no OBJGPU parameter, no _HAL alias).
#define kflcnReadEmem(arg0, offset, size, pBuf) kflcnReadEmem_DISPATCH(arg0, offset, size, pBuf)
#define kflcnGetWFL0Offset(arg0) kflcnGetWFL0Offset_DISPATCH(arg0)
#define kflcnGetScratchOffsets(arg0, scratchGroupId) kflcnGetScratchOffsets_DISPATCH(arg0, scratchGroupId)
#define kflcnUnload(arg0) kflcnUnload_DISPATCH(arg0)
#define kflcnConfigured(arg0) kflcnConfigured_DISPATCH(arg0)
#define kflcnPriRead(arg0, offset) kflcnPriRead_DISPATCH(arg0, offset)
#define kflcnVprintf(arg0, bReportStart, fmt, args) kflcnVprintf_DISPATCH(arg0, bReportStart, fmt, args)
#define kflcnPriWrite(arg0, offset, data) kflcnPriWrite_DISPATCH(arg0, offset, data)
#define kflcnSyncBufferDescriptor(arg0, pBufDesc, offset, size) kflcnSyncBufferDescriptor_DISPATCH(arg0, pBufDesc, offset, size)
#define kflcnMapBufferDescriptor(arg0, pBufDesc) kflcnMapBufferDescriptor_DISPATCH(arg0, pBufDesc)
#define kflcnUnmapBufferDescriptor(arg0, pBufDesc) kflcnUnmapBufferDescriptor_DISPATCH(arg0, pBufDesc)
#define kflcnReadDmem(arg0, offset, size, pBuf) kflcnReadDmem_DISPATCH(arg0, offset, size, pBuf)
217 NvU32 kflcnRegRead_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset);
218
kflcnRegRead_474d46(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 offset)219 static inline NvU32 kflcnRegRead_474d46(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset) {
220 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
221 }
222
kflcnRegRead_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 offset)223 static inline NvU32 kflcnRegRead_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset) {
224 return pKernelFlcn->__kflcnRegRead__(pGpu, pKernelFlcn, offset);
225 }
226
227 void kflcnRegWrite_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset, NvU32 data);
228
kflcnRegWrite_f2d351(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 offset,NvU32 data)229 static inline void kflcnRegWrite_f2d351(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset, NvU32 data) {
230 NV_ASSERT_PRECOMP(0);
231 }
232
kflcnRegWrite_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 offset,NvU32 data)233 static inline void kflcnRegWrite_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset, NvU32 data) {
234 pKernelFlcn->__kflcnRegWrite__(pGpu, pKernelFlcn, offset, data);
235 }
236
237 NvU32 kflcnRiscvRegRead_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset);
238
kflcnRiscvRegRead_474d46(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 offset)239 static inline NvU32 kflcnRiscvRegRead_474d46(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset) {
240 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
241 }
242
kflcnRiscvRegRead_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 offset)243 static inline NvU32 kflcnRiscvRegRead_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset) {
244 return pKernelFlcn->__kflcnRiscvRegRead__(pGpu, pKernelFlcn, offset);
245 }
246
247 void kflcnRiscvRegWrite_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset, NvU32 data);
248
kflcnRiscvRegWrite_f2d351(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 offset,NvU32 data)249 static inline void kflcnRiscvRegWrite_f2d351(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset, NvU32 data) {
250 NV_ASSERT_PRECOMP(0);
251 }
252
kflcnRiscvRegWrite_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 offset,NvU32 data)253 static inline void kflcnRiscvRegWrite_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 offset, NvU32 data) {
254 pKernelFlcn->__kflcnRiscvRegWrite__(pGpu, pKernelFlcn, offset, data);
255 }
256
257 NvBool kflcnIsRiscvCpuEnabled_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
258
kflcnIsRiscvCpuEnabled_108313(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)259 static inline NvBool kflcnIsRiscvCpuEnabled_108313(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
260 NV_ASSERT_OR_RETURN_PRECOMP(0, ((NvBool)(0 != 0)));
261 }
262
kflcnIsRiscvCpuEnabled_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)263 static inline NvBool kflcnIsRiscvCpuEnabled_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
264 return pKernelFlcn->__kflcnIsRiscvCpuEnabled__(pGpu, pKernelFlcn);
265 }
266
267 NvBool kflcnIsRiscvActive_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
268
269 NvBool kflcnIsRiscvActive_GA10X(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
270
kflcnIsRiscvActive_108313(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)271 static inline NvBool kflcnIsRiscvActive_108313(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
272 NV_ASSERT_OR_RETURN_PRECOMP(0, ((NvBool)(0 != 0)));
273 }
274
kflcnIsRiscvActive_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)275 static inline NvBool kflcnIsRiscvActive_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
276 return pKernelFlcn->__kflcnIsRiscvActive__(pGpu, pKernelFlcn);
277 }
278
279 void kflcnRiscvProgramBcr_GA102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvBool bBRFetch);
280
kflcnRiscvProgramBcr_f2d351(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvBool bBRFetch)281 static inline void kflcnRiscvProgramBcr_f2d351(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvBool bBRFetch) {
282 NV_ASSERT_PRECOMP(0);
283 }
284
kflcnRiscvProgramBcr_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvBool bBRFetch)285 static inline void kflcnRiscvProgramBcr_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvBool bBRFetch) {
286 pKernelFlcn->__kflcnRiscvProgramBcr__(pGpu, pKernelFlcn, bBRFetch);
287 }
288
289 void kflcnSwitchToFalcon_GA10X(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
290
kflcnSwitchToFalcon_b3696a(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)291 static inline void kflcnSwitchToFalcon_b3696a(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
292 return;
293 }
294
kflcnSwitchToFalcon_f2d351(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)295 static inline void kflcnSwitchToFalcon_f2d351(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
296 NV_ASSERT_PRECOMP(0);
297 }
298
kflcnSwitchToFalcon_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)299 static inline void kflcnSwitchToFalcon_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
300 pKernelFlcn->__kflcnSwitchToFalcon__(pGpu, pKernelFlcn);
301 }
302
kflcnResetHw_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)303 static inline NV_STATUS kflcnResetHw_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
304 return pKernelFlcn->__kflcnResetHw__(pGpu, pKernelFlcn);
305 }
306
307 void kflcnReset_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
308
kflcnReset_f2d351(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)309 static inline void kflcnReset_f2d351(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
310 NV_ASSERT_PRECOMP(0);
311 }
312
kflcnReset_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)313 static inline void kflcnReset_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
314 pKernelFlcn->__kflcnReset__(pGpu, pKernelFlcn);
315 }
316
317 void kflcnResetIntoRiscv_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
318
319 void kflcnResetIntoRiscv_GA102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
320
kflcnResetIntoRiscv_f2d351(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)321 static inline void kflcnResetIntoRiscv_f2d351(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
322 NV_ASSERT_PRECOMP(0);
323 }
324
kflcnResetIntoRiscv_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)325 static inline void kflcnResetIntoRiscv_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
326 pKernelFlcn->__kflcnResetIntoRiscv__(pGpu, pKernelFlcn);
327 }
328
329 void kflcnStartCpu_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
330
kflcnStartCpu_f2d351(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)331 static inline void kflcnStartCpu_f2d351(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
332 NV_ASSERT_PRECOMP(0);
333 }
334
kflcnStartCpu_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)335 static inline void kflcnStartCpu_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
336 pKernelFlcn->__kflcnStartCpu__(pGpu, pKernelFlcn);
337 }
338
339 void kflcnDisableCtxReq_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
340
kflcnDisableCtxReq_f2d351(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)341 static inline void kflcnDisableCtxReq_f2d351(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
342 NV_ASSERT_PRECOMP(0);
343 }
344
kflcnDisableCtxReq_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)345 static inline void kflcnDisableCtxReq_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
346 pKernelFlcn->__kflcnDisableCtxReq__(pGpu, pKernelFlcn);
347 }
348
349 NV_STATUS kflcnPreResetWait_GA10X(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
350
kflcnPreResetWait_56cd7a(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)351 static inline NV_STATUS kflcnPreResetWait_56cd7a(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
352 return NV_OK;
353 }
354
kflcnPreResetWait_5baef9(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)355 static inline NV_STATUS kflcnPreResetWait_5baef9(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
356 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
357 }
358
kflcnPreResetWait_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)359 static inline NV_STATUS kflcnPreResetWait_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
360 return pKernelFlcn->__kflcnPreResetWait__(pGpu, pKernelFlcn);
361 }
362
363 NV_STATUS kflcnWaitForResetToFinish_GA102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
364
365 NV_STATUS kflcnWaitForResetToFinish_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
366
kflcnWaitForResetToFinish_5baef9(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)367 static inline NV_STATUS kflcnWaitForResetToFinish_5baef9(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
368 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
369 }
370
kflcnWaitForResetToFinish_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)371 static inline NV_STATUS kflcnWaitForResetToFinish_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
372 return pKernelFlcn->__kflcnWaitForResetToFinish__(pGpu, pKernelFlcn);
373 }
374
375 NV_STATUS kflcnWaitForHalt_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 timeoutUs, NvU32 flags);
376
kflcnWaitForHalt_5baef9(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 timeoutUs,NvU32 flags)377 static inline NV_STATUS kflcnWaitForHalt_5baef9(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 timeoutUs, NvU32 flags) {
378 NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
379 }
380
kflcnWaitForHalt_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 timeoutUs,NvU32 flags)381 static inline NV_STATUS kflcnWaitForHalt_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 timeoutUs, NvU32 flags) {
382 return pKernelFlcn->__kflcnWaitForHalt__(pGpu, pKernelFlcn, timeoutUs, flags);
383 }
384
385 NvU32 kflcnReadIntrStatus_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKerneFlcn);
386
387 NvU32 kflcnReadIntrStatus_GA102(struct OBJGPU *pGpu, struct KernelFalcon *pKerneFlcn);
388
kflcnReadIntrStatus_474d46(struct OBJGPU * pGpu,struct KernelFalcon * pKerneFlcn)389 static inline NvU32 kflcnReadIntrStatus_474d46(struct OBJGPU *pGpu, struct KernelFalcon *pKerneFlcn) {
390 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
391 }
392
kflcnReadIntrStatus_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKerneFlcn)393 static inline NvU32 kflcnReadIntrStatus_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKerneFlcn) {
394 return pKerneFlcn->__kflcnReadIntrStatus__(pGpu, pKerneFlcn);
395 }
396
397 void kflcnIntrRetrigger_GA100(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn);
398
kflcnIntrRetrigger_b3696a(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)399 static inline void kflcnIntrRetrigger_b3696a(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
400 return;
401 }
402
kflcnIntrRetrigger_f2d351(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)403 static inline void kflcnIntrRetrigger_f2d351(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
404 NV_ASSERT_PRECOMP(0);
405 }
406
kflcnIntrRetrigger_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn)407 static inline void kflcnIntrRetrigger_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn) {
408 pKernelFlcn->__kflcnIntrRetrigger__(pGpu, pKernelFlcn);
409 }
410
411 NvU32 kflcnMaskImemAddr_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 addr);
412
413 NvU32 kflcnMaskImemAddr_GA100(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 addr);
414
kflcnMaskImemAddr_474d46(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 addr)415 static inline NvU32 kflcnMaskImemAddr_474d46(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 addr) {
416 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
417 }
418
kflcnMaskImemAddr_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 addr)419 static inline NvU32 kflcnMaskImemAddr_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 addr) {
420 return pKernelFlcn->__kflcnMaskImemAddr__(pGpu, pKernelFlcn, addr);
421 }
422
423 NvU32 kflcnMaskDmemAddr_TU102(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 addr);
424
425 NvU32 kflcnMaskDmemAddr_GA100(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 addr);
426
kflcnMaskDmemAddr_474d46(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 addr)427 static inline NvU32 kflcnMaskDmemAddr_474d46(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 addr) {
428 NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
429 }
430
kflcnMaskDmemAddr_DISPATCH(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFlcn,NvU32 addr)431 static inline NvU32 kflcnMaskDmemAddr_DISPATCH(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFlcn, NvU32 addr) {
432 return pKernelFlcn->__kflcnMaskDmemAddr__(pGpu, pKernelFlcn, addr);
433 }
434
kflcnReadEmem_DISPATCH(struct KernelFalcon * arg0,NvU64 offset,NvU64 size,void * pBuf)435 static inline void kflcnReadEmem_DISPATCH(struct KernelFalcon *arg0, NvU64 offset, NvU64 size, void *pBuf) {
436 arg0->__kflcnReadEmem__(arg0, offset, size, pBuf);
437 }
438
kflcnGetWFL0Offset_DISPATCH(struct KernelFalcon * arg0)439 static inline NvU32 kflcnGetWFL0Offset_DISPATCH(struct KernelFalcon *arg0) {
440 return arg0->__kflcnGetWFL0Offset__(arg0);
441 }
442
kflcnGetScratchOffsets_DISPATCH(struct KernelFalcon * arg0,NV_CRASHCAT_SCRATCH_GROUP_ID scratchGroupId)443 static inline const NvU32 *kflcnGetScratchOffsets_DISPATCH(struct KernelFalcon *arg0, NV_CRASHCAT_SCRATCH_GROUP_ID scratchGroupId) {
444 return arg0->__kflcnGetScratchOffsets__(arg0, scratchGroupId);
445 }
446
kflcnUnload_DISPATCH(struct KernelFalcon * arg0)447 static inline void kflcnUnload_DISPATCH(struct KernelFalcon *arg0) {
448 arg0->__kflcnUnload__(arg0);
449 }
450
kflcnConfigured_DISPATCH(struct KernelFalcon * arg0)451 static inline NvBool kflcnConfigured_DISPATCH(struct KernelFalcon *arg0) {
452 return arg0->__kflcnConfigured__(arg0);
453 }
454
kflcnPriRead_DISPATCH(struct KernelFalcon * arg0,NvU32 offset)455 static inline NvU32 kflcnPriRead_DISPATCH(struct KernelFalcon *arg0, NvU32 offset) {
456 return arg0->__kflcnPriRead__(arg0, offset);
457 }
458
kflcnVprintf_DISPATCH(struct KernelFalcon * arg0,NvBool bReportStart,const char * fmt,va_list args)459 static inline void kflcnVprintf_DISPATCH(struct KernelFalcon *arg0, NvBool bReportStart, const char *fmt, va_list args) {
460 arg0->__kflcnVprintf__(arg0, bReportStart, fmt, args);
461 }
462
kflcnPriWrite_DISPATCH(struct KernelFalcon * arg0,NvU32 offset,NvU32 data)463 static inline void kflcnPriWrite_DISPATCH(struct KernelFalcon *arg0, NvU32 offset, NvU32 data) {
464 arg0->__kflcnPriWrite__(arg0, offset, data);
465 }
466
kflcnSyncBufferDescriptor_DISPATCH(struct KernelFalcon * arg0,CrashCatBufferDescriptor * pBufDesc,NvU32 offset,NvU32 size)467 static inline void kflcnSyncBufferDescriptor_DISPATCH(struct KernelFalcon *arg0, CrashCatBufferDescriptor *pBufDesc, NvU32 offset, NvU32 size) {
468 arg0->__kflcnSyncBufferDescriptor__(arg0, pBufDesc, offset, size);
469 }
470
kflcnMapBufferDescriptor_DISPATCH(struct KernelFalcon * arg0,CrashCatBufferDescriptor * pBufDesc)471 static inline void *kflcnMapBufferDescriptor_DISPATCH(struct KernelFalcon *arg0, CrashCatBufferDescriptor *pBufDesc) {
472 return arg0->__kflcnMapBufferDescriptor__(arg0, pBufDesc);
473 }
474
kflcnUnmapBufferDescriptor_DISPATCH(struct KernelFalcon * arg0,CrashCatBufferDescriptor * pBufDesc)475 static inline void kflcnUnmapBufferDescriptor_DISPATCH(struct KernelFalcon *arg0, CrashCatBufferDescriptor *pBufDesc) {
476 arg0->__kflcnUnmapBufferDescriptor__(arg0, pBufDesc);
477 }
478
kflcnReadDmem_DISPATCH(struct KernelFalcon * arg0,NvU32 offset,NvU32 size,void * pBuf)479 static inline void kflcnReadDmem_DISPATCH(struct KernelFalcon *arg0, NvU32 offset, NvU32 size, void *pBuf) {
480 arg0->__kflcnReadDmem__(arg0, offset, size, pBuf);
481 }
482
483 void kflcnConfigureEngine_IMPL(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFalcon, KernelFalconEngineConfig *pFalconConfig);
484
485 #ifdef __nvoc_kernel_falcon_h_disabled
kflcnConfigureEngine(struct OBJGPU * pGpu,struct KernelFalcon * pKernelFalcon,KernelFalconEngineConfig * pFalconConfig)486 static inline void kflcnConfigureEngine(struct OBJGPU *pGpu, struct KernelFalcon *pKernelFalcon, KernelFalconEngineConfig *pFalconConfig) {
487 NV_ASSERT_FAILED_PRECOMP("KernelFalcon was disabled!");
488 }
489 #else //__nvoc_kernel_falcon_h_disabled
490 #define kflcnConfigureEngine(pGpu, pKernelFalcon, pFalconConfig) kflcnConfigureEngine_IMPL(pGpu, pKernelFalcon, pFalconConfig)
491 #endif //__nvoc_kernel_falcon_h_disabled
492
493 NV_STATUS kflcnAllocContext_IMPL(struct OBJGPU *arg0, struct KernelFalcon *arg1, struct KernelChannel *arg2, NvU32 arg3);
494
495 #ifdef __nvoc_kernel_falcon_h_disabled
kflcnAllocContext(struct OBJGPU * arg0,struct KernelFalcon * arg1,struct KernelChannel * arg2,NvU32 arg3)496 static inline NV_STATUS kflcnAllocContext(struct OBJGPU *arg0, struct KernelFalcon *arg1, struct KernelChannel *arg2, NvU32 arg3) {
497 NV_ASSERT_FAILED_PRECOMP("KernelFalcon was disabled!");
498 return NV_ERR_NOT_SUPPORTED;
499 }
500 #else //__nvoc_kernel_falcon_h_disabled
501 #define kflcnAllocContext(arg0, arg1, arg2, arg3) kflcnAllocContext_IMPL(arg0, arg1, arg2, arg3)
502 #endif //__nvoc_kernel_falcon_h_disabled
503
504 NV_STATUS kflcnFreeContext_IMPL(struct OBJGPU *arg0, struct KernelFalcon *arg1, struct KernelChannel *arg2, NvU32 arg3);
505
506 #ifdef __nvoc_kernel_falcon_h_disabled
kflcnFreeContext(struct OBJGPU * arg0,struct KernelFalcon * arg1,struct KernelChannel * arg2,NvU32 arg3)507 static inline NV_STATUS kflcnFreeContext(struct OBJGPU *arg0, struct KernelFalcon *arg1, struct KernelChannel *arg2, NvU32 arg3) {
508 NV_ASSERT_FAILED_PRECOMP("KernelFalcon was disabled!");
509 return NV_ERR_NOT_SUPPORTED;
510 }
511 #else //__nvoc_kernel_falcon_h_disabled
512 #define kflcnFreeContext(arg0, arg1, arg2, arg3) kflcnFreeContext_IMPL(arg0, arg1, arg2, arg3)
513 #endif //__nvoc_kernel_falcon_h_disabled
514
// Looks up the KernelFalcon object for a physical engine descriptor
// (e.g. ENG_SEC2); implementation lives in the matching .c file.
struct KernelFalcon *kflcnGetKernelFalconForEngine_IMPL(struct OBJGPU *pGpu, ENGDESCRIPTOR physEngDesc);

#define kflcnGetKernelFalconForEngine(pGpu, physEngDesc) kflcnGetKernelFalconForEngine_IMPL(pGpu, physEngDesc)
518 #undef PRIVATE_FIELD
519
520
521 // Basic implementation of KernelFalcon that can be instantiated.
522
523 // Private field names are wrapped in PRIVATE_FIELD, which does nothing for
524 // the matching C source file, but causes diagnostics to be issued if another
525 // source file references the field.
526 #ifdef NVOC_KERNEL_FALCON_H_PRIVATE_ACCESS_ALLOWED
527 #define PRIVATE_FIELD(x) x
528 #else
529 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
530 #endif
531
532 struct GenericKernelFalcon {
533 const struct NVOC_RTTI *__nvoc_rtti;
534 struct KernelFalcon __nvoc_base_KernelFalcon;
535 struct IntrService __nvoc_base_IntrService;
536 struct Object __nvoc_base_Object;
537 struct CrashCatEngine *__nvoc_pbase_CrashCatEngine;
538 struct KernelCrashCatEngine *__nvoc_pbase_KernelCrashCatEngine;
539 struct KernelFalcon *__nvoc_pbase_KernelFalcon;
540 struct IntrService *__nvoc_pbase_IntrService;
541 struct Object *__nvoc_pbase_Object;
542 struct GenericKernelFalcon *__nvoc_pbase_GenericKernelFalcon;
543 NV_STATUS (*__gkflcnResetHw__)(struct OBJGPU *, struct GenericKernelFalcon *);
544 void (*__gkflcnRegisterIntrService__)(struct OBJGPU *, struct GenericKernelFalcon *, IntrServiceRecord *);
545 NV_STATUS (*__gkflcnServiceNotificationInterrupt__)(struct OBJGPU *, struct GenericKernelFalcon *, IntrServiceServiceNotificationInterruptArguments *);
546 void (*__gkflcnReadEmem__)(struct GenericKernelFalcon *, NvU64, NvU64, void *);
547 NvU32 (*__gkflcnGetWFL0Offset__)(struct GenericKernelFalcon *);
548 void (*__gkflcnUnload__)(struct GenericKernelFalcon *);
549 NvBool (*__gkflcnConfigured__)(struct GenericKernelFalcon *);
550 NvU32 (*__gkflcnPriRead__)(struct GenericKernelFalcon *, NvU32);
551 const NvU32 *(*__gkflcnGetScratchOffsets__)(struct GenericKernelFalcon *, NV_CRASHCAT_SCRATCH_GROUP_ID);
552 void (*__gkflcnRegWrite__)(struct OBJGPU *, struct GenericKernelFalcon *, NvU32, NvU32);
553 NvU32 (*__gkflcnMaskDmemAddr__)(struct OBJGPU *, struct GenericKernelFalcon *, NvU32);
554 void (*__gkflcnVprintf__)(struct GenericKernelFalcon *, NvBool, const char *, va_list);
555 NvBool (*__gkflcnClearInterrupt__)(struct OBJGPU *, struct GenericKernelFalcon *, IntrServiceClearInterruptArguments *);
556 void (*__gkflcnPriWrite__)(struct GenericKernelFalcon *, NvU32, NvU32);
557 void *(*__gkflcnMapBufferDescriptor__)(struct GenericKernelFalcon *, CrashCatBufferDescriptor *);
558 void (*__gkflcnSyncBufferDescriptor__)(struct GenericKernelFalcon *, CrashCatBufferDescriptor *, NvU32, NvU32);
559 NvU32 (*__gkflcnRegRead__)(struct OBJGPU *, struct GenericKernelFalcon *, NvU32);
560 void (*__gkflcnUnmapBufferDescriptor__)(struct GenericKernelFalcon *, CrashCatBufferDescriptor *);
561 NvU32 (*__gkflcnServiceInterrupt__)(struct OBJGPU *, struct GenericKernelFalcon *, IntrServiceServiceInterruptArguments *);
562 void (*__gkflcnReadDmem__)(struct GenericKernelFalcon *, NvU32, NvU32, void *);
563 };
564
565 #ifndef __NVOC_CLASS_GenericKernelFalcon_TYPEDEF__
566 #define __NVOC_CLASS_GenericKernelFalcon_TYPEDEF__
567 typedef struct GenericKernelFalcon GenericKernelFalcon;
568 #endif /* __NVOC_CLASS_GenericKernelFalcon_TYPEDEF__ */
569
570 #ifndef __nvoc_class_id_GenericKernelFalcon
571 #define __nvoc_class_id_GenericKernelFalcon 0xabcf08
572 #endif /* __nvoc_class_id_GenericKernelFalcon */
573
574 extern const struct NVOC_CLASS_DEF __nvoc_class_def_GenericKernelFalcon;
575
576 #define __staticCast_GenericKernelFalcon(pThis) \
577 ((pThis)->__nvoc_pbase_GenericKernelFalcon)
578
579 #ifdef __nvoc_kernel_falcon_h_disabled
580 #define __dynamicCast_GenericKernelFalcon(pThis) ((GenericKernelFalcon*)NULL)
581 #else //__nvoc_kernel_falcon_h_disabled
582 #define __dynamicCast_GenericKernelFalcon(pThis) \
583 ((GenericKernelFalcon*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(GenericKernelFalcon)))
584 #endif //__nvoc_kernel_falcon_h_disabled
585
586
587 NV_STATUS __nvoc_objCreateDynamic_GenericKernelFalcon(GenericKernelFalcon**, Dynamic*, NvU32, va_list);
588
589 NV_STATUS __nvoc_objCreate_GenericKernelFalcon(GenericKernelFalcon**, Dynamic*, NvU32, struct OBJGPU * arg_pGpu, KernelFalconEngineConfig * arg_pFalconConfig);
590 #define __objCreate_GenericKernelFalcon(ppNewObj, pParent, createFlags, arg_pGpu, arg_pFalconConfig) \
591 __nvoc_objCreate_GenericKernelFalcon((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pGpu, arg_pFalconConfig)
592
// Public method names map to the _DISPATCH inlines, which route through the
// object's vtable (see struct GenericKernelFalcon above).
#define gkflcnResetHw(pGpu, pGenKernFlcn) gkflcnResetHw_DISPATCH(pGpu, pGenKernFlcn)
#define gkflcnRegisterIntrService(arg0, arg1, arg2) gkflcnRegisterIntrService_DISPATCH(arg0, arg1, arg2)
#define gkflcnServiceNotificationInterrupt(arg0, arg1, arg2) gkflcnServiceNotificationInterrupt_DISPATCH(arg0, arg1, arg2)
#define gkflcnReadEmem(arg0, offset, size, pBuf) gkflcnReadEmem_DISPATCH(arg0, offset, size, pBuf)
#define gkflcnGetWFL0Offset(arg0) gkflcnGetWFL0Offset_DISPATCH(arg0)
#define gkflcnUnload(arg0) gkflcnUnload_DISPATCH(arg0)
#define gkflcnConfigured(arg0) gkflcnConfigured_DISPATCH(arg0)
#define gkflcnPriRead(arg0, offset) gkflcnPriRead_DISPATCH(arg0, offset)
#define gkflcnGetScratchOffsets(arg0, scratchGroupId) gkflcnGetScratchOffsets_DISPATCH(arg0, scratchGroupId)
#define gkflcnRegWrite(pGpu, pKernelFlcn, offset, data) gkflcnRegWrite_DISPATCH(pGpu, pKernelFlcn, offset, data)
#define gkflcnMaskDmemAddr(pGpu, pKernelFlcn, addr) gkflcnMaskDmemAddr_DISPATCH(pGpu, pKernelFlcn, addr)
#define gkflcnVprintf(arg0, bReportStart, fmt, args) gkflcnVprintf_DISPATCH(arg0, bReportStart, fmt, args)
#define gkflcnClearInterrupt(pGpu, pIntrService, pParams) gkflcnClearInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define gkflcnPriWrite(arg0, offset, data) gkflcnPriWrite_DISPATCH(arg0, offset, data)
#define gkflcnMapBufferDescriptor(arg0, pBufDesc) gkflcnMapBufferDescriptor_DISPATCH(arg0, pBufDesc)
#define gkflcnSyncBufferDescriptor(arg0, pBufDesc, offset, size) gkflcnSyncBufferDescriptor_DISPATCH(arg0, pBufDesc, offset, size)
#define gkflcnRegRead(pGpu, pKernelFlcn, offset) gkflcnRegRead_DISPATCH(pGpu, pKernelFlcn, offset)
#define gkflcnUnmapBufferDescriptor(arg0, pBufDesc) gkflcnUnmapBufferDescriptor_DISPATCH(arg0, pBufDesc)
#define gkflcnServiceInterrupt(pGpu, pIntrService, pParams) gkflcnServiceInterrupt_DISPATCH(pGpu, pIntrService, pParams)
#define gkflcnReadDmem(arg0, offset, size, pBuf) gkflcnReadDmem_DISPATCH(arg0, offset, size, pBuf)
613 NV_STATUS gkflcnResetHw_IMPL(struct OBJGPU *pGpu, struct GenericKernelFalcon *pGenKernFlcn);
614
gkflcnResetHw_DISPATCH(struct OBJGPU * pGpu,struct GenericKernelFalcon * pGenKernFlcn)615 static inline NV_STATUS gkflcnResetHw_DISPATCH(struct OBJGPU *pGpu, struct GenericKernelFalcon *pGenKernFlcn) {
616 return pGenKernFlcn->__gkflcnResetHw__(pGpu, pGenKernFlcn);
617 }
618
619 void gkflcnRegisterIntrService_IMPL(struct OBJGPU *arg0, struct GenericKernelFalcon *arg1, IntrServiceRecord arg2[171]);
620
gkflcnRegisterIntrService_DISPATCH(struct OBJGPU * arg0,struct GenericKernelFalcon * arg1,IntrServiceRecord arg2[171])621 static inline void gkflcnRegisterIntrService_DISPATCH(struct OBJGPU *arg0, struct GenericKernelFalcon *arg1, IntrServiceRecord arg2[171]) {
622 arg1->__gkflcnRegisterIntrService__(arg0, arg1, arg2);
623 }
624
625 NV_STATUS gkflcnServiceNotificationInterrupt_IMPL(struct OBJGPU *arg0, struct GenericKernelFalcon *arg1, IntrServiceServiceNotificationInterruptArguments *arg2);
626
gkflcnServiceNotificationInterrupt_DISPATCH(struct OBJGPU * arg0,struct GenericKernelFalcon * arg1,IntrServiceServiceNotificationInterruptArguments * arg2)627 static inline NV_STATUS gkflcnServiceNotificationInterrupt_DISPATCH(struct OBJGPU *arg0, struct GenericKernelFalcon *arg1, IntrServiceServiceNotificationInterruptArguments *arg2) {
628 return arg1->__gkflcnServiceNotificationInterrupt__(arg0, arg1, arg2);
629 }
630
// Virtual dispatch: reads `size` bytes of EMEM starting at `offset` into pBuf.
static inline void gkflcnReadEmem_DISPATCH(struct GenericKernelFalcon *arg0, NvU64 offset, NvU64 size, void *pBuf) {
    arg0->__gkflcnReadEmem__(arg0, offset, size, pBuf);
}
634
gkflcnGetWFL0Offset_DISPATCH(struct GenericKernelFalcon * arg0)635 static inline NvU32 gkflcnGetWFL0Offset_DISPATCH(struct GenericKernelFalcon *arg0) {
636 return arg0->__gkflcnGetWFL0Offset__(arg0);
637 }
638
gkflcnUnload_DISPATCH(struct GenericKernelFalcon * arg0)639 static inline void gkflcnUnload_DISPATCH(struct GenericKernelFalcon *arg0) {
640 arg0->__gkflcnUnload__(arg0);
641 }
642
gkflcnConfigured_DISPATCH(struct GenericKernelFalcon * arg0)643 static inline NvBool gkflcnConfigured_DISPATCH(struct GenericKernelFalcon *arg0) {
644 return arg0->__gkflcnConfigured__(arg0);
645 }
646
// Virtual dispatch: reads a PRI register at `offset`.
static inline NvU32 gkflcnPriRead_DISPATCH(struct GenericKernelFalcon *arg0, NvU32 offset) {
    return arg0->__gkflcnPriRead__(arg0, offset);
}
650
gkflcnGetScratchOffsets_DISPATCH(struct GenericKernelFalcon * arg0,NV_CRASHCAT_SCRATCH_GROUP_ID scratchGroupId)651 static inline const NvU32 *gkflcnGetScratchOffsets_DISPATCH(struct GenericKernelFalcon *arg0, NV_CRASHCAT_SCRATCH_GROUP_ID scratchGroupId) {
652 return arg0->__gkflcnGetScratchOffsets__(arg0, scratchGroupId);
653 }
654
// Virtual dispatch: writes `data` to the register at `offset`.
static inline void gkflcnRegWrite_DISPATCH(struct OBJGPU *pGpu, struct GenericKernelFalcon *pKernelFlcn, NvU32 offset, NvU32 data) {
    pKernelFlcn->__gkflcnRegWrite__(pGpu, pKernelFlcn, offset, data);
}
658
// Virtual dispatch: masks a DMEM address via the vtable implementation.
static inline NvU32 gkflcnMaskDmemAddr_DISPATCH(struct OBJGPU *pGpu, struct GenericKernelFalcon *pKernelFlcn, NvU32 addr) {
    return pKernelFlcn->__gkflcnMaskDmemAddr__(pGpu, pKernelFlcn, addr);
}
662
// Virtual dispatch: printf-style logging through the vtable (va_list variant).
static inline void gkflcnVprintf_DISPATCH(struct GenericKernelFalcon *arg0, NvBool bReportStart, const char *fmt, va_list args) {
    arg0->__gkflcnVprintf__(arg0, bReportStart, fmt, args);
}
666
gkflcnClearInterrupt_DISPATCH(struct OBJGPU * pGpu,struct GenericKernelFalcon * pIntrService,IntrServiceClearInterruptArguments * pParams)667 static inline NvBool gkflcnClearInterrupt_DISPATCH(struct OBJGPU *pGpu, struct GenericKernelFalcon *pIntrService, IntrServiceClearInterruptArguments *pParams) {
668 return pIntrService->__gkflcnClearInterrupt__(pGpu, pIntrService, pParams);
669 }
670
// Virtual dispatch: writes `data` to the PRI register at `offset`.
static inline void gkflcnPriWrite_DISPATCH(struct GenericKernelFalcon *arg0, NvU32 offset, NvU32 data) {
    arg0->__gkflcnPriWrite__(arg0, offset, data);
}
674
// Virtual dispatch: maps the described buffer; returns the CPU-visible pointer.
static inline void *gkflcnMapBufferDescriptor_DISPATCH(struct GenericKernelFalcon *arg0, CrashCatBufferDescriptor *pBufDesc) {
    return arg0->__gkflcnMapBufferDescriptor__(arg0, pBufDesc);
}
678
// Virtual dispatch: synchronizes `size` bytes of the described buffer at `offset`.
static inline void gkflcnSyncBufferDescriptor_DISPATCH(struct GenericKernelFalcon *arg0, CrashCatBufferDescriptor *pBufDesc, NvU32 offset, NvU32 size) {
    arg0->__gkflcnSyncBufferDescriptor__(arg0, pBufDesc, offset, size);
}
682
// Virtual dispatch: reads the register at `offset`.
static inline NvU32 gkflcnRegRead_DISPATCH(struct OBJGPU *pGpu, struct GenericKernelFalcon *pKernelFlcn, NvU32 offset) {
    return pKernelFlcn->__gkflcnRegRead__(pGpu, pKernelFlcn, offset);
}
686
// Virtual dispatch: unmaps a buffer previously mapped via MapBufferDescriptor.
static inline void gkflcnUnmapBufferDescriptor_DISPATCH(struct GenericKernelFalcon *arg0, CrashCatBufferDescriptor *pBufDesc) {
    arg0->__gkflcnUnmapBufferDescriptor__(arg0, pBufDesc);
}
690
gkflcnServiceInterrupt_DISPATCH(struct OBJGPU * pGpu,struct GenericKernelFalcon * pIntrService,IntrServiceServiceInterruptArguments * pParams)691 static inline NvU32 gkflcnServiceInterrupt_DISPATCH(struct OBJGPU *pGpu, struct GenericKernelFalcon *pIntrService, IntrServiceServiceInterruptArguments *pParams) {
692 return pIntrService->__gkflcnServiceInterrupt__(pGpu, pIntrService, pParams);
693 }
694
// Virtual dispatch: reads `size` bytes of DMEM starting at `offset` into pBuf.
static inline void gkflcnReadDmem_DISPATCH(struct GenericKernelFalcon *arg0, NvU32 offset, NvU32 size, void *pBuf) {
    arg0->__gkflcnReadDmem__(arg0, offset, size, pBuf);
}
698
699 NV_STATUS gkflcnConstruct_IMPL(struct GenericKernelFalcon *arg_pGenKernFlcn, struct OBJGPU *arg_pGpu, KernelFalconEngineConfig *arg_pFalconConfig);
700
701 #define __nvoc_gkflcnConstruct(arg_pGenKernFlcn, arg_pGpu, arg_pFalconConfig) gkflcnConstruct_IMPL(arg_pGenKernFlcn, arg_pGpu, arg_pFalconConfig)
702 #undef PRIVATE_FIELD
703
704
705 #endif // KERNEL_FALCON_H
706
707 #ifdef __cplusplus
708 } // extern "C"
709 #endif
710
711 #endif // _G_KERNEL_FALCON_NVOC_H_
712