1
2 #ifndef _G_INTR_NVOC_H_
3 #define _G_INTR_NVOC_H_
4 #include "nvoc/runtime.h"
5
6 // Version of generated metadata structures
7 #ifdef NVOC_METADATA_VERSION
8 #undef NVOC_METADATA_VERSION
9 #endif
10 #define NVOC_METADATA_VERSION 0
11
12 #ifdef __cplusplus
13 extern "C" {
14 #endif
15
16 /*
17 * SPDX-FileCopyrightText: Copyright (c) 2006-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
18 * SPDX-License-Identifier: MIT
19 *
20 * Permission is hereby granted, free of charge, to any person obtaining a
21 * copy of this software and associated documentation files (the "Software"),
22 * to deal in the Software without restriction, including without limitation
23 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
24 * and/or sell copies of the Software, and to permit persons to whom the
25 * Software is furnished to do so, subject to the following conditions:
26 *
27 * The above copyright notice and this permission notice shall be included in
28 * all copies or substantial portions of the Software.
29 *
30 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
31 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
32 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
33 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
34 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
35 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
36 * DEALINGS IN THE SOFTWARE.
37 */
38
39 #pragma once
40 #include "g_intr_nvoc.h"
41
42 #ifndef INTR_H
43 #define INTR_H
44
45
46 #include "kernel/gpu/eng_state.h"
47 #include "kernel/gpu/gpu.h"
48 #include "kernel/gpu/intr/engine_idx.h"
49 #include "kernel/gpu/intr/intr_common.h"
50 #include "kernel/gpu/intr/intr_service.h"
51
52 #include "ctrl/ctrl2080/ctrl2080internal.h"
53
54 #include "dev_ctrl_defines.h"
55 #include "libraries/containers/list.h"
56 #include "libraries/containers/vector.h"
57 #include "libraries/nvoc/utility.h"
58 #include "libraries/utils/nvbitvector.h"
59
60
61 //
62 // Interrupt Type
63 //
64 // Abstraction of the disabled/software/hardware enumeration in NV_PMC_INTR_EN_0_INTA
65 // !!! This enumeration must exactly match NV_PMC_INTR_EN_0_INTA !!!
66 //
67
68 #define INTERRUPT_TYPE_DISABLED 0
69 #define INTERRUPT_TYPE_HARDWARE 1
70 #define INTERRUPT_TYPE_SOFTWARE 2
71 #define INTERRUPT_TYPE_MULTI 3
72 #define INTERRUPT_TYPE_MAX INTERRUPT_TYPE_MULTI
73
74 #define INTERRUPT_MASK_DISABLED 0x00000000
75 #define INTERRUPT_MASK_HARDWARE 0x7fffffff
76 #define INTERRUPT_MASK_SOFTWARE 0x80000000
77 #define INTERRUPT_MASK_ENABLED 0xffffffff
78
79 /**
80 * @brief Each entry corresponds to a top level interrupt
81 *
82 * This structure will eventually be replaced by #InterruptEntry.
83 */
84 typedef struct
85 {
86 /** MC_ENGINE_IDX* value */
87 NvU16 mcEngine;
88 /** Bit in top level PMC interrupt registers */
89 NvU32 pmcIntrMask;
90 /** Interrupt vector in CTRL interrupt tree (Turing+). For non-host driven
91 * engines, this is their single interrupt vector at top level; while for
92 * host driven engines, this is their stalling interrupt vector
93 */
94 NvU32 intrVector;
95 /** Nonstalling interrupt vector in CTRL interrupt tree (Turing+). Only
96 * valid for host driven engines. NV_INTR_VECTOR_INVALID signifies
97 * unavailable
98 */
99 NvU32 intrVectorNonStall;
100 } INTR_TABLE_ENTRY;
101
102 #define INTR_TABLE_MAX_INTRS_PER_ENTRY 6
103
104 MAKE_VECTOR(InterruptTable, INTR_TABLE_ENTRY);
105
106 /*!
107 * Mapping from leaf level interrupt to conceptual interrupt name.
108 *
109 * - The interrupt vector is implicit from the tree / index of an array which
110 * contains this struct.
111 * - The target is a conceptual name that represents the interrupt identified by
112 * (MC_ENGINE_IDX*, INTR_KIND*) pair.
113 * - A service routine may or may not be actually registered to handle the
114 * interrupt.
115 * - Multiple physical vectors can map to the same conceptual interrupt.
116 */
117 typedef struct
118 {
119 /*!
120 * MC_ENGINE_IDX* value.
121 *
122 * A value of #MC_ENGINE_IDX_NULL means that the vector corresponding to
123 * this entry is unused. Use #interruptEntryIsEmpty to check this.
124 */
125 NvU16 mcEngine;
126
127 /*!
128 * INTR_KIND_* value.
129 *
130 * This allows multiple logically separate interrupts to map to a service
131 * routine via a common mcEngine value.
132 */
133 INTR_KIND intrKind;
134
135 /*!
136 * If the interrupt should be handled.
137 *
138 * If this is false:
139 * - The interrupt may need to be visible for clients, VF, etc (error
140 * containment).
141 * - It can be an interrupt to be triggered to notify RM running in a
142 * different environment: doorbell, GSP triggered notifications to CPU.
143 * - The interrupt does not need to be serviced. There should be no
144 * corresponding entry in the #intrServiceTable.
145 */
146 NvBool bService;
147 } InterruptEntry;
148
149 static NV_FORCEINLINE NvBool
interruptEntryIsEmpty(const InterruptEntry * pEntry)150 interruptEntryIsEmpty(const InterruptEntry *pEntry)
151 {
152 return pEntry->mcEngine == MC_ENGINE_IDX_NULL;
153 }
154
155
156 //
157 // Default value for intrStuckThreshold
158 #define INTR_STUCK_THRESHOLD 1000
159
160 #define INTR_TABLE_INIT_KERNEL (1 << 0)
161 #define INTR_TABLE_INIT_PHYSICAL (1 << 1)
162
163 /**
164 * @brief This enum specifies the type of DPC node
165 * INTERRUPT_BASED_DPC: DPC queued for an interrupt source
166 * SPECIAL_DPC : DPC queued within processing of another interrupt
167 * source
168 *
169 * Currently only used on Fermi+.
170 */
171 typedef enum
172 {
173 INTERRUPT_BASED_DPC=0,
174 SPECIAL_DPC
175 } DPCTYPE;
176
177 /**
178 * @brief This is a structure for a node on the DPC Queue
179 * dpctype: Type of DPC for processing
180 * dpcdata: Data required for dpc processing
181 * This union will contain dpctype specific data
182 * pNext : Pointer to the next DPC node
183 *
184 * Currently only used on Fermi+.
185 */
186 typedef struct _DPCNODE
187 {
188 DPCTYPE dpctype;
189 union _dpcdata
190 {
191 MC_ENGINE_BITVECTOR pendingEngines;
192 } dpcdata;
193
194 struct _DPCNODE *pNext;
195 } DPCNODE;
196
197 /**
198 * @brief This is a structure for the DPC Queue
199 * numEntries: Number of entries currently on DPC queue (debugging purpose)
200 * pFront : Front pointer for the queue
201 * pRear : Rear pointer for the queue
202 *
203 * Currently only used on Fermi+.
204 */
205 typedef struct
206 {
207 NvU32 numEntries;
208 DPCNODE *pFront;
209 DPCNODE *pRear;
210 } DPCQUEUE;
211
// Data related to PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING
typedef struct
{
    NvU32 flags;  // INTR_MASK_FLAGS_* bits (see INTR_MASK_FLAGS_ISR_SKIP_MASK_UPDATE below)
    NvU32 cached; // Pascal+, to implement intr mask in SW.
    // Engines whose interrupts remain unblocked by the SW intr mask
    // NOTE(review): semantics inferred from the name — confirm against users.
    MC_ENGINE_BITVECTOR engMaskUnblocked;
} INTR_MASK;
219
220 //
221 // interrupt mask information used for lazy interrupt disable and interrupt
222 // masking for locking.
223 //
224 typedef struct
225 {
226 NvU32 intrEnable;
227 MC_ENGINE_BITVECTOR intrMask;
228 } INTR_MASK_CTX;
229
230
231 //!
232 //! List of interrupt trees that RM sees.
233 //!
234 //! Kernel RM should determine number of implemented vectors using the actual
235 //! interrupt table fetched.
236 //!
237 typedef enum
238 {
239 INTR_TREE_CPU,
240 INTR_TREE_COUNT
241 } INTR_TREE;
242
243
244 //
245 // IntrMask Locking Flag Defines
246 //
247 #define INTR_MASK_FLAGS_ISR_SKIP_MASK_UPDATE NVBIT(0)
248
249
250 struct Device;
251
252 #ifndef __NVOC_CLASS_Device_TYPEDEF__
253 #define __NVOC_CLASS_Device_TYPEDEF__
254 typedef struct Device Device;
255 #endif /* __NVOC_CLASS_Device_TYPEDEF__ */
256
257 #ifndef __nvoc_class_id_Device
258 #define __nvoc_class_id_Device 0xe0ac20
259 #endif /* __nvoc_class_id_Device */
260
261
262
263
264 // Private field names are wrapped in PRIVATE_FIELD, which does nothing for
265 // the matching C source file, but causes diagnostics to be issued if another
266 // source file references the field.
267 #ifdef NVOC_INTR_H_PRIVATE_ACCESS_ALLOWED
268 #define PRIVATE_FIELD(x) x
269 #else
270 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
271 #endif
272
273
/*!
 * NVOC-generated class layout for the Intr engine object.
 *
 * Intr derives from OBJENGSTATE (embedded as the first member). The vtable
 * below holds per-object function pointers that the *_DISPATCH helpers and
 * wrapper macros defined later in this header forward through; the concrete
 * HAL/virtual targets are installed by the generated constructor code.
 *
 * NOTE(review): field layout and ordering are generated — do not reorder or
 * edit fields by hand; regenerate via NVOC instead.
 */
struct Intr {

    // Metadata
    const struct NVOC_RTTI *__nvoc_rtti;

    // Parent (i.e. superclass or base class) object pointers
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;

    // Ancestor object pointers for `staticCast` feature
    struct Object *__nvoc_pbase_Object; // obj super^2
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE; // engstate super
    struct Intr *__nvoc_pbase_Intr; // intr

    // Vtable with 32 per-object function pointers
    NV_STATUS (*__intrConstructEngine__)(OBJGPU *, struct Intr * /*this*/, ENGDESCRIPTOR); // virtual override (engstate) base (engstate)
    NV_STATUS (*__intrStatePreInitLocked__)(OBJGPU *, struct Intr * /*this*/); // virtual override (engstate) base (engstate)
    NV_STATUS (*__intrStateInitUnlocked__)(OBJGPU *, struct Intr * /*this*/); // virtual override (engstate) base (engstate)
    NV_STATUS (*__intrStateInitLocked__)(OBJGPU *, struct Intr * /*this*/); // virtual override (engstate) base (engstate)
    void (*__intrStateDestroy__)(OBJGPU *, struct Intr * /*this*/); // virtual override (engstate) base (engstate)
    NvU32 (*__intrDecodeStallIntrEn__)(OBJGPU *, struct Intr * /*this*/, NvU32); // halified (2 hals) body
    void (*__intrServiceVirtual__)(OBJGPU *, struct Intr * /*this*/); // halified (2 hals) body
    NV_STATUS (*__intrTriggerPrivDoorbell__)(OBJGPU *, struct Intr * /*this*/, NvU32); // halified (2 hals) body
    void (*__intrGetLocklessVectorsInRmSubtree__)(OBJGPU *, struct Intr * /*this*/, NvU32 (*)[2]); // halified (2 hals) body
    void (*__intrSetDisplayInterruptEnable__)(OBJGPU *, struct Intr * /*this*/, NvBool, struct THREAD_STATE_NODE *); // halified (2 hals) body
    NvU32 (*__intrReadRegTopEnSet__)(OBJGPU *, struct Intr * /*this*/, NvU32, struct THREAD_STATE_NODE *); // halified (2 hals) body
    void (*__intrWriteRegTopEnSet__)(OBJGPU *, struct Intr * /*this*/, NvU32, NvU32, struct THREAD_STATE_NODE *); // halified (2 hals) body
    void (*__intrWriteRegTopEnClear__)(OBJGPU *, struct Intr * /*this*/, NvU32, NvU32, struct THREAD_STATE_NODE *); // halified (2 hals) body
    NvU32 (*__intrGetNumLeaves__)(OBJGPU *, struct Intr * /*this*/); // halified (2 hals) body
    NvU32 (*__intrGetLeafSize__)(OBJGPU *, struct Intr * /*this*/); // halified (2 hals) body
    NvU64 (*__intrGetIntrTopNonStallMask__)(OBJGPU *, struct Intr * /*this*/); // halified (2 hals) body
    void (*__intrSanityCheckEngineIntrStallVector__)(OBJGPU *, struct Intr * /*this*/, NvU32, NvU16); // halified (3 hals) body
    void (*__intrSanityCheckEngineIntrNotificationVector__)(OBJGPU *, struct Intr * /*this*/, NvU32, NvU16); // halified (3 hals) body
    NV_STATUS (*__intrStateLoad__)(OBJGPU *, struct Intr * /*this*/, NvU32); // virtual halified (singleton optimized) override (engstate) base (engstate) body
    NV_STATUS (*__intrStateUnload__)(OBJGPU *, struct Intr * /*this*/, NvU32); // virtual halified (singleton optimized) override (engstate) base (engstate) body
    NV_STATUS (*__intrInitSubtreeMap__)(OBJGPU *, struct Intr * /*this*/); // halified (3 hals) body
    NV_STATUS (*__intrInitInterruptTable__)(OBJGPU *, struct Intr * /*this*/); // halified (2 hals)
    NV_STATUS (*__intrSetIntrMask__)(OBJGPU *, struct Intr * /*this*/, union MC_ENGINE_BITVECTOR *, struct THREAD_STATE_NODE *); // halified (2 hals) body
    void (*__intrSetIntrEnInHw__)(OBJGPU *, struct Intr * /*this*/, NvU32, struct THREAD_STATE_NODE *); // halified (2 hals) body
    NvU32 (*__intrGetIntrEnFromHw__)(OBJGPU *, struct Intr * /*this*/, struct THREAD_STATE_NODE *); // halified (2 hals) body
    void (*__intrInitMissing__)(struct OBJGPU *, struct Intr * /*this*/); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__intrStatePreInitUnlocked__)(struct OBJGPU *, struct Intr * /*this*/); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__intrStatePreLoad__)(struct OBJGPU *, struct Intr * /*this*/, NvU32); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__intrStatePostLoad__)(struct OBJGPU *, struct Intr * /*this*/, NvU32); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__intrStatePreUnload__)(struct OBJGPU *, struct Intr * /*this*/, NvU32); // virtual inherited (engstate) base (engstate)
    NV_STATUS (*__intrStatePostUnload__)(struct OBJGPU *, struct Intr * /*this*/, NvU32); // virtual inherited (engstate) base (engstate)
    NvBool (*__intrIsPresent__)(struct OBJGPU *, struct Intr * /*this*/); // virtual inherited (engstate) base (engstate)

    // 8 PDB properties
    NvBool PDB_PROP_INTR_ENABLE_DETAILED_LOGS;
    NvBool PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC;
    NvBool PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS;
    NvBool PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET;
    NvBool PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE;
    NvBool PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING;
    NvBool PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING;
    NvBool PDB_PROP_INTR_MASK_SUPPORTED;

    // Data members
    NvU32 nonStallPmcIntrMask;
    NvU64 uvmSharedCpuLeafEn;
    NvU64 uvmSharedCpuLeafEnDisableMask;
    NvU32 replayableFaultIntrVector;
    NvU32 accessCntrIntrVector;
    NvU32 displayIntrVector;
    NvU64 intrTopEnMask;
    InterruptTable intrTable; // vector of INTR_TABLE_ENTRY (see MAKE_VECTOR above)
    IntrServiceRecord intrServiceTable[175];
    InterruptEntry *(vectorToMcIdx[1]); // per-tree vector->entry lookup tables -- TODO confirm indexing by INTR_TREE
    NvLength vectorToMcIdxCounts[1];
    NvBool bDefaultNonstallNotify;
    NvBool bUseLegacyVectorAssignment;
    NV2080_INTR_CATEGORY_SUBTREE_MAP subtreeMap[7];
    NvBool bDpcStarted;
    union MC_ENGINE_BITVECTOR pmcIntrPending;
    DPCQUEUE dpcQueue;
    NvU32 intrStuckThreshold; // see INTR_STUCK_THRESHOLD default above
    INTR_MASK intrMask;
    union MC_ENGINE_BITVECTOR helperEngineMask;
    NvU32 intrEn0;
    NvU32 intrCachedEn0;
    NvU32 intrCachedEnSet;
    NvU32 intrCachedEnClear;
    NvU32 intrEn0Orig;
    NvBool halIntrEnabled;
    NvU32 saveIntrEn0;
};
360
361 #ifndef __NVOC_CLASS_Intr_TYPEDEF__
362 #define __NVOC_CLASS_Intr_TYPEDEF__
363 typedef struct Intr Intr;
364 #endif /* __NVOC_CLASS_Intr_TYPEDEF__ */
365
366 #ifndef __nvoc_class_id_Intr
367 #define __nvoc_class_id_Intr 0xc06e44
368 #endif /* __nvoc_class_id_Intr */
369
370 // Casting support
371 extern const struct NVOC_CLASS_DEF __nvoc_class_def_Intr;
372
373 #define __staticCast_Intr(pThis) \
374 ((pThis)->__nvoc_pbase_Intr)
375
376 #ifdef __nvoc_intr_h_disabled
377 #define __dynamicCast_Intr(pThis) ((Intr*)NULL)
378 #else //__nvoc_intr_h_disabled
379 #define __dynamicCast_Intr(pThis) \
380 ((Intr*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Intr)))
381 #endif //__nvoc_intr_h_disabled
382
383 // Property macros
384 #define PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC_BASE_CAST
385 #define PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC_BASE_NAME PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC
386 #define PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE_BASE_CAST
387 #define PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE_BASE_NAME PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE
388 #define PDB_PROP_INTR_MASK_SUPPORTED_BASE_CAST
389 #define PDB_PROP_INTR_MASK_SUPPORTED_BASE_NAME PDB_PROP_INTR_MASK_SUPPORTED
390 #define PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING_BASE_CAST
391 #define PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING_BASE_NAME PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING
392 #define PDB_PROP_INTR_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
393 #define PDB_PROP_INTR_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
394 #define PDB_PROP_INTR_ENABLE_DETAILED_LOGS_BASE_CAST
395 #define PDB_PROP_INTR_ENABLE_DETAILED_LOGS_BASE_NAME PDB_PROP_INTR_ENABLE_DETAILED_LOGS
396 #define PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET_BASE_CAST
397 #define PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET_BASE_NAME PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET
398 #define PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS_BASE_CAST
399 #define PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS_BASE_NAME PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS
400 #define PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING_BASE_CAST
401 #define PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING_BASE_NAME PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING
402
403 NV_STATUS __nvoc_objCreateDynamic_Intr(Intr**, Dynamic*, NvU32, va_list);
404
405 NV_STATUS __nvoc_objCreate_Intr(Intr**, Dynamic*, NvU32);
406 #define __objCreate_Intr(ppNewObj, pParent, createFlags) \
407 __nvoc_objCreate_Intr((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
408
409
410 // Wrapper macros
411 #define intrConstructEngine_FNPTR(pIntr) pIntr->__intrConstructEngine__
412 #define intrConstructEngine(pGpu, pIntr, arg3) intrConstructEngine_DISPATCH(pGpu, pIntr, arg3)
413 #define intrStatePreInitLocked_FNPTR(pIntr) pIntr->__intrStatePreInitLocked__
414 #define intrStatePreInitLocked(pGpu, pIntr) intrStatePreInitLocked_DISPATCH(pGpu, pIntr)
415 #define intrStateInitUnlocked_FNPTR(pIntr) pIntr->__intrStateInitUnlocked__
416 #define intrStateInitUnlocked(pGpu, pIntr) intrStateInitUnlocked_DISPATCH(pGpu, pIntr)
417 #define intrStateInitLocked_FNPTR(pIntr) pIntr->__intrStateInitLocked__
418 #define intrStateInitLocked(pGpu, pIntr) intrStateInitLocked_DISPATCH(pGpu, pIntr)
419 #define intrStateDestroy_FNPTR(pIntr) pIntr->__intrStateDestroy__
420 #define intrStateDestroy(pGpu, pIntr) intrStateDestroy_DISPATCH(pGpu, pIntr)
421 #define intrDecodeStallIntrEn_FNPTR(pIntr) pIntr->__intrDecodeStallIntrEn__
422 #define intrDecodeStallIntrEn(pGpu, pIntr, arg3) intrDecodeStallIntrEn_DISPATCH(pGpu, pIntr, arg3)
423 #define intrDecodeStallIntrEn_HAL(pGpu, pIntr, arg3) intrDecodeStallIntrEn_DISPATCH(pGpu, pIntr, arg3)
424 #define intrServiceVirtual_FNPTR(pIntr) pIntr->__intrServiceVirtual__
425 #define intrServiceVirtual(pGpu, pIntr) intrServiceVirtual_DISPATCH(pGpu, pIntr)
426 #define intrServiceVirtual_HAL(pGpu, pIntr) intrServiceVirtual_DISPATCH(pGpu, pIntr)
427 #define intrTriggerPrivDoorbell_FNPTR(pIntr) pIntr->__intrTriggerPrivDoorbell__
428 #define intrTriggerPrivDoorbell(pGpu, pIntr, gfid) intrTriggerPrivDoorbell_DISPATCH(pGpu, pIntr, gfid)
429 #define intrTriggerPrivDoorbell_HAL(pGpu, pIntr, gfid) intrTriggerPrivDoorbell_DISPATCH(pGpu, pIntr, gfid)
430 #define intrGetLocklessVectorsInRmSubtree_FNPTR(pIntr) pIntr->__intrGetLocklessVectorsInRmSubtree__
431 #define intrGetLocklessVectorsInRmSubtree(pGpu, pIntr, pInterruptVectors) intrGetLocklessVectorsInRmSubtree_DISPATCH(pGpu, pIntr, pInterruptVectors)
432 #define intrGetLocklessVectorsInRmSubtree_HAL(pGpu, pIntr, pInterruptVectors) intrGetLocklessVectorsInRmSubtree_DISPATCH(pGpu, pIntr, pInterruptVectors)
433 #define intrSetDisplayInterruptEnable_FNPTR(pIntr) pIntr->__intrSetDisplayInterruptEnable__
434 #define intrSetDisplayInterruptEnable(pGpu, pIntr, bEnable, pThreadState) intrSetDisplayInterruptEnable_DISPATCH(pGpu, pIntr, bEnable, pThreadState)
435 #define intrSetDisplayInterruptEnable_HAL(pGpu, pIntr, bEnable, pThreadState) intrSetDisplayInterruptEnable_DISPATCH(pGpu, pIntr, bEnable, pThreadState)
436 #define intrReadRegTopEnSet_FNPTR(pIntr) pIntr->__intrReadRegTopEnSet__
437 #define intrReadRegTopEnSet(pGpu, pIntr, arg3, arg4) intrReadRegTopEnSet_DISPATCH(pGpu, pIntr, arg3, arg4)
438 #define intrReadRegTopEnSet_HAL(pGpu, pIntr, arg3, arg4) intrReadRegTopEnSet_DISPATCH(pGpu, pIntr, arg3, arg4)
439 #define intrWriteRegTopEnSet_FNPTR(pIntr) pIntr->__intrWriteRegTopEnSet__
440 #define intrWriteRegTopEnSet(pGpu, pIntr, arg3, arg4, arg5) intrWriteRegTopEnSet_DISPATCH(pGpu, pIntr, arg3, arg4, arg5)
441 #define intrWriteRegTopEnSet_HAL(pGpu, pIntr, arg3, arg4, arg5) intrWriteRegTopEnSet_DISPATCH(pGpu, pIntr, arg3, arg4, arg5)
442 #define intrWriteRegTopEnClear_FNPTR(pIntr) pIntr->__intrWriteRegTopEnClear__
443 #define intrWriteRegTopEnClear(pGpu, pIntr, arg3, arg4, arg5) intrWriteRegTopEnClear_DISPATCH(pGpu, pIntr, arg3, arg4, arg5)
444 #define intrWriteRegTopEnClear_HAL(pGpu, pIntr, arg3, arg4, arg5) intrWriteRegTopEnClear_DISPATCH(pGpu, pIntr, arg3, arg4, arg5)
445 #define intrGetNumLeaves_FNPTR(pIntr) pIntr->__intrGetNumLeaves__
446 #define intrGetNumLeaves(pGpu, pIntr) intrGetNumLeaves_DISPATCH(pGpu, pIntr)
447 #define intrGetNumLeaves_HAL(pGpu, pIntr) intrGetNumLeaves_DISPATCH(pGpu, pIntr)
448 #define intrGetLeafSize_FNPTR(pIntr) pIntr->__intrGetLeafSize__
449 #define intrGetLeafSize(pGpu, pIntr) intrGetLeafSize_DISPATCH(pGpu, pIntr)
450 #define intrGetLeafSize_HAL(pGpu, pIntr) intrGetLeafSize_DISPATCH(pGpu, pIntr)
451 #define intrGetIntrTopNonStallMask_FNPTR(pIntr) pIntr->__intrGetIntrTopNonStallMask__
452 #define intrGetIntrTopNonStallMask(pGpu, pIntr) intrGetIntrTopNonStallMask_DISPATCH(pGpu, pIntr)
453 #define intrGetIntrTopNonStallMask_HAL(pGpu, pIntr) intrGetIntrTopNonStallMask_DISPATCH(pGpu, pIntr)
454 #define intrSanityCheckEngineIntrStallVector_FNPTR(pIntr) pIntr->__intrSanityCheckEngineIntrStallVector__
455 #define intrSanityCheckEngineIntrStallVector(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrStallVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
456 #define intrSanityCheckEngineIntrStallVector_HAL(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrStallVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
457 #define intrSanityCheckEngineIntrNotificationVector_FNPTR(pIntr) pIntr->__intrSanityCheckEngineIntrNotificationVector__
458 #define intrSanityCheckEngineIntrNotificationVector(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrNotificationVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
459 #define intrSanityCheckEngineIntrNotificationVector_HAL(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrNotificationVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
460 #define intrStateLoad_FNPTR(pIntr) pIntr->__intrStateLoad__
461 #define intrStateLoad(pGpu, pIntr, arg3) intrStateLoad_DISPATCH(pGpu, pIntr, arg3)
462 #define intrStateLoad_HAL(pGpu, pIntr, arg3) intrStateLoad_DISPATCH(pGpu, pIntr, arg3)
463 #define intrStateUnload_FNPTR(pIntr) pIntr->__intrStateUnload__
464 #define intrStateUnload(pGpu, pIntr, arg3) intrStateUnload_DISPATCH(pGpu, pIntr, arg3)
465 #define intrStateUnload_HAL(pGpu, pIntr, arg3) intrStateUnload_DISPATCH(pGpu, pIntr, arg3)
466 #define intrInitSubtreeMap_FNPTR(pIntr) pIntr->__intrInitSubtreeMap__
467 #define intrInitSubtreeMap(pGpu, pIntr) intrInitSubtreeMap_DISPATCH(pGpu, pIntr)
468 #define intrInitSubtreeMap_HAL(pGpu, pIntr) intrInitSubtreeMap_DISPATCH(pGpu, pIntr)
469 #define intrInitInterruptTable_FNPTR(pIntr) pIntr->__intrInitInterruptTable__
470 #define intrInitInterruptTable(pGpu, pIntr) intrInitInterruptTable_DISPATCH(pGpu, pIntr)
471 #define intrInitInterruptTable_HAL(pGpu, pIntr) intrInitInterruptTable_DISPATCH(pGpu, pIntr)
472 #define intrSetIntrMask_FNPTR(pIntr) pIntr->__intrSetIntrMask__
473 #define intrSetIntrMask(pGpu, pIntr, arg3, arg4) intrSetIntrMask_DISPATCH(pGpu, pIntr, arg3, arg4)
474 #define intrSetIntrMask_HAL(pGpu, pIntr, arg3, arg4) intrSetIntrMask_DISPATCH(pGpu, pIntr, arg3, arg4)
475 #define intrSetIntrEnInHw_FNPTR(pIntr) pIntr->__intrSetIntrEnInHw__
476 #define intrSetIntrEnInHw(pGpu, pIntr, arg3, arg4) intrSetIntrEnInHw_DISPATCH(pGpu, pIntr, arg3, arg4)
477 #define intrSetIntrEnInHw_HAL(pGpu, pIntr, arg3, arg4) intrSetIntrEnInHw_DISPATCH(pGpu, pIntr, arg3, arg4)
478 #define intrGetIntrEnFromHw_FNPTR(pIntr) pIntr->__intrGetIntrEnFromHw__
479 #define intrGetIntrEnFromHw(pGpu, pIntr, arg3) intrGetIntrEnFromHw_DISPATCH(pGpu, pIntr, arg3)
480 #define intrGetIntrEnFromHw_HAL(pGpu, pIntr, arg3) intrGetIntrEnFromHw_DISPATCH(pGpu, pIntr, arg3)
481 #define intrInitMissing_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateInitMissing__
482 #define intrInitMissing(pGpu, pEngstate) intrInitMissing_DISPATCH(pGpu, pEngstate)
483 #define intrStatePreInitUnlocked_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreInitUnlocked__
484 #define intrStatePreInitUnlocked(pGpu, pEngstate) intrStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
485 #define intrStatePreLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreLoad__
486 #define intrStatePreLoad(pGpu, pEngstate, arg3) intrStatePreLoad_DISPATCH(pGpu, pEngstate, arg3)
487 #define intrStatePostLoad_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePostLoad__
488 #define intrStatePostLoad(pGpu, pEngstate, arg3) intrStatePostLoad_DISPATCH(pGpu, pEngstate, arg3)
489 #define intrStatePreUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePreUnload__
490 #define intrStatePreUnload(pGpu, pEngstate, arg3) intrStatePreUnload_DISPATCH(pGpu, pEngstate, arg3)
491 #define intrStatePostUnload_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateStatePostUnload__
492 #define intrStatePostUnload(pGpu, pEngstate, arg3) intrStatePostUnload_DISPATCH(pGpu, pEngstate, arg3)
493 #define intrIsPresent_FNPTR(pEngstate) pEngstate->__nvoc_base_OBJENGSTATE.__engstateIsPresent__
494 #define intrIsPresent(pGpu, pEngstate) intrIsPresent_DISPATCH(pGpu, pEngstate)
495
496 // Dispatch functions
intrConstructEngine_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,ENGDESCRIPTOR arg3)497 static inline NV_STATUS intrConstructEngine_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, ENGDESCRIPTOR arg3) {
498 return pIntr->__intrConstructEngine__(pGpu, pIntr, arg3);
499 }
500
intrStatePreInitLocked_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr)501 static inline NV_STATUS intrStatePreInitLocked_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
502 return pIntr->__intrStatePreInitLocked__(pGpu, pIntr);
503 }
504
intrStateInitUnlocked_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr)505 static inline NV_STATUS intrStateInitUnlocked_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
506 return pIntr->__intrStateInitUnlocked__(pGpu, pIntr);
507 }
508
intrStateInitLocked_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr)509 static inline NV_STATUS intrStateInitLocked_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
510 return pIntr->__intrStateInitLocked__(pGpu, pIntr);
511 }
512
intrStateDestroy_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr)513 static inline void intrStateDestroy_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
514 pIntr->__intrStateDestroy__(pGpu, pIntr);
515 }
516
intrDecodeStallIntrEn_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 arg3)517 static inline NvU32 intrDecodeStallIntrEn_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3) {
518 return pIntr->__intrDecodeStallIntrEn__(pGpu, pIntr, arg3);
519 }
520
intrServiceVirtual_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr)521 static inline void intrServiceVirtual_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
522 pIntr->__intrServiceVirtual__(pGpu, pIntr);
523 }
524
intrTriggerPrivDoorbell_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 gfid)525 static inline NV_STATUS intrTriggerPrivDoorbell_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
526 return pIntr->__intrTriggerPrivDoorbell__(pGpu, pIntr, gfid);
527 }
528
intrGetLocklessVectorsInRmSubtree_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 (* pInterruptVectors)[2])529 static inline void intrGetLocklessVectorsInRmSubtree_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 (*pInterruptVectors)[2]) {
530 pIntr->__intrGetLocklessVectorsInRmSubtree__(pGpu, pIntr, pInterruptVectors);
531 }
532
intrSetDisplayInterruptEnable_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvBool bEnable,struct THREAD_STATE_NODE * pThreadState)533 static inline void intrSetDisplayInterruptEnable_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable, struct THREAD_STATE_NODE *pThreadState) {
534 pIntr->__intrSetDisplayInterruptEnable__(pGpu, pIntr, bEnable, pThreadState);
535 }
536
intrReadRegTopEnSet_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 arg3,struct THREAD_STATE_NODE * arg4)537 static inline NvU32 intrReadRegTopEnSet_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4) {
538 return pIntr->__intrReadRegTopEnSet__(pGpu, pIntr, arg3, arg4);
539 }
540
intrWriteRegTopEnSet_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 arg3,NvU32 arg4,struct THREAD_STATE_NODE * arg5)541 static inline void intrWriteRegTopEnSet_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5) {
542 pIntr->__intrWriteRegTopEnSet__(pGpu, pIntr, arg3, arg4, arg5);
543 }
544
intrWriteRegTopEnClear_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 arg3,NvU32 arg4,struct THREAD_STATE_NODE * arg5)545 static inline void intrWriteRegTopEnClear_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5) {
546 pIntr->__intrWriteRegTopEnClear__(pGpu, pIntr, arg3, arg4, arg5);
547 }
548
intrGetNumLeaves_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr)549 static inline NvU32 intrGetNumLeaves_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
550 return pIntr->__intrGetNumLeaves__(pGpu, pIntr);
551 }
552
intrGetLeafSize_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr)553 static inline NvU32 intrGetLeafSize_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
554 return pIntr->__intrGetLeafSize__(pGpu, pIntr);
555 }
556
intrGetIntrTopNonStallMask_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr)557 static inline NvU64 intrGetIntrTopNonStallMask_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
558 return pIntr->__intrGetIntrTopNonStallMask__(pGpu, pIntr);
559 }
560
intrSanityCheckEngineIntrStallVector_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 vector,NvU16 mcEngine)561 static inline void intrSanityCheckEngineIntrStallVector_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
562 pIntr->__intrSanityCheckEngineIntrStallVector__(pGpu, pIntr, vector, mcEngine);
563 }
564
intrSanityCheckEngineIntrNotificationVector_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 vector,NvU16 mcEngine)565 static inline void intrSanityCheckEngineIntrNotificationVector_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
566 pIntr->__intrSanityCheckEngineIntrNotificationVector__(pGpu, pIntr, vector, mcEngine);
567 }
568
intrStateLoad_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 arg3)569 static inline NV_STATUS intrStateLoad_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3) {
570 return pIntr->__intrStateLoad__(pGpu, pIntr, arg3);
571 }
572
intrStateUnload_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 arg3)573 static inline NV_STATUS intrStateUnload_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3) {
574 return pIntr->__intrStateUnload__(pGpu, pIntr, arg3);
575 }
576
intrInitSubtreeMap_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr)577 static inline NV_STATUS intrInitSubtreeMap_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
578 return pIntr->__intrInitSubtreeMap__(pGpu, pIntr);
579 }
580
intrInitInterruptTable_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr)581 static inline NV_STATUS intrInitInterruptTable_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
582 return pIntr->__intrInitInterruptTable__(pGpu, pIntr);
583 }
584
intrSetIntrMask_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,union MC_ENGINE_BITVECTOR * arg3,struct THREAD_STATE_NODE * arg4)585 static inline NV_STATUS intrSetIntrMask_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
586 return pIntr->__intrSetIntrMask__(pGpu, pIntr, arg3, arg4);
587 }
588
intrSetIntrEnInHw_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,NvU32 arg3,struct THREAD_STATE_NODE * arg4)589 static inline void intrSetIntrEnInHw_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4) {
590 pIntr->__intrSetIntrEnInHw__(pGpu, pIntr, arg3, arg4);
591 }
592
intrGetIntrEnFromHw_DISPATCH(OBJGPU * pGpu,struct Intr * pIntr,struct THREAD_STATE_NODE * arg3)593 static inline NvU32 intrGetIntrEnFromHw_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3) {
594 return pIntr->__intrGetIntrEnFromHw__(pGpu, pIntr, arg3);
595 }
596
// Vtable dispatch (void, OBJENGSTATE-inherited): forwards intrInitMissing to the engstate's __intrInitMissing__ pointer.
static inline void intrInitMissing_DISPATCH(struct OBJGPU *pGpu, struct Intr *pEngstate) {
    pEngstate->__intrInitMissing__(pGpu, pEngstate);
}
600
// Vtable dispatch (OBJENGSTATE-inherited): forwards intrStatePreInitUnlocked to the engstate's __intrStatePreInitUnlocked__ pointer.
static inline NV_STATUS intrStatePreInitUnlocked_DISPATCH(struct OBJGPU *pGpu, struct Intr *pEngstate) {
    return pEngstate->__intrStatePreInitUnlocked__(pGpu, pEngstate);
}
604
// Vtable dispatch (OBJENGSTATE-inherited): forwards intrStatePreLoad to the engstate's __intrStatePreLoad__ pointer.
static inline NV_STATUS intrStatePreLoad_DISPATCH(struct OBJGPU *pGpu, struct Intr *pEngstate, NvU32 arg3) {
    return pEngstate->__intrStatePreLoad__(pGpu, pEngstate, arg3);
}
608
// Vtable dispatch (OBJENGSTATE-inherited): forwards intrStatePostLoad to the engstate's __intrStatePostLoad__ pointer.
static inline NV_STATUS intrStatePostLoad_DISPATCH(struct OBJGPU *pGpu, struct Intr *pEngstate, NvU32 arg3) {
    return pEngstate->__intrStatePostLoad__(pGpu, pEngstate, arg3);
}
612
// Vtable dispatch (OBJENGSTATE-inherited): forwards intrStatePreUnload to the engstate's __intrStatePreUnload__ pointer.
static inline NV_STATUS intrStatePreUnload_DISPATCH(struct OBJGPU *pGpu, struct Intr *pEngstate, NvU32 arg3) {
    return pEngstate->__intrStatePreUnload__(pGpu, pEngstate, arg3);
}
616
// Vtable dispatch (OBJENGSTATE-inherited): forwards intrStatePostUnload to the engstate's __intrStatePostUnload__ pointer.
static inline NV_STATUS intrStatePostUnload_DISPATCH(struct OBJGPU *pGpu, struct Intr *pEngstate, NvU32 arg3) {
    return pEngstate->__intrStatePostUnload__(pGpu, pEngstate, arg3);
}
620
// Vtable dispatch (OBJENGSTATE-inherited): forwards intrIsPresent to the engstate's __intrIsPresent__ pointer.
static inline NvBool intrIsPresent_DISPATCH(struct OBJGPU *pGpu, struct Intr *pEngstate) {
    return pEngstate->__intrIsPresent__(pGpu, pEngstate);
}
624
// intrCheckFecsEventbufferPending HAL family.
// Disabled build (__nvoc_intr_h_disabled): stub asserts and returns NV_ERR_NOT_SUPPORTED.
// Enabled build: macro routes the call to intrCheckFecsEventbufferPending_IMPL.
NV_STATUS intrCheckFecsEventbufferPending_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, NvBool *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCheckFecsEventbufferPending(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, NvBool *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCheckFecsEventbufferPending(pGpu, pIntr, arg3, arg4) intrCheckFecsEventbufferPending_IMPL(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrCheckFecsEventbufferPending_HAL(pGpu, pIntr, arg3, arg4) intrCheckFecsEventbufferPending(pGpu, pIntr, arg3, arg4)
638
// intrCheckAndServiceFecsEventbuffer HAL family.
// Disabled build: stub asserts and returns NV_ERR_NOT_SUPPORTED.
// Enabled build: macro routes the call to intrCheckAndServiceFecsEventbuffer_IMPL.
NV_STATUS intrCheckAndServiceFecsEventbuffer_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCheckAndServiceFecsEventbuffer(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCheckAndServiceFecsEventbuffer(pGpu, pIntr, arg3, arg4) intrCheckAndServiceFecsEventbuffer_IMPL(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrCheckAndServiceFecsEventbuffer_HAL(pGpu, pIntr, arg3, arg4) intrCheckAndServiceFecsEventbuffer(pGpu, pIntr, arg3, arg4)
652
// intrStateDestroyPhysical HAL family.
// _56cd7a is the generated no-op variant (returns NV_OK); an _IMPL variant is
// declared for builds that need real teardown. In the enabled build the macro
// routes to the _56cd7a no-op; the disabled stub asserts and returns NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS intrStateDestroyPhysical_56cd7a(OBJGPU *pGpu, struct Intr *pIntr) {
    return NV_OK;
}

NV_STATUS intrStateDestroyPhysical_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrStateDestroyPhysical(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrStateDestroyPhysical(pGpu, pIntr) intrStateDestroyPhysical_56cd7a(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrStateDestroyPhysical_HAL(pGpu, pIntr) intrStateDestroyPhysical(pGpu, pIntr)
670
// intrSetInterruptMaskBug1470153War HAL family (workaround hook for bug 1470153).
// _b3696a is the generated empty no-op variant; the enabled build routes the
// macro to it, and the disabled stub only asserts.
static inline void intrSetInterruptMaskBug1470153War_b3696a(OBJGPU *pGpu, struct Intr *pIntr) {
    return;
}


#ifdef __nvoc_intr_h_disabled
static inline void intrSetInterruptMaskBug1470153War(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetInterruptMaskBug1470153War(pGpu, pIntr) intrSetInterruptMaskBug1470153War_b3696a(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrSetInterruptMaskBug1470153War_HAL(pGpu, pIntr) intrSetInterruptMaskBug1470153War(pGpu, pIntr)
685
// intrGetPendingNonStall HAL family.
// Enabled build routes to the TU102 implementation; disabled stub asserts and
// returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrGetPendingNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingNonStall(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingNonStall(pGpu, pIntr, arg3, arg4) intrGetPendingNonStall_TU102(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingNonStall_HAL(pGpu, pIntr, arg3, arg4) intrGetPendingNonStall(pGpu, pIntr, arg3, arg4)
699
// intrServiceNonStall HAL family.
// Enabled build routes to the TU102 implementation; disabled stub asserts and
// returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrServiceNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrServiceNonStall(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrServiceNonStall(pGpu, pIntr, arg3, arg4) intrServiceNonStall_TU102(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrServiceNonStall_HAL(pGpu, pIntr, arg3, arg4) intrServiceNonStall(pGpu, pIntr, arg3, arg4)
713
// intrGetNonStallEnable HAL family.
// Enabled build routes to the TU102 implementation; disabled stub asserts and
// returns 0.
NvU32 intrGetNonStallEnable_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetNonStallEnable(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetNonStallEnable(pGpu, pIntr, arg3) intrGetNonStallEnable_TU102(pGpu, pIntr, arg3)
#endif //__nvoc_intr_h_disabled

#define intrGetNonStallEnable_HAL(pGpu, pIntr, arg3) intrGetNonStallEnable(pGpu, pIntr, arg3)
727
// intrDisableNonStall HAL family.
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrDisableNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3);


#ifdef __nvoc_intr_h_disabled
static inline void intrDisableNonStall(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableNonStall(pGpu, pIntr, arg3) intrDisableNonStall_TU102(pGpu, pIntr, arg3)
#endif //__nvoc_intr_h_disabled

#define intrDisableNonStall_HAL(pGpu, pIntr, arg3) intrDisableNonStall(pGpu, pIntr, arg3)
740
// intrRestoreNonStall HAL family.
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrRestoreNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline void intrRestoreNonStall(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrRestoreNonStall(pGpu, pIntr, arg3, arg4) intrRestoreNonStall_TU102(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrRestoreNonStall_HAL(pGpu, pIntr, arg3, arg4) intrRestoreNonStall(pGpu, pIntr, arg3, arg4)
753
// intrGetStallInterruptMode HAL family (returns mode/pending via out-pointers).
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrGetStallInterruptMode_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *pIntrmode, NvBool *pPending);


#ifdef __nvoc_intr_h_disabled
static inline void intrGetStallInterruptMode(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *pIntrmode, NvBool *pPending) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetStallInterruptMode(pGpu, pIntr, pIntrmode, pPending) intrGetStallInterruptMode_TU102(pGpu, pIntr, pIntrmode, pPending)
#endif //__nvoc_intr_h_disabled

#define intrGetStallInterruptMode_HAL(pGpu, pIntr, pIntrmode, pPending) intrGetStallInterruptMode(pGpu, pIntr, pIntrmode, pPending)
766
// intrEncodeStallIntrEn HAL family (encodes intrEn into set/clear values via out-pointers).
// Enabled build routes to the GP100 implementation; disabled stub only asserts.
void intrEncodeStallIntrEn_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrEn, NvU32 *pIntrEnSet, NvU32 *pIntrEnClear);


#ifdef __nvoc_intr_h_disabled
static inline void intrEncodeStallIntrEn(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrEn, NvU32 *pIntrEnSet, NvU32 *pIntrEnClear) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEncodeStallIntrEn(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear) intrEncodeStallIntrEn_GP100(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear)
#endif //__nvoc_intr_h_disabled

#define intrEncodeStallIntrEn_HAL(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear) intrEncodeStallIntrEn(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear)
779
// intrCheckAndServiceNonReplayableFault HAL family.
// Enabled build routes to the TU102 implementation; disabled stub asserts and
// returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrCheckAndServiceNonReplayableFault_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCheckAndServiceNonReplayableFault(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCheckAndServiceNonReplayableFault(pGpu, pIntr, arg3) intrCheckAndServiceNonReplayableFault_TU102(pGpu, pIntr, arg3)
#endif //__nvoc_intr_h_disabled

#define intrCheckAndServiceNonReplayableFault_HAL(pGpu, pIntr, arg3) intrCheckAndServiceNonReplayableFault(pGpu, pIntr, arg3)
793
// intrGetStallBaseVector HAL family with three generated variants:
//   _4a4dee: returns 0 (default used by the enabled-build macro below)
//   _TU102 : real hardware implementation (prototype only here)
//   _c067f9: always-assert variant that returns 0 via NV_ASSERT_OR_RETURN_PRECOMP
static inline NvU32 intrGetStallBaseVector_4a4dee(OBJGPU *pGpu, struct Intr *pIntr) {
    return 0;
}

NvU32 intrGetStallBaseVector_TU102(OBJGPU *pGpu, struct Intr *pIntr);

static inline NvU32 intrGetStallBaseVector_c067f9(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetStallBaseVector(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetStallBaseVector(pGpu, pIntr) intrGetStallBaseVector_4a4dee(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrGetStallBaseVector_HAL(pGpu, pIntr) intrGetStallBaseVector(pGpu, pIntr)
815
// intrEnableLeaf HAL family (enables the leaf interrupt for intrVector).
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrEnableLeaf_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector);


#ifdef __nvoc_intr_h_disabled
static inline void intrEnableLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEnableLeaf(pGpu, pIntr, intrVector) intrEnableLeaf_TU102(pGpu, pIntr, intrVector)
#endif //__nvoc_intr_h_disabled

#define intrEnableLeaf_HAL(pGpu, pIntr, intrVector) intrEnableLeaf(pGpu, pIntr, intrVector)
828
// intrDisableLeaf HAL family (disables the leaf interrupt for intrVector).
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrDisableLeaf_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector);


#ifdef __nvoc_intr_h_disabled
static inline void intrDisableLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableLeaf(pGpu, pIntr, intrVector) intrDisableLeaf_TU102(pGpu, pIntr, intrVector)
#endif //__nvoc_intr_h_disabled

#define intrDisableLeaf_HAL(pGpu, pIntr, intrVector) intrDisableLeaf(pGpu, pIntr, intrVector)
841
// intrEnableTopNonstall HAL family.
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrEnableTopNonstall_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline void intrEnableTopNonstall(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEnableTopNonstall(pGpu, pIntr, pThreadState) intrEnableTopNonstall_TU102(pGpu, pIntr, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrEnableTopNonstall_HAL(pGpu, pIntr, pThreadState) intrEnableTopNonstall(pGpu, pIntr, pThreadState)
854
// intrDisableTopNonstall HAL family.
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrDisableTopNonstall_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline void intrDisableTopNonstall(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableTopNonstall(pGpu, pIntr, pThreadState) intrDisableTopNonstall_TU102(pGpu, pIntr, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrDisableTopNonstall_HAL(pGpu, pIntr, pThreadState) intrDisableTopNonstall(pGpu, pIntr, pThreadState)
867
// intrSetStall HAL family.
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrSetStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrType, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline void intrSetStall(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrType, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetStall(pGpu, pIntr, intrType, pThreadState) intrSetStall_TU102(pGpu, pIntr, intrType, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrSetStall_HAL(pGpu, pIntr, intrType, pThreadState) intrSetStall(pGpu, pIntr, intrType, pThreadState)
880
// intrClearLeafVector HAL family (clears the given leaf interrupt vector).
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrClearLeafVector_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline void intrClearLeafVector(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrClearLeafVector(pGpu, pIntr, vector, pThreadState) intrClearLeafVector_TU102(pGpu, pIntr, vector, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrClearLeafVector_HAL(pGpu, pIntr, vector, pThreadState) intrClearLeafVector(pGpu, pIntr, vector, pThreadState)
893
// intrIsVectorPending HAL family.
// Enabled build routes to the TU102 implementation; disabled stub asserts and
// returns NV_FALSE.
NvBool intrIsVectorPending_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline NvBool intrIsVectorPending(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrIsVectorPending(pGpu, pIntr, vector, pThreadState) intrIsVectorPending_TU102(pGpu, pIntr, vector, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrIsVectorPending_HAL(pGpu, pIntr, vector, pThreadState) intrIsVectorPending(pGpu, pIntr, vector, pThreadState)
907
// intrSetStallSWIntr HAL family (software-triggered stall interrupt).
// Enabled build routes to the TU102 implementation; disabled stub asserts and
// returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrSetStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrSetStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrSetStallSWIntr(pGpu, pIntr) intrSetStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrSetStallSWIntr_HAL(pGpu, pIntr) intrSetStallSWIntr(pGpu, pIntr)
921
// intrClearStallSWIntr HAL family.
// Enabled build routes to the TU102 implementation; disabled stub asserts and
// returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrClearStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrClearStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrClearStallSWIntr(pGpu, pIntr) intrClearStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrClearStallSWIntr_HAL(pGpu, pIntr) intrClearStallSWIntr(pGpu, pIntr)
935
// intrEnableStallSWIntr HAL family.
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrEnableStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrEnableStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEnableStallSWIntr(pGpu, pIntr) intrEnableStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrEnableStallSWIntr_HAL(pGpu, pIntr) intrEnableStallSWIntr(pGpu, pIntr)
948
// intrDisableStallSWIntr HAL family.
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrDisableStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrDisableStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableStallSWIntr(pGpu, pIntr) intrDisableStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrDisableStallSWIntr_HAL(pGpu, pIntr) intrDisableStallSWIntr(pGpu, pIntr)
961
// intrResetIntrRegistersForVF HAL family (per-GFID VF interrupt register reset).
// _b3696a is the generated empty no-op; a TU102 implementation is also declared.
// The enabled-build macro routes to the _b3696a no-op; the disabled stub asserts.
static inline void intrResetIntrRegistersForVF_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    return;
}

void intrResetIntrRegistersForVF_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid);


#ifdef __nvoc_intr_h_disabled
static inline void intrResetIntrRegistersForVF(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrResetIntrRegistersForVF(pGpu, pIntr, gfid) intrResetIntrRegistersForVF_b3696a(pGpu, pIntr, gfid)
#endif //__nvoc_intr_h_disabled

#define intrResetIntrRegistersForVF_HAL(pGpu, pIntr, gfid) intrResetIntrRegistersForVF(pGpu, pIntr, gfid)
978
// intrSaveIntrRegValue HAL family.
// _46f6a7 is the generated not-supported variant (returns NV_ERR_NOT_SUPPORTED);
// a TU102 implementation is also declared. The enabled-build macro routes to
// _46f6a7; the disabled stub asserts and returns NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS intrSaveIntrRegValue_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 *arg4, NvU32 *arg5) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrSaveIntrRegValue_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 *arg4, NvU32 *arg5);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrSaveIntrRegValue(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 *arg4, NvU32 *arg5) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrSaveIntrRegValue(pGpu, pIntr, arg3, arg4, arg5) intrSaveIntrRegValue_46f6a7(pGpu, pIntr, arg3, arg4, arg5)
#endif //__nvoc_intr_h_disabled

#define intrSaveIntrRegValue_HAL(pGpu, pIntr, arg3, arg4, arg5) intrSaveIntrRegValue(pGpu, pIntr, arg3, arg4, arg5)
996
// intrRestoreIntrRegValue HAL family.
// _46f6a7 is the generated not-supported variant (returns NV_ERR_NOT_SUPPORTED);
// a TU102 implementation is also declared. The enabled-build macro routes to
// _46f6a7; the disabled stub asserts and returns NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS intrRestoreIntrRegValue_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, NvU32 *arg5) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrRestoreIntrRegValue_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, NvU32 *arg5);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrRestoreIntrRegValue(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, NvU32 *arg5) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrRestoreIntrRegValue(pGpu, pIntr, arg3, arg4, arg5) intrRestoreIntrRegValue_46f6a7(pGpu, pIntr, arg3, arg4, arg5)
#endif //__nvoc_intr_h_disabled

#define intrRestoreIntrRegValue_HAL(pGpu, pIntr, arg3, arg4, arg5) intrRestoreIntrRegValue(pGpu, pIntr, arg3, arg4, arg5)
1014
// intrTriggerCpuDoorbellForVF HAL family (per-GFID VF doorbell trigger).
// _46f6a7 is the generated not-supported variant (returns NV_ERR_NOT_SUPPORTED);
// a TU102 implementation is also declared. The enabled-build macro routes to
// _46f6a7; the disabled stub asserts and returns NV_ERR_NOT_SUPPORTED.
static inline NV_STATUS intrTriggerCpuDoorbellForVF_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrTriggerCpuDoorbellForVF_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrTriggerCpuDoorbellForVF(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrTriggerCpuDoorbellForVF(pGpu, pIntr, gfid) intrTriggerCpuDoorbellForVF_46f6a7(pGpu, pIntr, gfid)
#endif //__nvoc_intr_h_disabled

#define intrTriggerCpuDoorbellForVF_HAL(pGpu, pIntr, gfid) intrTriggerCpuDoorbellForVF(pGpu, pIntr, gfid)
1032
// intrRetriggerTopLevel HAL family.
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrRetriggerTopLevel_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrRetriggerTopLevel(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrRetriggerTopLevel(pGpu, pIntr) intrRetriggerTopLevel_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrRetriggerTopLevel_HAL(pGpu, pIntr) intrRetriggerTopLevel(pGpu, pIntr)
1045
// intrGetLeafStatus HAL family.
// Enabled build routes to the TU102 implementation; disabled stub asserts and
// returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrGetLeafStatus_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetLeafStatus(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetLeafStatus(pGpu, pIntr, arg3, arg4) intrGetLeafStatus_TU102(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrGetLeafStatus_HAL(pGpu, pIntr, arg3, arg4) intrGetLeafStatus(pGpu, pIntr, arg3, arg4)
1059
// intrGetPendingDisplayIntr HAL family.
// Enabled build routes to the TU102 implementation; disabled stub asserts and
// returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrGetPendingDisplayIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingDisplayIntr(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingDisplayIntr(pGpu, pIntr, pEngines, pThreadState) intrGetPendingDisplayIntr_TU102(pGpu, pIntr, pEngines, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingDisplayIntr_HAL(pGpu, pIntr, pEngines, pThreadState) intrGetPendingDisplayIntr(pGpu, pIntr, pEngines, pThreadState)
1073
// intrDumpState HAL family (debug dump of interrupt state).
// Enabled build routes to the TU102 implementation; disabled stub only asserts.
void intrDumpState_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrDumpState(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDumpState(pGpu, pIntr) intrDumpState_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrDumpState_HAL(pGpu, pIntr) intrDumpState(pGpu, pIntr)
1086
// intrCacheIntrFields HAL family.
// Enabled build routes to the TU102 implementation; disabled stub asserts and
// returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrCacheIntrFields_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCacheIntrFields(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCacheIntrFields(pGpu, pIntr) intrCacheIntrFields_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrCacheIntrFields_HAL(pGpu, pIntr) intrCacheIntrFields(pGpu, pIntr)
1100
// intrReadRegLeafEnSet HAL family with separate CPU-RM and GSP-RM TU102 variants.
// The enabled-build macro routes to the CPU variant; the disabled stub asserts
// and returns 0.
NvU32 intrReadRegLeafEnSet_CPU_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);

NvU32 intrReadRegLeafEnSet_GSP_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadRegLeafEnSet(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadRegLeafEnSet(pGpu, pIntr, arg3, arg4) intrReadRegLeafEnSet_CPU_TU102(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrReadRegLeafEnSet_HAL(pGpu, pIntr, arg3, arg4) intrReadRegLeafEnSet(pGpu, pIntr, arg3, arg4)
1116
// intrReadRegLeaf HAL family with separate CPU-RM and GSP-RM TU102 variants.
// The enabled-build macro routes to the CPU variant; the disabled stub asserts
// and returns 0.
NvU32 intrReadRegLeaf_CPU_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);

NvU32 intrReadRegLeaf_GSP_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadRegLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadRegLeaf(pGpu, pIntr, arg3, arg4) intrReadRegLeaf_CPU_TU102(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrReadRegLeaf_HAL(pGpu, pIntr, arg3, arg4) intrReadRegLeaf(pGpu, pIntr, arg3, arg4)
1132
// intrReadRegTop HAL family with separate CPU-RM and GSP-RM TU102 variants.
// The enabled-build macro routes to the CPU variant; the disabled stub asserts
// and returns 0.
NvU32 intrReadRegTop_CPU_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);

NvU32 intrReadRegTop_GSP_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadRegTop(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadRegTop(pGpu, pIntr, arg3, arg4) intrReadRegTop_CPU_TU102(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrReadRegTop_HAL(pGpu, pIntr, arg3, arg4) intrReadRegTop(pGpu, pIntr, arg3, arg4)
1148
// intrWriteRegLeafEnSet HAL family with separate CPU-RM and GSP-RM TU102 variants.
// The enabled-build macro routes to the CPU variant; the disabled stub only asserts.
void intrWriteRegLeafEnSet_CPU_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

void intrWriteRegLeafEnSet_GSP_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);


#ifdef __nvoc_intr_h_disabled
static inline void intrWriteRegLeafEnSet(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrWriteRegLeafEnSet(pGpu, pIntr, arg3, arg4, arg5) intrWriteRegLeafEnSet_CPU_TU102(pGpu, pIntr, arg3, arg4, arg5)
#endif //__nvoc_intr_h_disabled

#define intrWriteRegLeafEnSet_HAL(pGpu, pIntr, arg3, arg4, arg5) intrWriteRegLeafEnSet(pGpu, pIntr, arg3, arg4, arg5)
1163
// intrWriteRegLeafEnClear HAL family with separate CPU-RM and GSP-RM TU102 variants.
// The enabled-build macro routes to the CPU variant; the disabled stub only asserts.
void intrWriteRegLeafEnClear_CPU_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

void intrWriteRegLeafEnClear_GSP_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);


#ifdef __nvoc_intr_h_disabled
static inline void intrWriteRegLeafEnClear(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrWriteRegLeafEnClear(pGpu, pIntr, arg3, arg4, arg5) intrWriteRegLeafEnClear_CPU_TU102(pGpu, pIntr, arg3, arg4, arg5)
#endif //__nvoc_intr_h_disabled

#define intrWriteRegLeafEnClear_HAL(pGpu, pIntr, arg3, arg4, arg5) intrWriteRegLeafEnClear(pGpu, pIntr, arg3, arg4, arg5)
1178
// intrWriteRegLeaf HAL family with separate CPU-RM and GSP-RM TU102 variants.
// The enabled-build macro routes to the CPU variant; the disabled stub only asserts.
void intrWriteRegLeaf_CPU_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

void intrWriteRegLeaf_GSP_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);


#ifdef __nvoc_intr_h_disabled
static inline void intrWriteRegLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrWriteRegLeaf(pGpu, pIntr, arg3, arg4, arg5) intrWriteRegLeaf_CPU_TU102(pGpu, pIntr, arg3, arg4, arg5)
#endif //__nvoc_intr_h_disabled

#define intrWriteRegLeaf_HAL(pGpu, pIntr, arg3, arg4, arg5) intrWriteRegLeaf(pGpu, pIntr, arg3, arg4, arg5)
1193
// --- intrRouteInterruptsToSystemFirmware ----------------------------------
// _56cd7a: generated default body -- a no-op that reports success; GH100
// (Hopper) and GB100 (Blackwell) supply real implementations.
static inline NV_STATUS intrRouteInterruptsToSystemFirmware_56cd7a(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable) {
    return NV_OK;
}

NV_STATUS intrRouteInterruptsToSystemFirmware_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable);

NV_STATUS intrRouteInterruptsToSystemFirmware_GB100(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable);


#ifdef __nvoc_intr_h_disabled
// Stub when Intr is compiled out: asserts and reports "not supported".
static inline NV_STATUS intrRouteInterruptsToSystemFirmware(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrRouteInterruptsToSystemFirmware(pGpu, pIntr, bEnable) intrRouteInterruptsToSystemFirmware_56cd7a(pGpu, pIntr, bEnable)
#endif //__nvoc_intr_h_disabled

#define intrRouteInterruptsToSystemFirmware_HAL(pGpu, pIntr, bEnable) intrRouteInterruptsToSystemFirmware(pGpu, pIntr, bEnable)

// --- intrInitDynamicInterruptTable ----------------------------------------
// _5baef9: generated "not supported" body -- asserts and returns the error.
static inline NV_STATUS intrInitDynamicInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, struct OBJFIFO *arg3, InterruptTable *arg4, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitDynamicInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, struct OBJFIFO *arg3, InterruptTable *arg4, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitDynamicInterruptTable(pGpu, pIntr, arg3, arg4, initFlags) intrInitDynamicInterruptTable_5baef9(pGpu, pIntr, arg3, arg4, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrInitDynamicInterruptTable_HAL(pGpu, pIntr, arg3, arg4, initFlags) intrInitDynamicInterruptTable(pGpu, pIntr, arg3, arg4, initFlags)

// --- intrInitAnyInterruptTable --------------------------------------------
static inline NV_STATUS intrInitAnyInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitAnyInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitAnyInterruptTable(pGpu, pIntr, pIntrTable, initFlags) intrInitAnyInterruptTable_5baef9(pGpu, pIntr, pIntrTable, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrInitAnyInterruptTable_HAL(pGpu, pIntr, pIntrTable, initFlags) intrInitAnyInterruptTable(pGpu, pIntr, pIntrTable, initFlags)

// --- intrGetInterruptTable ------------------------------------------------
// Chip-independent implementation; hands back a pointer to the table.
NV_STATUS intrGetInterruptTable_IMPL(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable **ppIntrTable);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable **ppIntrTable) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetInterruptTable(pGpu, pIntr, ppIntrTable) intrGetInterruptTable_IMPL(pGpu, pIntr, ppIntrTable)
#endif //__nvoc_intr_h_disabled

#define intrGetInterruptTable_HAL(pGpu, pIntr, ppIntrTable) intrGetInterruptTable(pGpu, pIntr, ppIntrTable)

// --- intrDestroyInterruptTable --------------------------------------------
NV_STATUS intrDestroyInterruptTable_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrDestroyInterruptTable(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrDestroyInterruptTable(pGpu, pIntr) intrDestroyInterruptTable_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrDestroyInterruptTable_HAL(pGpu, pIntr) intrDestroyInterruptTable(pGpu, pIntr)

// --- intrGetStaticVFmcEngines ---------------------------------------------
// Returns the static list of VF MC engine indices (Turing/Ampere HALs).
static inline NV_STATUS intrGetStaticVFmcEngines_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetStaticVFmcEngines_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount);

NV_STATUS intrGetStaticVFmcEngines_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetStaticVFmcEngines(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetStaticVFmcEngines(pGpu, pIntr, ppMcEngines, pCount) intrGetStaticVFmcEngines_5baef9(pGpu, pIntr, ppMcEngines, pCount)
#endif //__nvoc_intr_h_disabled

#define intrGetStaticVFmcEngines_HAL(pGpu, pIntr, ppMcEngines, pCount) intrGetStaticVFmcEngines(pGpu, pIntr, ppMcEngines, pCount)
1293
// --- intrGetStaticInterruptTable ------------------------------------------
// Per-chip HALs (TU102/GA100/GA102/GH100) populate the static portion of
// the interrupt table; _5baef9 is the generated "not supported" default.
static inline NV_STATUS intrGetStaticInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetStaticInterruptTable_TU102(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags);

NV_STATUS intrGetStaticInterruptTable_GA100(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags);

NV_STATUS intrGetStaticInterruptTable_GA102(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags);

NV_STATUS intrGetStaticInterruptTable_GH100(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetStaticInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetStaticInterruptTable(pGpu, pIntr, pTable, initFlags) intrGetStaticInterruptTable_5baef9(pGpu, pIntr, pTable, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrGetStaticInterruptTable_HAL(pGpu, pIntr, pTable, initFlags) intrGetStaticInterruptTable(pGpu, pIntr, pTable, initFlags)

// --- intrInitGPUHostInterruptTable ----------------------------------------
static inline NV_STATUS intrInitGPUHostInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrInitGPUHostInterruptTable_GM107(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags);

NV_STATUS intrInitGPUHostInterruptTable_GA100(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitGPUHostInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitGPUHostInterruptTable(pGpu, pIntr, pIntrTable, initFlags) intrInitGPUHostInterruptTable_5baef9(pGpu, pIntr, pIntrTable, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrInitGPUHostInterruptTable_HAL(pGpu, pIntr, pIntrTable, initFlags) intrInitGPUHostInterruptTable(pGpu, pIntr, pIntrTable, initFlags)

// --- intrInitEngineSchedInterruptTable ------------------------------------
static inline NV_STATUS intrInitEngineSchedInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitEngineSchedInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitEngineSchedInterruptTable(pGpu, pIntr, pIntrTable) intrInitEngineSchedInterruptTable_5baef9(pGpu, pIntr, pIntrTable)
#endif //__nvoc_intr_h_disabled

#define intrInitEngineSchedInterruptTable_HAL(pGpu, pIntr, pIntrTable) intrInitEngineSchedInterruptTable(pGpu, pIntr, pIntrTable)

// --- intrServiceStall / StallList / StallSingle / ProcessDPCQueue ---------
// Stall-interrupt service entry points; chip-independent implementations.
void intrServiceStall_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStall(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStall(pGpu, pIntr) intrServiceStall_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrServiceStall_HAL(pGpu, pIntr) intrServiceStall(pGpu, pIntr)

void intrServiceStallList_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, NvBool arg4);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStallList(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, NvBool arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStallList(pGpu, pIntr, arg3, arg4) intrServiceStallList_IMPL(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrServiceStallList_HAL(pGpu, pIntr, arg3, arg4) intrServiceStallList(pGpu, pIntr, arg3, arg4)

void intrServiceStallSingle_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg3, NvBool arg4);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStallSingle(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg3, NvBool arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStallSingle(pGpu, pIntr, arg3, arg4) intrServiceStallSingle_IMPL(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrServiceStallSingle_HAL(pGpu, pIntr, arg3, arg4) intrServiceStallSingle(pGpu, pIntr, arg3, arg4)

void intrProcessDPCQueue_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrProcessDPCQueue(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrProcessDPCQueue(pGpu, pIntr) intrProcessDPCQueue_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrProcessDPCQueue_HAL(pGpu, pIntr) intrProcessDPCQueue(pGpu, pIntr)
1405
// --- intrGetIntrMask ------------------------------------------------------
// Pascal+ implementation reads the current interrupt mask into a bitvector.
NV_STATUS intrGetIntrMask_GP100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetIntrMask(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrMask(pGpu, pIntr, arg3, arg4) intrGetIntrMask_GP100(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrGetIntrMask_HAL(pGpu, pIntr, arg3, arg4) intrGetIntrMask(pGpu, pIntr, arg3, arg4)

// --- intrGetEccIntrMaskOffset ---------------------------------------------
// _5baef9 asserts+fails; _46f6a7 silently reports "not supported";
// GP100 provides the real register/offset lookup.
static inline NV_STATUS intrGetEccIntrMaskOffset_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3, NvU32 *arg4) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetEccIntrMaskOffset_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3, NvU32 *arg4);

static inline NV_STATUS intrGetEccIntrMaskOffset_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3, NvU32 *arg4) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetEccIntrMaskOffset(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3, NvU32 *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetEccIntrMaskOffset(pGpu, pIntr, arg3, arg4) intrGetEccIntrMaskOffset_5baef9(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrGetEccIntrMaskOffset_HAL(pGpu, pIntr, arg3, arg4) intrGetEccIntrMaskOffset(pGpu, pIntr, arg3, arg4)

// --- intrGetNvlinkIntrMaskOffset ------------------------------------------
// Same stub/HAL shape as the ECC variant above, for the NVLINK mask.
static inline NV_STATUS intrGetNvlinkIntrMaskOffset_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3, NvU32 *arg4) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetNvlinkIntrMaskOffset_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3, NvU32 *arg4);

static inline NV_STATUS intrGetNvlinkIntrMaskOffset_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3, NvU32 *arg4) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetNvlinkIntrMaskOffset(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3, NvU32 *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetNvlinkIntrMaskOffset(pGpu, pIntr, arg3, arg4) intrGetNvlinkIntrMaskOffset_5baef9(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrGetNvlinkIntrMaskOffset_HAL(pGpu, pIntr, arg3, arg4) intrGetNvlinkIntrMaskOffset(pGpu, pIntr, arg3, arg4)

// --- intrRequiresPossibleErrorNotifier ------------------------------------
// _491d52: generated constant-NV_FALSE body; per-chip HALs may report TRUE.
static inline NvBool intrRequiresPossibleErrorNotifier_491d52(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines) {
    return ((NvBool)(0 != 0));
}

NvBool intrRequiresPossibleErrorNotifier_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines);

NvBool intrRequiresPossibleErrorNotifier_GA100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines);

NvBool intrRequiresPossibleErrorNotifier_GH100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines);


#ifdef __nvoc_intr_h_disabled
static inline NvBool intrRequiresPossibleErrorNotifier(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrRequiresPossibleErrorNotifier(pGpu, pIntr, pEngines) intrRequiresPossibleErrorNotifier_491d52(pGpu, pIntr, pEngines)
#endif //__nvoc_intr_h_disabled

#define intrRequiresPossibleErrorNotifier_HAL(pGpu, pIntr, pEngines) intrRequiresPossibleErrorNotifier(pGpu, pIntr, pEngines)

// --- intrReadErrCont ------------------------------------------------------
// _4a4dee: generated constant-zero body; TU102 reads the real register.
static inline NvU32 intrReadErrCont_4a4dee(OBJGPU *pGpu, struct Intr *pIntr) {
    return 0;
}

NvU32 intrReadErrCont_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadErrCont(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadErrCont(pGpu, pIntr) intrReadErrCont_4a4dee(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrReadErrCont_HAL(pGpu, pIntr) intrReadErrCont(pGpu, pIntr)
1503
// --- intrGetPendingStall / intrGetPendingStallEngines ---------------------
// Collect pending stall interrupts into an engine bitvector.
NV_STATUS intrGetPendingStall_GP100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingStall(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingStall(pGpu, pIntr, arg3, arg4) intrGetPendingStall_GP100(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingStall_HAL(pGpu, pIntr, arg3, arg4) intrGetPendingStall(pGpu, pIntr, arg3, arg4)

NV_STATUS intrGetPendingStallEngines_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingStallEngines(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingStallEngines(pGpu, pIntr, arg3, arg4) intrGetPendingStallEngines_TU102(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingStallEngines_HAL(pGpu, pIntr, arg3, arg4) intrGetPendingStallEngines(pGpu, pIntr, arg3, arg4)

// --- intrIsIntrEnabled ----------------------------------------------------
NvBool intrIsIntrEnabled_IMPL(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3);


#ifdef __nvoc_intr_h_disabled
static inline NvBool intrIsIntrEnabled(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrIsIntrEnabled(pGpu, pIntr, arg3) intrIsIntrEnabled_IMPL(pGpu, pIntr, arg3)
#endif //__nvoc_intr_h_disabled

#define intrIsIntrEnabled_HAL(pGpu, pIntr, arg3) intrIsIntrEnabled(pGpu, pIntr, arg3)

// --- intrSetHubLeafIntr ---------------------------------------------------
// _b3696a: generated empty body (no-op on chips without this feature).
static inline void intrSetHubLeafIntr_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 *arg4, NvU32 *arg5, struct THREAD_STATE_NODE *arg6) {
    return;
}


#ifdef __nvoc_intr_h_disabled
static inline void intrSetHubLeafIntr(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 *arg4, NvU32 *arg5, struct THREAD_STATE_NODE *arg6) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetHubLeafIntr(pGpu, pIntr, arg3, arg4, arg5, arg6) intrSetHubLeafIntr_b3696a(pGpu, pIntr, arg3, arg4, arg5, arg6)
#endif //__nvoc_intr_h_disabled

#define intrSetHubLeafIntr_HAL(pGpu, pIntr, arg3, arg4, arg5, arg6) intrSetHubLeafIntr(pGpu, pIntr, arg3, arg4, arg5, arg6)

// --- intrGetHubLeafIntrPending --------------------------------------------
void intrGetHubLeafIntrPending_STUB(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4);


#ifdef __nvoc_intr_h_disabled
static inline void intrGetHubLeafIntrPending(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetHubLeafIntrPending(pGpu, pIntr, arg3, arg4) intrGetHubLeafIntrPending_STUB(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

#define intrGetHubLeafIntrPending_HAL(pGpu, pIntr, arg3, arg4) intrGetHubLeafIntrPending(pGpu, pIntr, arg3, arg4)

// --- intrRefetchInterruptTable --------------------------------------------
NV_STATUS intrRefetchInterruptTable_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrRefetchInterruptTable(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrRefetchInterruptTable(pGpu, pIntr) intrRefetchInterruptTable_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrRefetchInterruptTable_HAL(pGpu, pIntr) intrRefetchInterruptTable(pGpu, pIntr)
1587
// --- Engine-state (OBJENGSTATE) entry points ------------------------------
// Construct/pre-init/init/destroy hooks implemented chip-independently.
NV_STATUS intrConstructEngine_IMPL(OBJGPU *pGpu, struct Intr *pIntr, ENGDESCRIPTOR arg3);

NV_STATUS intrStatePreInitLocked_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

NV_STATUS intrStateInitUnlocked_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

NV_STATUS intrStateInitLocked_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

void intrStateDestroy_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

// Decode the stall-interrupt enable value; _4a4dee is the constant-0 stub.
NvU32 intrDecodeStallIntrEn_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3);

static inline NvU32 intrDecodeStallIntrEn_4a4dee(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3) {
    return 0;
}

// _f2d351: generated stub that asserts unconditionally (not expected to be
// reached on configurations that select it).
static inline void intrServiceVirtual_f2d351(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_PRECOMP(0);
}

void intrServiceVirtual_TU102(OBJGPU *pGpu, struct Intr *pIntr);

// Trigger the per-GFID priv doorbell; _5baef9 asserts + NOT_SUPPORTED.
static inline NV_STATUS intrTriggerPrivDoorbell_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrTriggerPrivDoorbell_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid);

// Fill a 2-element array of interrupt vectors serviceable without the lock.
void intrGetLocklessVectorsInRmSubtree_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 (*pInterruptVectors)[2]);

void intrGetLocklessVectorsInRmSubtree_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 (*pInterruptVectors)[2]);

// Display interrupt enable; _b3696a is the generated empty (no-op) body.
void intrSetDisplayInterruptEnable_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable, struct THREAD_STATE_NODE *pThreadState);

static inline void intrSetDisplayInterruptEnable_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable, struct THREAD_STATE_NODE *pThreadState) {
    return;
}

// --- Top-level enable set/clear register accessors ------------------------
// CPU-RM vs GSP-RM builds, Turing and Ampere (GA102) variants.
NvU32 intrReadRegTopEnSet_CPU_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);

NvU32 intrReadRegTopEnSet_CPU_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);

NvU32 intrReadRegTopEnSet_GSP_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);

NvU32 intrReadRegTopEnSet_GSP_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);

void intrWriteRegTopEnSet_CPU_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

void intrWriteRegTopEnSet_CPU_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

void intrWriteRegTopEnSet_GSP_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

void intrWriteRegTopEnSet_GSP_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

void intrWriteRegTopEnClear_CPU_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

void intrWriteRegTopEnClear_CPU_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

void intrWriteRegTopEnClear_GSP_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

void intrWriteRegTopEnClear_GSP_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, NvU32 arg4, struct THREAD_STATE_NODE *arg5);

// Geometry of the interrupt tree (leaf count/size) and nonstall top mask.
NvU32 intrGetNumLeaves_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU32 intrGetNumLeaves_GH100(OBJGPU *pGpu, struct Intr *pIntr);

NvU32 intrGetLeafSize_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU32 intrGetLeafSize_GH100(OBJGPU *pGpu, struct Intr *pIntr);

NvU64 intrGetIntrTopNonStallMask_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU64 intrGetIntrTopNonStallMask_GH100(OBJGPU *pGpu, struct Intr *pIntr);

// Debug sanity checks for engine vector assignments; _b3696a bodies are
// generated no-ops for chips where the check does not apply.
void intrSanityCheckEngineIntrStallVector_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

void intrSanityCheckEngineIntrStallVector_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

static inline void intrSanityCheckEngineIntrStallVector_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
    return;
}

void intrSanityCheckEngineIntrNotificationVector_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

void intrSanityCheckEngineIntrNotificationVector_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

static inline void intrSanityCheckEngineIntrNotificationVector_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
    return;
}
1677
// --- State load/unload and interrupt-table initialization -----------------
NV_STATUS intrStateLoad_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3);

NV_STATUS intrStateUnload_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3);

// Build the interrupt subtree map; _395e98 reports "not supported".
static inline NV_STATUS intrInitSubtreeMap_395e98(OBJGPU *pGpu, struct Intr *pIntr) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrInitSubtreeMap_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NV_STATUS intrInitSubtreeMap_GH100(OBJGPU *pGpu, struct Intr *pIntr);

// VF (virtual function) vs kernel-RM interrupt-table initialization.
NV_STATUS intrInitInterruptTable_VF(OBJGPU *pGpu, struct Intr *pIntr);

NV_STATUS intrInitInterruptTable_KERNEL(OBJGPU *pGpu, struct Intr *pIntr);

// Set the interrupt mask; _46f6a7 silently reports "not supported".
NV_STATUS intrSetIntrMask_GP100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4);

static inline NV_STATUS intrSetIntrMask_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
    return NV_ERR_NOT_SUPPORTED;
}

// Program / query the HW interrupt-enable state; _d44104 is a no-op and
// _b2b553 is a constant-0 stub.
void intrSetIntrEnInHw_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4);

static inline void intrSetIntrEnInHw_d44104(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, struct THREAD_STATE_NODE *arg4) {
    return;
}

NvU32 intrGetIntrEnFromHw_GP100(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3);

static inline NvU32 intrGetIntrEnFromHw_b2b553(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg3) {
    return 0;
}

// NVOC destructor hook for the Intr object.
void intrDestruct_IMPL(struct Intr *pIntr);

#define __nvoc_intrDestruct(pIntr) intrDestruct_IMPL(pIntr)
NV_STATUS intrServiceNonStallBottomHalf_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4);

#ifdef __nvoc_intr_h_disabled
// Stub when Intr is compiled out: asserts and reports "not supported".
static inline NV_STATUS intrServiceNonStallBottomHalf(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrServiceNonStallBottomHalf(pGpu, pIntr, arg3, arg4) intrServiceNonStallBottomHalf_IMPL(pGpu, pIntr, arg3, arg4)
#endif //__nvoc_intr_h_disabled

// Service notification (non-stall) interrupt records for one MC engine.
NV_STATUS intrServiceNotificationRecords_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineIdx, struct THREAD_STATE_NODE *arg4);

#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrServiceNotificationRecords(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineIdx, struct THREAD_STATE_NODE *arg4) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrServiceNotificationRecords(pGpu, pIntr, mcEngineIdx, arg4) intrServiceNotificationRecords_IMPL(pGpu, pIntr, mcEngineIdx, arg4)
#endif //__nvoc_intr_h_disabled

// Mask of UVM-shared leaf enable bits to disable; disabled stub returns 0.
NvU64 intrGetUvmSharedLeafEnDisableMask_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline NvU64 intrGetUvmSharedLeafEnDisableMask(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetUvmSharedLeafEnDisableMask(pGpu, pIntr) intrGetUvmSharedLeafEnDisableMask_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

// Legacy stall mask at the interrupt-tree top level (no GPU arg needed).
NvU64 intrGetIntrTopLegacyStallMask_IMPL(struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline NvU64 intrGetIntrTopLegacyStallMask(struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrTopLegacyStallMask(pIntr) intrGetIntrTopLegacyStallMask_IMPL(pIntr)
#endif //__nvoc_intr_h_disabled

// Mask of top-level interrupts that require the lock to service.
NvU64 intrGetIntrTopLockedMask_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline NvU64 intrGetIntrTopLockedMask(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrTopLockedMask(pGpu, pIntr) intrGetIntrTopLockedMask_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled
1769
1770 NV_STATUS intrGetSubtreeRange_IMPL(struct Intr *pIntr, NV2080_INTR_CATEGORY category, NV2080_INTR_CATEGORY_SUBTREE_MAP *pRange);
1771
1772 #ifdef __nvoc_intr_h_disabled
intrGetSubtreeRange(struct Intr * pIntr,NV2080_INTR_CATEGORY category,NV2080_INTR_CATEGORY_SUBTREE_MAP * pRange)1773 static inline NV_STATUS intrGetSubtreeRange(struct Intr *pIntr, NV2080_INTR_CATEGORY category, NV2080_INTR_CATEGORY_SUBTREE_MAP *pRange) {
1774 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1775 return NV_ERR_NOT_SUPPORTED;
1776 }
1777 #else //__nvoc_intr_h_disabled
1778 #define intrGetSubtreeRange(pIntr, category, pRange) intrGetSubtreeRange_IMPL(pIntr, category, pRange)
1779 #endif //__nvoc_intr_h_disabled
1780
1781 NvU64 intrGetIntrTopCategoryMask_IMPL(struct Intr *pIntr, NV2080_INTR_CATEGORY category);
1782
1783 #ifdef __nvoc_intr_h_disabled
intrGetIntrTopCategoryMask(struct Intr * pIntr,NV2080_INTR_CATEGORY category)1784 static inline NvU64 intrGetIntrTopCategoryMask(struct Intr *pIntr, NV2080_INTR_CATEGORY category) {
1785 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1786 return 0;
1787 }
1788 #else //__nvoc_intr_h_disabled
1789 #define intrGetIntrTopCategoryMask(pIntr, category) intrGetIntrTopCategoryMask_IMPL(pIntr, category)
1790 #endif //__nvoc_intr_h_disabled
1791
1792 NV_STATUS intrSetInterruptEntry_IMPL(struct Intr *pIntr, INTR_TREE tree, NvU32 vector, const InterruptEntry *pEntry);
1793
1794 #ifdef __nvoc_intr_h_disabled
intrSetInterruptEntry(struct Intr * pIntr,INTR_TREE tree,NvU32 vector,const InterruptEntry * pEntry)1795 static inline NV_STATUS intrSetInterruptEntry(struct Intr *pIntr, INTR_TREE tree, NvU32 vector, const InterruptEntry *pEntry) {
1796 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1797 return NV_ERR_NOT_SUPPORTED;
1798 }
1799 #else //__nvoc_intr_h_disabled
1800 #define intrSetInterruptEntry(pIntr, tree, vector, pEntry) intrSetInterruptEntry_IMPL(pIntr, tree, vector, pEntry)
1801 #endif //__nvoc_intr_h_disabled
1802
1803 void intrServiceStallListAllGpusCond_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, NvBool arg4);
1804
1805 #ifdef __nvoc_intr_h_disabled
intrServiceStallListAllGpusCond(OBJGPU * pGpu,struct Intr * pIntr,union MC_ENGINE_BITVECTOR * arg3,NvBool arg4)1806 static inline void intrServiceStallListAllGpusCond(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, NvBool arg4) {
1807 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1808 }
1809 #else //__nvoc_intr_h_disabled
1810 #define intrServiceStallListAllGpusCond(pGpu, pIntr, arg3, arg4) intrServiceStallListAllGpusCond_IMPL(pGpu, pIntr, arg3, arg4)
1811 #endif //__nvoc_intr_h_disabled
1812
1813 void intrServiceStallListDevice_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, NvBool arg4);
1814
1815 #ifdef __nvoc_intr_h_disabled
intrServiceStallListDevice(OBJGPU * pGpu,struct Intr * pIntr,union MC_ENGINE_BITVECTOR * arg3,NvBool arg4)1816 static inline void intrServiceStallListDevice(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, NvBool arg4) {
1817 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1818 }
1819 #else //__nvoc_intr_h_disabled
1820 #define intrServiceStallListDevice(pGpu, pIntr, arg3, arg4) intrServiceStallListDevice_IMPL(pGpu, pIntr, arg3, arg4)
1821 #endif //__nvoc_intr_h_disabled
1822
1823 NvU32 intrServiceInterruptRecords_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg3, NvBool *arg4);
1824
1825 #ifdef __nvoc_intr_h_disabled
intrServiceInterruptRecords(OBJGPU * pGpu,struct Intr * pIntr,NvU16 arg3,NvBool * arg4)1826 static inline NvU32 intrServiceInterruptRecords(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg3, NvBool *arg4) {
1827 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1828 return 0;
1829 }
1830 #else //__nvoc_intr_h_disabled
1831 #define intrServiceInterruptRecords(pGpu, pIntr, arg3, arg4) intrServiceInterruptRecords_IMPL(pGpu, pIntr, arg3, arg4)
1832 #endif //__nvoc_intr_h_disabled
1833
1834 void intrQueueDpc_IMPL(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg3, DPCNODE *arg4);
1835
1836 #ifdef __nvoc_intr_h_disabled
intrQueueDpc(OBJGPU * pGpu,struct Intr * pIntr,DPCQUEUE * arg3,DPCNODE * arg4)1837 static inline void intrQueueDpc(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg3, DPCNODE *arg4) {
1838 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1839 }
1840 #else //__nvoc_intr_h_disabled
1841 #define intrQueueDpc(pGpu, pIntr, arg3, arg4) intrQueueDpc_IMPL(pGpu, pIntr, arg3, arg4)
1842 #endif //__nvoc_intr_h_disabled
1843
1844 DPCNODE *intrDequeueDpc_IMPL(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg3);
1845
1846 #ifdef __nvoc_intr_h_disabled
intrDequeueDpc(OBJGPU * pGpu,struct Intr * pIntr,DPCQUEUE * arg3)1847 static inline DPCNODE *intrDequeueDpc(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg3) {
1848 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1849 return NULL;
1850 }
1851 #else //__nvoc_intr_h_disabled
1852 #define intrDequeueDpc(pGpu, pIntr, arg3) intrDequeueDpc_IMPL(pGpu, pIntr, arg3)
1853 #endif //__nvoc_intr_h_disabled
1854
1855 NvBool intrIsDpcQueueEmpty_IMPL(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg3);
1856
1857 #ifdef __nvoc_intr_h_disabled
intrIsDpcQueueEmpty(OBJGPU * pGpu,struct Intr * pIntr,DPCQUEUE * arg3)1858 static inline NvBool intrIsDpcQueueEmpty(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg3) {
1859 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1860 return NV_FALSE;
1861 }
1862 #else //__nvoc_intr_h_disabled
1863 #define intrIsDpcQueueEmpty(pGpu, pIntr, arg3) intrIsDpcQueueEmpty_IMPL(pGpu, pIntr, arg3)
1864 #endif //__nvoc_intr_h_disabled
1865
1866 void intrQueueInterruptBasedDpc_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg3);
1867
1868 #ifdef __nvoc_intr_h_disabled
intrQueueInterruptBasedDpc(OBJGPU * pGpu,struct Intr * pIntr,NvU16 arg3)1869 static inline void intrQueueInterruptBasedDpc(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg3) {
1870 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1871 }
1872 #else //__nvoc_intr_h_disabled
1873 #define intrQueueInterruptBasedDpc(pGpu, pIntr, arg3) intrQueueInterruptBasedDpc_IMPL(pGpu, pIntr, arg3)
1874 #endif //__nvoc_intr_h_disabled
1875
1876 NvU32 intrConvertEngineMaskToPmcIntrMask_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3);
1877
1878 #ifdef __nvoc_intr_h_disabled
intrConvertEngineMaskToPmcIntrMask(OBJGPU * pGpu,struct Intr * pIntr,union MC_ENGINE_BITVECTOR * arg3)1879 static inline NvU32 intrConvertEngineMaskToPmcIntrMask(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3) {
1880 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1881 return 0;
1882 }
1883 #else //__nvoc_intr_h_disabled
1884 #define intrConvertEngineMaskToPmcIntrMask(pGpu, pIntr, arg3) intrConvertEngineMaskToPmcIntrMask_IMPL(pGpu, pIntr, arg3)
1885 #endif //__nvoc_intr_h_disabled
1886
1887 void intrConvertPmcIntrMaskToEngineMask_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, union MC_ENGINE_BITVECTOR *arg4);
1888
1889 #ifdef __nvoc_intr_h_disabled
intrConvertPmcIntrMaskToEngineMask(OBJGPU * pGpu,struct Intr * pIntr,NvU32 arg3,union MC_ENGINE_BITVECTOR * arg4)1890 static inline void intrConvertPmcIntrMaskToEngineMask(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg3, union MC_ENGINE_BITVECTOR *arg4) {
1891 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1892 }
1893 #else //__nvoc_intr_h_disabled
1894 #define intrConvertPmcIntrMaskToEngineMask(pGpu, pIntr, arg3, arg4) intrConvertPmcIntrMaskToEngineMask_IMPL(pGpu, pIntr, arg3, arg4)
1895 #endif //__nvoc_intr_h_disabled
1896
1897 INTR_TABLE_ENTRY *intrGetInterruptTableEntryFromEngineId_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineId, NvBool bNonStall);
1898
1899 #ifdef __nvoc_intr_h_disabled
intrGetInterruptTableEntryFromEngineId(OBJGPU * pGpu,struct Intr * pIntr,NvU16 mcEngineId,NvBool bNonStall)1900 static inline INTR_TABLE_ENTRY *intrGetInterruptTableEntryFromEngineId(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineId, NvBool bNonStall) {
1901 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1902 return NULL;
1903 }
1904 #else //__nvoc_intr_h_disabled
1905 #define intrGetInterruptTableEntryFromEngineId(pGpu, pIntr, mcEngineId, bNonStall) intrGetInterruptTableEntryFromEngineId_IMPL(pGpu, pIntr, mcEngineId, bNonStall)
1906 #endif //__nvoc_intr_h_disabled
1907
1908 NvU32 intrGetVectorFromEngineId_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineId, NvBool bNonStall);
1909
1910 #ifdef __nvoc_intr_h_disabled
intrGetVectorFromEngineId(OBJGPU * pGpu,struct Intr * pIntr,NvU16 mcEngineId,NvBool bNonStall)1911 static inline NvU32 intrGetVectorFromEngineId(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineId, NvBool bNonStall) {
1912 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1913 return 0;
1914 }
1915 #else //__nvoc_intr_h_disabled
1916 #define intrGetVectorFromEngineId(pGpu, pIntr, mcEngineId, bNonStall) intrGetVectorFromEngineId_IMPL(pGpu, pIntr, mcEngineId, bNonStall)
1917 #endif //__nvoc_intr_h_disabled
1918
1919 NV_STATUS intrGetSmallestNotificationVector_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3);
1920
1921 #ifdef __nvoc_intr_h_disabled
intrGetSmallestNotificationVector(OBJGPU * pGpu,struct Intr * pIntr,NvU32 * arg3)1922 static inline NV_STATUS intrGetSmallestNotificationVector(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg3) {
1923 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1924 return NV_ERR_NOT_SUPPORTED;
1925 }
1926 #else //__nvoc_intr_h_disabled
1927 #define intrGetSmallestNotificationVector(pGpu, pIntr, arg3) intrGetSmallestNotificationVector_IMPL(pGpu, pIntr, arg3)
1928 #endif //__nvoc_intr_h_disabled
1929
void intrSetIntrMaskUnblocked_IMPL(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg2);

#ifdef __nvoc_intr_h_disabled
// Stub when Intr is compiled out: asserts; mask state is unchanged.
static inline void intrSetIntrMaskUnblocked(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetIntrMaskUnblocked(pIntr, arg2) intrSetIntrMaskUnblocked_IMPL(pIntr, arg2)
#endif //__nvoc_intr_h_disabled

void intrGetIntrMaskUnblocked_IMPL(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg2);

#ifdef __nvoc_intr_h_disabled
// Stub when Intr is compiled out: asserts; output bitvector is untouched.
static inline void intrGetIntrMaskUnblocked(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrMaskUnblocked(pIntr, arg2) intrGetIntrMaskUnblocked_IMPL(pIntr, arg2)
#endif //__nvoc_intr_h_disabled

1950 void intrSetIntrMaskFlags_IMPL(struct Intr *pIntr, NvU32 arg2);
1951
1952 #ifdef __nvoc_intr_h_disabled
intrSetIntrMaskFlags(struct Intr * pIntr,NvU32 arg2)1953 static inline void intrSetIntrMaskFlags(struct Intr *pIntr, NvU32 arg2) {
1954 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1955 }
1956 #else //__nvoc_intr_h_disabled
1957 #define intrSetIntrMaskFlags(pIntr, arg2) intrSetIntrMaskFlags_IMPL(pIntr, arg2)
1958 #endif //__nvoc_intr_h_disabled
1959
1960 NvU32 intrGetIntrMaskFlags_IMPL(struct Intr *pIntr);
1961
1962 #ifdef __nvoc_intr_h_disabled
intrGetIntrMaskFlags(struct Intr * pIntr)1963 static inline NvU32 intrGetIntrMaskFlags(struct Intr *pIntr) {
1964 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1965 return 0;
1966 }
1967 #else //__nvoc_intr_h_disabled
1968 #define intrGetIntrMaskFlags(pIntr) intrGetIntrMaskFlags_IMPL(pIntr)
1969 #endif //__nvoc_intr_h_disabled
1970
1971 void intrSetDefaultIntrEn_IMPL(struct Intr *pIntr, NvU32 arg2);
1972
1973 #ifdef __nvoc_intr_h_disabled
intrSetDefaultIntrEn(struct Intr * pIntr,NvU32 arg2)1974 static inline void intrSetDefaultIntrEn(struct Intr *pIntr, NvU32 arg2) {
1975 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1976 }
1977 #else //__nvoc_intr_h_disabled
1978 #define intrSetDefaultIntrEn(pIntr, arg2) intrSetDefaultIntrEn_IMPL(pIntr, arg2)
1979 #endif //__nvoc_intr_h_disabled
1980
1981 NvU32 intrGetDefaultIntrEn_IMPL(struct Intr *pIntr);
1982
1983 #ifdef __nvoc_intr_h_disabled
intrGetDefaultIntrEn(struct Intr * pIntr)1984 static inline NvU32 intrGetDefaultIntrEn(struct Intr *pIntr) {
1985 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1986 return 0;
1987 }
1988 #else //__nvoc_intr_h_disabled
1989 #define intrGetDefaultIntrEn(pIntr) intrGetDefaultIntrEn_IMPL(pIntr)
1990 #endif //__nvoc_intr_h_disabled
1991
1992 void intrSetIntrEn_IMPL(struct Intr *pIntr, NvU32 arg2);
1993
1994 #ifdef __nvoc_intr_h_disabled
intrSetIntrEn(struct Intr * pIntr,NvU32 arg2)1995 static inline void intrSetIntrEn(struct Intr *pIntr, NvU32 arg2) {
1996 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1997 }
1998 #else //__nvoc_intr_h_disabled
1999 #define intrSetIntrEn(pIntr, arg2) intrSetIntrEn_IMPL(pIntr, arg2)
2000 #endif //__nvoc_intr_h_disabled
2001
2002 NvU32 intrGetIntrEn_IMPL(struct Intr *pIntr);
2003
2004 #ifdef __nvoc_intr_h_disabled
intrGetIntrEn(struct Intr * pIntr)2005 static inline NvU32 intrGetIntrEn(struct Intr *pIntr) {
2006 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
2007 return 0;
2008 }
2009 #else //__nvoc_intr_h_disabled
2010 #define intrGetIntrEn(pIntr) intrGetIntrEn_IMPL(pIntr)
2011 #endif //__nvoc_intr_h_disabled
2012
2013 void intrSaveIntrEn0FromHw_IMPL(OBJGPU *pGpu, struct Intr *pIntr);
2014
2015 #ifdef __nvoc_intr_h_disabled
intrSaveIntrEn0FromHw(OBJGPU * pGpu,struct Intr * pIntr)2016 static inline void intrSaveIntrEn0FromHw(OBJGPU *pGpu, struct Intr *pIntr) {
2017 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
2018 }
2019 #else //__nvoc_intr_h_disabled
2020 #define intrSaveIntrEn0FromHw(pGpu, pIntr) intrSaveIntrEn0FromHw_IMPL(pGpu, pIntr)
2021 #endif //__nvoc_intr_h_disabled
2022
2023 void intrGetGmmuInterrupts_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4);
2024
2025 #ifdef __nvoc_intr_h_disabled
intrGetGmmuInterrupts(OBJGPU * pGpu,struct Intr * pIntr,union MC_ENGINE_BITVECTOR * arg3,struct THREAD_STATE_NODE * arg4)2026 static inline void intrGetGmmuInterrupts(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg3, struct THREAD_STATE_NODE *arg4) {
2027 NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
2028 }
2029 #else //__nvoc_intr_h_disabled
2030 #define intrGetGmmuInterrupts(pGpu, pIntr, arg3, arg4) intrGetGmmuInterrupts_IMPL(pGpu, pIntr, arg3, arg4)
2031 #endif //__nvoc_intr_h_disabled
2032
2033 #undef PRIVATE_FIELD
2034
2035
2036 // This mask is used for interrupts that should be masked off in the PMC tree
2037 #define NV_PMC_INTR_INVALID_MASK (0)
2038
2039 #endif // INTR_H
2040
2041 #ifdef __cplusplus
2042 } // extern "C"
2043 #endif
2044
2045 #endif // _G_INTR_NVOC_H_
2046