1 #ifndef _G_INTR_NVOC_H_
2 #define _G_INTR_NVOC_H_
3 #include "nvoc/runtime.h"
4 
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8 
9 /*
10  * SPDX-FileCopyrightText: Copyright (c) 2006-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11  * SPDX-License-Identifier: MIT
12  *
13  * Permission is hereby granted, free of charge, to any person obtaining a
14  * copy of this software and associated documentation files (the "Software"),
15  * to deal in the Software without restriction, including without limitation
16  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17  * and/or sell copies of the Software, and to permit persons to whom the
18  * Software is furnished to do so, subject to the following conditions:
19  *
20  * The above copyright notice and this permission notice shall be included in
21  * all copies or substantial portions of the Software.
22  *
23  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29  * DEALINGS IN THE SOFTWARE.
30  */
31 
32 #include "g_intr_nvoc.h"
33 
34 #ifndef INTR_H
35 #define INTR_H
36 
37 #include "kernel/gpu/eng_state.h"
38 #include "kernel/gpu/gpu.h"
39 #include "kernel/gpu/intr/engine_idx.h"
40 #include "kernel/gpu/intr/intr_common.h"
41 #include "kernel/gpu/intr/intr_service.h"
42 
43 #include "ctrl/ctrl2080/ctrl2080internal.h"
44 
45 #include "dev_ctrl_defines.h"
46 #include "libraries/containers/list.h"
47 #include "libraries/containers/vector.h"
48 #include "libraries/nvoc/utility.h"
49 #include "libraries/utils/nvbitvector.h"
50 
51 
52 //
53 // Interrupt Type
54 //
55 // Abstraction of the disabled/software/hardware enumeration in NV_PMC_INTR_EN_0_INTA
56 // !!! This enumeration must exactly match NV_PMC_INTR_EN_0_INTA !!!
57 //
58 
59 #define INTERRUPT_TYPE_DISABLED         0
60 #define INTERRUPT_TYPE_HARDWARE         1
61 #define INTERRUPT_TYPE_SOFTWARE         2
62 #define INTERRUPT_TYPE_MULTI            3
63 #define INTERRUPT_TYPE_MAX              INTERRUPT_TYPE_MULTI
64 
65 #define INTERRUPT_MASK_DISABLED          0x00000000
66 #define INTERRUPT_MASK_HARDWARE          0x7fffffff
67 #define INTERRUPT_MASK_SOFTWARE          0x80000000
68 #define INTERRUPT_MASK_ENABLED           0xffffffff
69 
70 /**
71  * @brief Each entry corresponds to a top level interrupt
72  *
73  * This structure will eventually be replaced by #InterruptEntry.
74  */
75 typedef struct
76 {
77     /** MC_ENGINE_IDX* value */
78     NvU16 mcEngine;
79     /** Bit in top level PMC interrupt registers */
80     NvU32 pmcIntrMask;
81     /** Interrupt vector in CTRL interrupt tree (Turing+). For non-host driven
82      *  engines, this is their single interrupt vector at top level; while for
83      *  host driven engines, this is their stalling interrupt vector
84      */
85     NvU32 intrVector;
86     /** Nonstalling interrupt vector in CTRL interrupt tree (Turing+). Only
87      *  valid for host driven engines. NV_INTR_VECTOR_INVALID signifies
88      *  unavailable
89      */
90     NvU32 intrVectorNonStall;
91 } INTR_TABLE_ENTRY;
92 
93 #define INTR_TABLE_MAX_INTRS_PER_ENTRY       6
94 
95 MAKE_VECTOR(InterruptTable, INTR_TABLE_ENTRY);
96 
97 
98 /*!
99  * Mapping from leaf level interrupt to conceptual interrupt name.
100  *
101  * - The interrupt vector is implicit from the tree / index of an array which
102  *   contains this struct.
103  * - The target is a conceptual name that represents the interrupt identified by
104  *   (MC_ENGINE_IDX*, INTR_KIND*) pair.
105  * - A service routine may or may not be actually registered to handle the
106  *   interrupt.
107  * - Multiple physical vectors can map to the same conceptual interrupt.
108  */
109 typedef struct
110 {
111     /*!
112      * MC_ENGINE_IDX* value.
113      *
114      * A value of #MC_ENGINE_IDX_NULL means that the vector corresponding to
115      * this entry is unused. Use #interruptEntryIsEmpty to check this.
116      */
117     NvU16 mcEngine;
118 
119     /*!
120      * INTR_KIND_* value.
121      *
122      * This allows multiple logically separate interrupts to map to a service
123      * routine via a common mcEngine value.
124      */
125     INTR_KIND intrKind;
126 
127     /*!
128      * If the interrupt should be handled.
129      *
130      * If this is false:
131      * - The interrupt may need to be visible for clients, VF, etc (error
132      *   containment).
133      * - It can be an interrupt to be triggered to notify RM running in a
134      *   different environment: doorbell, GSP triggered notifications to CPU.
135      * - The interrupt does not need to be serviced. There should be no
136      *   corresponding entry in the #intrServiceTable.
137      */
138     NvBool bService;
139 } InterruptEntry;
140 
141 static NV_FORCEINLINE NvBool
142 interruptEntryIsEmpty(const InterruptEntry *pEntry)
143 {
144     return pEntry->mcEngine == MC_ENGINE_IDX_NULL;
145 }
146 
147 
148 //
149 // Default value for intrStuckThreshold
150 #define INTR_STUCK_THRESHOLD 1000
151 
152 #define INTR_TABLE_INIT_KERNEL (1 << 0)
153 #define INTR_TABLE_INIT_PHYSICAL (1 << 1)
154 
155 /**
156  * @brief This enum specifies the type of DPC node
157  *      INTERRUPT_BASED_DPC: DPC queued for an interrupt source
158  *      SPECIAL_DPC        : DPC queued within processing of another interrupt
159  *                           source
160  *
161  * Currently only used on Fermi+.
162  */
163 typedef enum
164 {
165     INTERRUPT_BASED_DPC=0,
166     SPECIAL_DPC
167 } DPCTYPE;
168 
169 /**
170  * @brief This is a structure for a node on the DPC Queue
171  *          dpctype: Type of DPC for processing
172  *          dpcdata: Data required for dpc processing
173  *                   This union will contain dpctype specific data
174  *          pNext  : Pointer to the next DPC node
175  *
176  * Currently only used on Fermi+.
177  */
178 typedef struct _DPCNODE
179 {
180     DPCTYPE dpctype;
181     union _dpcdata
182     {
183         MC_ENGINE_BITVECTOR pendingEngines;
184     } dpcdata;
185 
186     struct _DPCNODE *pNext;
187 } DPCNODE;
188 
189 /**
190  * @brief This is a structure for the DPC Queue
191  *          numEntries: Number of entries currently on DPC queue (debugging purpose)
192  *          pFront    : Front pointer for the queue
193  *          pRear     : Rear pointer for the queue
194  *
195  * Currently only used on Fermi+.
196  */
197 typedef struct
198 {
199     NvU32    numEntries;
200     DPCNODE *pFront;
201     DPCNODE *pRear;
202 } DPCQUEUE;
203 
// Data related to PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING
typedef struct
{
    NvU32 flags;                    // INTR_MASK_FLAGS_* bits
    NvU32 cached;                   // Pascal+, to implement intr mask in SW.
    // NOTE(review): per-engine bitvectors tracking interrupt-mask state; exact
    // semantics of each field are not evident from this header — see users.
    MC_ENGINE_BITVECTOR engMaskUnblocked;
    MC_ENGINE_BITVECTOR engMaskOrig;
    MC_ENGINE_BITVECTOR engMaskIntrsSeen;
    MC_ENGINE_BITVECTOR engMaskIntrsDisabled;
} INTR_MASK;
214 
215 //
216 // interrupt mask information used for lazy interrupt disable and interrupt
217 // masking for locking.
218 //
219 typedef struct
220 {
221     NvU32 intrEnable;
222     MC_ENGINE_BITVECTOR intrMask;
223 } INTR_MASK_CTX;
224 
225 
226 //!
227 //! List of interrupt trees that RM sees.
228 //!
229 //! Kernel RM should determine number of implemented vectors using the actual
230 //! interrupt table fetched.
231 //!
232 typedef enum
233 {
234     INTR_TREE_CPU,
235     INTR_TREE_COUNT
236 } INTR_TREE;
237 
238 
239 //
240 // IntrMask Locking Flag Defines
241 //
242 #define INTR_MASK_FLAGS_ISR_SKIP_MASK_UPDATE     NVBIT(0)
243 
244 
// Forward declaration of Device plus its NVOC typedef and class id
// (generated boilerplate; guards allow repetition across generated headers).
struct Device;

#ifndef __NVOC_CLASS_Device_TYPEDEF__
#define __NVOC_CLASS_Device_TYPEDEF__
typedef struct Device Device;
#endif /* __NVOC_CLASS_Device_TYPEDEF__ */

#ifndef __nvoc_class_id_Device
#define __nvoc_class_id_Device 0xe0ac20
#endif /* __nvoc_class_id_Device */
255 
256 
257 
// PRIVATE_FIELD gates access to private NVOC fields: translation units that
// define NVOC_INTR_H_PRIVATE_ACCESS_ALLOWED see the real field name; all
// others get the mangled NVOC_PRIVATE_FIELD form.
#ifdef NVOC_INTR_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
// Generated NVOC class: Intr derives from OBJENGSTATE. Layout and member
// order are produced by the NVOC generator — do not reorder or edit by hand.
struct Intr {
    // RTTI and ancestor pointers (NVOC bookkeeping)
    const struct NVOC_RTTI *__nvoc_rtti;
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct Object *__nvoc_pbase_Object;
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
    struct Intr *__nvoc_pbase_Intr;
    // Per-object virtual method table, invoked via the intr*_DISPATCH macros below
    NV_STATUS (*__intrConstructEngine__)(OBJGPU *, struct Intr *, ENGDESCRIPTOR);
    NV_STATUS (*__intrStateInitUnlocked__)(OBJGPU *, struct Intr *);
    NV_STATUS (*__intrStateInitLocked__)(OBJGPU *, struct Intr *);
    void (*__intrStateDestroy__)(OBJGPU *, struct Intr *);
    NvU32 (*__intrDecodeStallIntrEn__)(OBJGPU *, struct Intr *, NvU32);
    NvU32 (*__intrGetNonStallBaseVector__)(OBJGPU *, struct Intr *);
    NvU64 (*__intrGetUvmSharedLeafEnDisableMask__)(OBJGPU *, struct Intr *);
    void (*__intrSetDisplayInterruptEnable__)(OBJGPU *, struct Intr *, NvBool, struct THREAD_STATE_NODE *);
    NvU32 (*__intrReadRegTopEnSet__)(OBJGPU *, struct Intr *, NvU32, struct THREAD_STATE_NODE *);
    void (*__intrWriteRegTopEnSet__)(OBJGPU *, struct Intr *, NvU32, NvU32, struct THREAD_STATE_NODE *);
    void (*__intrWriteRegTopEnClear__)(OBJGPU *, struct Intr *, NvU32, NvU32, struct THREAD_STATE_NODE *);
    NvU32 (*__intrGetNumLeaves__)(OBJGPU *, struct Intr *);
    NvU32 (*__intrGetLeafSize__)(OBJGPU *, struct Intr *);
    NvU64 (*__intrGetIntrTopNonStallMask__)(OBJGPU *, struct Intr *);
    void (*__intrSanityCheckEngineIntrStallVector__)(OBJGPU *, struct Intr *, NvU32, NvU16);
    void (*__intrSanityCheckEngineIntrNotificationVector__)(OBJGPU *, struct Intr *, NvU32, NvU16);
    NV_STATUS (*__intrStateLoad__)(OBJGPU *, struct Intr *, NvU32);
    NV_STATUS (*__intrStateUnload__)(OBJGPU *, struct Intr *, NvU32);
    NV_STATUS (*__intrSetIntrMask__)(OBJGPU *, struct Intr *, union MC_ENGINE_BITVECTOR *, struct THREAD_STATE_NODE *);
    void (*__intrSetIntrEnInHw__)(OBJGPU *, struct Intr *, NvU32, struct THREAD_STATE_NODE *);
    NvU32 (*__intrGetIntrEnFromHw__)(OBJGPU *, struct Intr *, struct THREAD_STATE_NODE *);
    // OBJENGSTATE-inherited virtuals (engine state lifecycle hooks)
    NV_STATUS (*__intrStatePreLoad__)(POBJGPU, struct Intr *, NvU32);
    NV_STATUS (*__intrStatePostUnload__)(POBJGPU, struct Intr *, NvU32);
    NV_STATUS (*__intrStatePreUnload__)(POBJGPU, struct Intr *, NvU32);
    void (*__intrInitMissing__)(POBJGPU, struct Intr *);
    NV_STATUS (*__intrStatePreInitLocked__)(POBJGPU, struct Intr *);
    NV_STATUS (*__intrStatePreInitUnlocked__)(POBJGPU, struct Intr *);
    NV_STATUS (*__intrStatePostLoad__)(POBJGPU, struct Intr *, NvU32);
    NvBool (*__intrIsPresent__)(POBJGPU, struct Intr *);
    // PDB properties (boolean feature / workaround flags)
    NvBool PDB_PROP_INTR_ENABLE_DETAILED_LOGS;
    NvBool PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC;
    NvBool PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS;
    NvBool PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET;
    NvBool PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE;
    NvBool PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING;
    NvBool PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING;
    NvBool PDB_PROP_INTR_MASK_SUPPORTED;
    // Instance state
    NvU32 nonStallPmcIntrMask;
    NvU64 uvmSharedCpuLeafEn;
    NvU64 uvmSharedCpuLeafEnDisableMask;
    NvU32 replayableFaultIntrVector;
    NvU32 accessCntrIntrVector;
    NvU32 displayIntrVector;
    NvU64 intrTopEnMask;
    InterruptTable intrTable;
    IntrServiceRecord intrServiceTable[168];   // NOTE(review): presumably indexed by MC_ENGINE_IDX — confirm
    InterruptEntry *(vectorToMcIdx[1]);        // Per-tree vector -> InterruptEntry tables (one per INTR_TREE)
    NvLength vectorToMcIdxCounts[1];           // Element counts for vectorToMcIdx tables
    NvBool bDefaultNonstallNotify;
    NvBool bUseLegacyVectorAssignment;
    NV2080_INTR_CATEGORY_SUBTREE_MAP subtreeMap[7];
    NvBool bDpcStarted;
    union MC_ENGINE_BITVECTOR pmcIntrPending;
    DPCQUEUE dpcQueue;
    NvU32 intrStuckThreshold;                  // Defaults to INTR_STUCK_THRESHOLD
    INTR_MASK intrMask;
    union MC_ENGINE_BITVECTOR helperEngineMask;
    // Cached / saved interrupt-enable register state
    NvU32 intrEn0;
    NvU32 intrCachedEn0;
    NvU32 intrCachedEnSet;
    NvU32 intrCachedEnClear;
    NvU32 intrEn0Orig;
    NvBool halIntrEnabled;
    NvU32 saveIntrEn0;
};
334 
// Generated typedef, class id, class definition, and cast helpers for Intr.
#ifndef __NVOC_CLASS_Intr_TYPEDEF__
#define __NVOC_CLASS_Intr_TYPEDEF__
typedef struct Intr Intr;
#endif /* __NVOC_CLASS_Intr_TYPEDEF__ */

#ifndef __nvoc_class_id_Intr
#define __nvoc_class_id_Intr 0xc06e44
#endif /* __nvoc_class_id_Intr */

extern const struct NVOC_CLASS_DEF __nvoc_class_def_Intr;

// Static upcast: follows the precomputed ancestor pointer (no runtime check).
#define __staticCast_Intr(pThis) \
    ((pThis)->__nvoc_pbase_Intr)

// Dynamic cast: RTTI-checked; yields NULL when Intr is compiled out.
#ifdef __nvoc_intr_h_disabled
#define __dynamicCast_Intr(pThis) ((Intr*)NULL)
#else //__nvoc_intr_h_disabled
#define __dynamicCast_Intr(pThis) \
    ((Intr*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Intr)))
#endif //__nvoc_intr_h_disabled
355 
// Property aliasing macros: map each Intr-level PDB_PROP_* name to the struct
// member (and containing base object, for inherited properties) that stores
// it. Note PDB_PROP_INTR_IS_MISSING lives on the OBJENGSTATE base.
#define PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC_BASE_CAST
#define PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC_BASE_NAME PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC
#define PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE_BASE_CAST
#define PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE_BASE_NAME PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE
#define PDB_PROP_INTR_MASK_SUPPORTED_BASE_CAST
#define PDB_PROP_INTR_MASK_SUPPORTED_BASE_NAME PDB_PROP_INTR_MASK_SUPPORTED
#define PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING_BASE_CAST
#define PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING_BASE_NAME PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING
#define PDB_PROP_INTR_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_INTR_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
#define PDB_PROP_INTR_ENABLE_DETAILED_LOGS_BASE_CAST
#define PDB_PROP_INTR_ENABLE_DETAILED_LOGS_BASE_NAME PDB_PROP_INTR_ENABLE_DETAILED_LOGS
#define PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET_BASE_CAST
#define PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET_BASE_NAME PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET
#define PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS_BASE_CAST
#define PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS_BASE_NAME PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS
#define PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING_BASE_CAST
#define PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING_BASE_NAME PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING
374 
// Generated object constructors for Intr.
NV_STATUS __nvoc_objCreateDynamic_Intr(Intr**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_Intr(Intr**, Dynamic*, NvU32);
#define __objCreate_Intr(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_Intr((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
380 
// Dispatch macros: each call routes through the per-object vtable entry via
// the corresponding *_DISPATCH helper. The _HAL variants expand to the same
// dispatch — HAL selection happens when the vtable is populated, not here.
#define intrConstructEngine(pGpu, pIntr, arg0) intrConstructEngine_DISPATCH(pGpu, pIntr, arg0)
#define intrStateInitUnlocked(pGpu, pIntr) intrStateInitUnlocked_DISPATCH(pGpu, pIntr)
#define intrStateInitLocked(pGpu, pIntr) intrStateInitLocked_DISPATCH(pGpu, pIntr)
#define intrStateDestroy(pGpu, pIntr) intrStateDestroy_DISPATCH(pGpu, pIntr)
#define intrDecodeStallIntrEn(pGpu, pIntr, arg0) intrDecodeStallIntrEn_DISPATCH(pGpu, pIntr, arg0)
#define intrDecodeStallIntrEn_HAL(pGpu, pIntr, arg0) intrDecodeStallIntrEn_DISPATCH(pGpu, pIntr, arg0)
#define intrGetNonStallBaseVector(pGpu, pIntr) intrGetNonStallBaseVector_DISPATCH(pGpu, pIntr)
#define intrGetNonStallBaseVector_HAL(pGpu, pIntr) intrGetNonStallBaseVector_DISPATCH(pGpu, pIntr)
#define intrGetUvmSharedLeafEnDisableMask(pGpu, pIntr) intrGetUvmSharedLeafEnDisableMask_DISPATCH(pGpu, pIntr)
#define intrGetUvmSharedLeafEnDisableMask_HAL(pGpu, pIntr) intrGetUvmSharedLeafEnDisableMask_DISPATCH(pGpu, pIntr)
#define intrSetDisplayInterruptEnable(pGpu, pIntr, bEnable, pThreadState) intrSetDisplayInterruptEnable_DISPATCH(pGpu, pIntr, bEnable, pThreadState)
#define intrSetDisplayInterruptEnable_HAL(pGpu, pIntr, bEnable, pThreadState) intrSetDisplayInterruptEnable_DISPATCH(pGpu, pIntr, bEnable, pThreadState)
#define intrReadRegTopEnSet(pGpu, pIntr, arg0, arg1) intrReadRegTopEnSet_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrReadRegTopEnSet_HAL(pGpu, pIntr, arg0, arg1) intrReadRegTopEnSet_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrWriteRegTopEnSet(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegTopEnSet_DISPATCH(pGpu, pIntr, arg0, arg1, arg2)
#define intrWriteRegTopEnSet_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegTopEnSet_DISPATCH(pGpu, pIntr, arg0, arg1, arg2)
#define intrWriteRegTopEnClear(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegTopEnClear_DISPATCH(pGpu, pIntr, arg0, arg1, arg2)
#define intrWriteRegTopEnClear_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegTopEnClear_DISPATCH(pGpu, pIntr, arg0, arg1, arg2)
#define intrGetNumLeaves(pGpu, pIntr) intrGetNumLeaves_DISPATCH(pGpu, pIntr)
#define intrGetNumLeaves_HAL(pGpu, pIntr) intrGetNumLeaves_DISPATCH(pGpu, pIntr)
#define intrGetLeafSize(pGpu, pIntr) intrGetLeafSize_DISPATCH(pGpu, pIntr)
#define intrGetLeafSize_HAL(pGpu, pIntr) intrGetLeafSize_DISPATCH(pGpu, pIntr)
#define intrGetIntrTopNonStallMask(pGpu, pIntr) intrGetIntrTopNonStallMask_DISPATCH(pGpu, pIntr)
#define intrGetIntrTopNonStallMask_HAL(pGpu, pIntr) intrGetIntrTopNonStallMask_DISPATCH(pGpu, pIntr)
#define intrSanityCheckEngineIntrStallVector(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrStallVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
#define intrSanityCheckEngineIntrStallVector_HAL(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrStallVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
#define intrSanityCheckEngineIntrNotificationVector(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrNotificationVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
#define intrSanityCheckEngineIntrNotificationVector_HAL(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrNotificationVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
#define intrStateLoad(pGpu, pIntr, arg0) intrStateLoad_DISPATCH(pGpu, pIntr, arg0)
#define intrStateLoad_HAL(pGpu, pIntr, arg0) intrStateLoad_DISPATCH(pGpu, pIntr, arg0)
#define intrStateUnload(pGpu, pIntr, arg0) intrStateUnload_DISPATCH(pGpu, pIntr, arg0)
#define intrStateUnload_HAL(pGpu, pIntr, arg0) intrStateUnload_DISPATCH(pGpu, pIntr, arg0)
#define intrSetIntrMask(pGpu, pIntr, arg0, arg1) intrSetIntrMask_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrSetIntrMask_HAL(pGpu, pIntr, arg0, arg1) intrSetIntrMask_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrSetIntrEnInHw(pGpu, pIntr, arg0, arg1) intrSetIntrEnInHw_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrSetIntrEnInHw_HAL(pGpu, pIntr, arg0, arg1) intrSetIntrEnInHw_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrGetIntrEnFromHw(pGpu, pIntr, arg0) intrGetIntrEnFromHw_DISPATCH(pGpu, pIntr, arg0)
#define intrGetIntrEnFromHw_HAL(pGpu, pIntr, arg0) intrGetIntrEnFromHw_DISPATCH(pGpu, pIntr, arg0)
#define intrStatePreLoad(pGpu, pEngstate, arg0) intrStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define intrStatePostUnload(pGpu, pEngstate, arg0) intrStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define intrStatePreUnload(pGpu, pEngstate, arg0) intrStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define intrInitMissing(pGpu, pEngstate) intrInitMissing_DISPATCH(pGpu, pEngstate)
#define intrStatePreInitLocked(pGpu, pEngstate) intrStatePreInitLocked_DISPATCH(pGpu, pEngstate)
#define intrStatePreInitUnlocked(pGpu, pEngstate) intrStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define intrStatePostLoad(pGpu, pEngstate, arg0) intrStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
#define intrIsPresent(pGpu, pEngstate) intrIsPresent_DISPATCH(pGpu, pEngstate)
NV_STATUS intrCheckFecsEventbufferPending_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool *arg1);


// When Intr is compiled out (__nvoc_intr_h_disabled) the call collapses to an
// asserting stub; otherwise it routes directly to the _IMPL implementation.
#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCheckFecsEventbufferPending(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCheckFecsEventbufferPending(pGpu, pIntr, arg0, arg1) intrCheckFecsEventbufferPending_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrCheckFecsEventbufferPending_HAL(pGpu, pIntr, arg0, arg1) intrCheckFecsEventbufferPending(pGpu, pIntr, arg0, arg1)
440 
NV_STATUS intrCheckAndServiceFecsEventbuffer_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


// Compiled out: asserting stub; otherwise routes to the _IMPL implementation.
#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCheckAndServiceFecsEventbuffer(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCheckAndServiceFecsEventbuffer(pGpu, pIntr, arg0, arg1) intrCheckAndServiceFecsEventbuffer_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrCheckAndServiceFecsEventbuffer_HAL(pGpu, pIntr, arg0, arg1) intrCheckAndServiceFecsEventbuffer(pGpu, pIntr, arg0, arg1)
454 
// Generated no-op variant: succeeds without doing anything.
static inline NV_STATUS intrStateDestroyPhysical_56cd7a(OBJGPU *pGpu, struct Intr *pIntr) {
    return NV_OK;
}

NV_STATUS intrStateDestroyPhysical_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


// Compiled out: asserting stub. Note the enabled path is generated to map to
// the no-op _56cd7a variant (not _IMPL) in this configuration.
#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrStateDestroyPhysical(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrStateDestroyPhysical(pGpu, pIntr) intrStateDestroyPhysical_56cd7a(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrStateDestroyPhysical_HAL(pGpu, pIntr) intrStateDestroyPhysical(pGpu, pIntr)
472 
// Generated no-op variant of the Bug 1470153 workaround.
static inline void intrSetInterruptMaskBug1470153War_b3696a(OBJGPU *pGpu, struct Intr *pIntr) {
    return;
}


// Compiled out: asserting stub; enabled path maps to the no-op _b3696a variant.
#ifdef __nvoc_intr_h_disabled
static inline void intrSetInterruptMaskBug1470153War(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetInterruptMaskBug1470153War(pGpu, pIntr) intrSetInterruptMaskBug1470153War_b3696a(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrSetInterruptMaskBug1470153War_HAL(pGpu, pIntr) intrSetInterruptMaskBug1470153War(pGpu, pIntr)
487 
NV_STATUS intrGetPendingNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingNonStall(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingNonStall(pGpu, pIntr, arg0, arg1) intrGetPendingNonStall_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingNonStall_HAL(pGpu, pIntr, arg0, arg1) intrGetPendingNonStall(pGpu, pIntr, arg0, arg1)
501 
NV_STATUS intrServiceNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrServiceNonStall(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrServiceNonStall(pGpu, pIntr, arg0, arg1) intrServiceNonStall_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrServiceNonStall_HAL(pGpu, pIntr, arg0, arg1) intrServiceNonStall(pGpu, pIntr, arg0, arg1)
515 
NvU32 intrGetNonStallEnable_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0);


// Compiled out: asserting stub returning 0; otherwise resolves to TU102.
#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetNonStallEnable(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetNonStallEnable(pGpu, pIntr, arg0) intrGetNonStallEnable_TU102(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

#define intrGetNonStallEnable_HAL(pGpu, pIntr, arg0) intrGetNonStallEnable(pGpu, pIntr, arg0)
529 
void intrDisableNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline void intrDisableNonStall(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableNonStall(pGpu, pIntr, arg0) intrDisableNonStall_TU102(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

#define intrDisableNonStall_HAL(pGpu, pIntr, arg0) intrDisableNonStall(pGpu, pIntr, arg0)
542 
void intrRestoreNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline void intrRestoreNonStall(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrRestoreNonStall(pGpu, pIntr, arg0, arg1) intrRestoreNonStall_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrRestoreNonStall_HAL(pGpu, pIntr, arg0, arg1) intrRestoreNonStall(pGpu, pIntr, arg0, arg1)
555 
void intrGetStallInterruptMode_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *pIntrmode, NvBool *pPending);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline void intrGetStallInterruptMode(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *pIntrmode, NvBool *pPending) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetStallInterruptMode(pGpu, pIntr, pIntrmode, pPending) intrGetStallInterruptMode_TU102(pGpu, pIntr, pIntrmode, pPending)
#endif //__nvoc_intr_h_disabled

#define intrGetStallInterruptMode_HAL(pGpu, pIntr, pIntrmode, pPending) intrGetStallInterruptMode(pGpu, pIntr, pIntrmode, pPending)
568 
void intrEncodeStallIntrEn_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrEn, NvU32 *pIntrEnSet, NvU32 *pIntrEnClear);


// Compiled out: asserting stub; otherwise resolves to the GP100 implementation.
#ifdef __nvoc_intr_h_disabled
static inline void intrEncodeStallIntrEn(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrEn, NvU32 *pIntrEnSet, NvU32 *pIntrEnClear) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEncodeStallIntrEn(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear) intrEncodeStallIntrEn_GP100(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear)
#endif //__nvoc_intr_h_disabled

#define intrEncodeStallIntrEn_HAL(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear) intrEncodeStallIntrEn(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear)
581 
NV_STATUS intrCheckAndServiceNonReplayableFault_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCheckAndServiceNonReplayableFault(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCheckAndServiceNonReplayableFault(pGpu, pIntr, arg0) intrCheckAndServiceNonReplayableFault_TU102(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

#define intrCheckAndServiceNonReplayableFault_HAL(pGpu, pIntr, arg0) intrCheckAndServiceNonReplayableFault(pGpu, pIntr, arg0)
595 
// Generated variant: always returns 0.
static inline NvU32 intrGetStallBaseVector_4a4dee(OBJGPU *pGpu, struct Intr *pIntr) {
    return 0;
}

NvU32 intrGetStallBaseVector_TU102(OBJGPU *pGpu, struct Intr *pIntr);

// Generated variant: unconditionally asserts (unsupported configuration).
static inline NvU32 intrGetStallBaseVector_c067f9(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}


// Compiled out: asserting stub. Note the enabled path is generated to map to
// the _4a4dee variant (returns 0), not the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetStallBaseVector(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetStallBaseVector(pGpu, pIntr) intrGetStallBaseVector_4a4dee(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrGetStallBaseVector_HAL(pGpu, pIntr) intrGetStallBaseVector(pGpu, pIntr)
617 
void intrEnableLeaf_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline void intrEnableLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEnableLeaf(pGpu, pIntr, intrVector) intrEnableLeaf_TU102(pGpu, pIntr, intrVector)
#endif //__nvoc_intr_h_disabled

#define intrEnableLeaf_HAL(pGpu, pIntr, intrVector) intrEnableLeaf(pGpu, pIntr, intrVector)
630 
void intrDisableLeaf_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline void intrDisableLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableLeaf(pGpu, pIntr, intrVector) intrDisableLeaf_TU102(pGpu, pIntr, intrVector)
#endif //__nvoc_intr_h_disabled

#define intrDisableLeaf_HAL(pGpu, pIntr, intrVector) intrDisableLeaf(pGpu, pIntr, intrVector)
643 
void intrEnableTopNonstall_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline void intrEnableTopNonstall(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEnableTopNonstall(pGpu, pIntr, pThreadState) intrEnableTopNonstall_TU102(pGpu, pIntr, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrEnableTopNonstall_HAL(pGpu, pIntr, pThreadState) intrEnableTopNonstall(pGpu, pIntr, pThreadState)
656 
void intrDisableTopNonstall_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline void intrDisableTopNonstall(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableTopNonstall(pGpu, pIntr, pThreadState) intrDisableTopNonstall_TU102(pGpu, pIntr, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrDisableTopNonstall_HAL(pGpu, pIntr, pThreadState) intrDisableTopNonstall(pGpu, pIntr, pThreadState)
669 
void intrSetStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrType, struct THREAD_STATE_NODE *pThreadState);


// Compiled out: asserting stub; otherwise resolves to the TU102 implementation.
#ifdef __nvoc_intr_h_disabled
static inline void intrSetStall(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrType, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetStall(pGpu, pIntr, intrType, pThreadState) intrSetStall_TU102(pGpu, pIntr, intrType, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrSetStall_HAL(pGpu, pIntr, intrType, pThreadState) intrSetStall(pGpu, pIntr, intrType, pThreadState)
682 
683 void intrClearLeafVector_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState);
684 
685 
686 #ifdef __nvoc_intr_h_disabled
687 static inline void intrClearLeafVector(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState) {
688     NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
689 }
690 #else //__nvoc_intr_h_disabled
691 #define intrClearLeafVector(pGpu, pIntr, vector, pThreadState) intrClearLeafVector_TU102(pGpu, pIntr, vector, pThreadState)
692 #endif //__nvoc_intr_h_disabled
693 
694 #define intrClearLeafVector_HAL(pGpu, pIntr, vector, pThreadState) intrClearLeafVector(pGpu, pIntr, vector, pThreadState)
695 
696 static inline void intrClearCpuLeafVector_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState) {
697     return;
698 }
699 
700 void intrClearCpuLeafVector_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState);
701 
702 
703 #ifdef __nvoc_intr_h_disabled
704 static inline void intrClearCpuLeafVector(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState) {
705     NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
706 }
707 #else //__nvoc_intr_h_disabled
708 #define intrClearCpuLeafVector(pGpu, pIntr, vector, pThreadState) intrClearCpuLeafVector_b3696a(pGpu, pIntr, vector, pThreadState)
709 #endif //__nvoc_intr_h_disabled
710 
711 #define intrClearCpuLeafVector_HAL(pGpu, pIntr, vector, pThreadState) intrClearCpuLeafVector(pGpu, pIntr, vector, pThreadState)
712 
// No-op stub variant: the enabled macro below routes to this, so
// intrWriteCpuRegLeaf does nothing in this generated configuration; the
// GH100 implementation is declared but not selected here.
static inline void intrWriteCpuRegLeaf_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    return;
}

void intrWriteCpuRegLeaf_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);


#ifdef __nvoc_intr_h_disabled
static inline void intrWriteCpuRegLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrWriteCpuRegLeaf(pGpu, pIntr, arg0, arg1, arg2) intrWriteCpuRegLeaf_b3696a(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrWriteCpuRegLeaf_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteCpuRegLeaf(pGpu, pIntr, arg0, arg1, arg2)

// intrIsVectorPending -> intrIsVectorPending_TU102 when Intr is enabled;
// the disabled stub reports NV_FALSE.
NvBool intrIsVectorPending_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline NvBool intrIsVectorPending(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrIsVectorPending(pGpu, pIntr, vector, pThreadState) intrIsVectorPending_TU102(pGpu, pIntr, vector, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrIsVectorPending_HAL(pGpu, pIntr, vector, pThreadState) intrIsVectorPending(pGpu, pIntr, vector, pThreadState)

// intrSetStallSWIntr -> intrSetStallSWIntr_TU102 when Intr is enabled;
// the disabled stub returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrSetStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrSetStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrSetStallSWIntr(pGpu, pIntr) intrSetStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrSetStallSWIntr_HAL(pGpu, pIntr) intrSetStallSWIntr(pGpu, pIntr)

// intrClearStallSWIntr -> intrClearStallSWIntr_TU102 when Intr is enabled.
NV_STATUS intrClearStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrClearStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrClearStallSWIntr(pGpu, pIntr) intrClearStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrClearStallSWIntr_HAL(pGpu, pIntr) intrClearStallSWIntr(pGpu, pIntr)

// intrEnableStallSWIntr -> intrEnableStallSWIntr_TU102 when Intr is enabled.
void intrEnableStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrEnableStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEnableStallSWIntr(pGpu, pIntr) intrEnableStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrEnableStallSWIntr_HAL(pGpu, pIntr) intrEnableStallSWIntr(pGpu, pIntr)

// intrDisableStallSWIntr -> intrDisableStallSWIntr_TU102 when Intr is enabled.
void intrDisableStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrDisableStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableStallSWIntr(pGpu, pIntr) intrDisableStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrDisableStallSWIntr_HAL(pGpu, pIntr) intrDisableStallSWIntr(pGpu, pIntr)
797 
// intrServiceVirtual -> intrServiceVirtual_TU102 when Intr is enabled;
// assert-only stub when __nvoc_intr_h_disabled is defined.
void intrServiceVirtual_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceVirtual(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceVirtual(pGpu, pIntr) intrServiceVirtual_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrServiceVirtual_HAL(pGpu, pIntr) intrServiceVirtual(pGpu, pIntr)

// No-op stub variant: the enabled macro below routes to this, so
// intrResetIntrRegistersForVF does nothing in this generated configuration;
// the TU102 implementation is declared but not selected here.
static inline void intrResetIntrRegistersForVF_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    return;
}

void intrResetIntrRegistersForVF_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid);


#ifdef __nvoc_intr_h_disabled
static inline void intrResetIntrRegistersForVF(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrResetIntrRegistersForVF(pGpu, pIntr, gfid) intrResetIntrRegistersForVF_b3696a(pGpu, pIntr, gfid)
#endif //__nvoc_intr_h_disabled

#define intrResetIntrRegistersForVF_HAL(pGpu, pIntr, gfid) intrResetIntrRegistersForVF(pGpu, pIntr, gfid)

// NOT_SUPPORTED stub variant: the enabled macro below routes to this, so
// intrSaveIntrRegValue always returns NV_ERR_NOT_SUPPORTED in this
// generated configuration; the TU102 implementation is not selected here.
static inline NV_STATUS intrSaveIntrRegValue_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1, NvU32 *arg2) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrSaveIntrRegValue_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1, NvU32 *arg2);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrSaveIntrRegValue(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1, NvU32 *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrSaveIntrRegValue(pGpu, pIntr, arg0, arg1, arg2) intrSaveIntrRegValue_46f6a7(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrSaveIntrRegValue_HAL(pGpu, pIntr, arg0, arg1, arg2) intrSaveIntrRegValue(pGpu, pIntr, arg0, arg1, arg2)

// NOT_SUPPORTED stub variant selected by the enabled macro below.
static inline NV_STATUS intrRestoreIntrRegValue_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, NvU32 *arg2) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrRestoreIntrRegValue_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, NvU32 *arg2);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrRestoreIntrRegValue(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, NvU32 *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrRestoreIntrRegValue(pGpu, pIntr, arg0, arg1, arg2) intrRestoreIntrRegValue_46f6a7(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrRestoreIntrRegValue_HAL(pGpu, pIntr, arg0, arg1, arg2) intrRestoreIntrRegValue(pGpu, pIntr, arg0, arg1, arg2)

// NOT_SUPPORTED stub variant selected by the enabled macro below.
static inline NV_STATUS intrTriggerCpuDoorbellForVF_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrTriggerCpuDoorbellForVF_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrTriggerCpuDoorbellForVF(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrTriggerCpuDoorbellForVF(pGpu, pIntr, gfid) intrTriggerCpuDoorbellForVF_46f6a7(pGpu, pIntr, gfid)
#endif //__nvoc_intr_h_disabled

#define intrTriggerCpuDoorbellForVF_HAL(pGpu, pIntr, gfid) intrTriggerCpuDoorbellForVF(pGpu, pIntr, gfid)
881 
// intrTriggerPrivDoorbell -> intrTriggerPrivDoorbell_TU102 when Intr is
// enabled; the disabled stub returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrTriggerPrivDoorbell_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrTriggerPrivDoorbell(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrTriggerPrivDoorbell(pGpu, pIntr, gfid) intrTriggerPrivDoorbell_TU102(pGpu, pIntr, gfid)
#endif //__nvoc_intr_h_disabled

#define intrTriggerPrivDoorbell_HAL(pGpu, pIntr, gfid) intrTriggerPrivDoorbell(pGpu, pIntr, gfid)

// intrRetriggerTopLevel -> intrRetriggerTopLevel_TU102 when Intr is enabled.
void intrRetriggerTopLevel_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrRetriggerTopLevel(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrRetriggerTopLevel(pGpu, pIntr) intrRetriggerTopLevel_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrRetriggerTopLevel_HAL(pGpu, pIntr) intrRetriggerTopLevel(pGpu, pIntr)

// intrGetLeafStatus -> intrGetLeafStatus_TU102 when Intr is enabled.
NV_STATUS intrGetLeafStatus_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetLeafStatus(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetLeafStatus(pGpu, pIntr, arg0, arg1) intrGetLeafStatus_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetLeafStatus_HAL(pGpu, pIntr, arg0, arg1) intrGetLeafStatus(pGpu, pIntr, arg0, arg1)

// intrGetPendingDisplayIntr -> intrGetPendingDisplayIntr_TU102 when enabled.
NV_STATUS intrGetPendingDisplayIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingDisplayIntr(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingDisplayIntr(pGpu, pIntr, pEngines, pThreadState) intrGetPendingDisplayIntr_TU102(pGpu, pIntr, pEngines, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingDisplayIntr_HAL(pGpu, pIntr, pEngines, pThreadState) intrGetPendingDisplayIntr(pGpu, pIntr, pEngines, pThreadState)

// intrDumpState -> intrDumpState_TU102 when Intr is enabled.
void intrDumpState_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrDumpState(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDumpState(pGpu, pIntr) intrDumpState_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrDumpState_HAL(pGpu, pIntr) intrDumpState(pGpu, pIntr)

// intrCacheIntrFields -> intrCacheIntrFields_TU102 when Intr is enabled.
NV_STATUS intrCacheIntrFields_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCacheIntrFields(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCacheIntrFields(pGpu, pIntr) intrCacheIntrFields_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrCacheIntrFields_HAL(pGpu, pIntr) intrCacheIntrFields(pGpu, pIntr)
963 
// Register read/write accessor wrappers; each dispatches to its TU102
// implementation when Intr is enabled, or to an assert-only stub when
// __nvoc_intr_h_disabled is defined (reads then report 0).

// intrReadRegLeafEnSet -> intrReadRegLeafEnSet_TU102 when Intr is enabled.
NvU32 intrReadRegLeafEnSet_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadRegLeafEnSet(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadRegLeafEnSet(pGpu, pIntr, arg0, arg1) intrReadRegLeafEnSet_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrReadRegLeafEnSet_HAL(pGpu, pIntr, arg0, arg1) intrReadRegLeafEnSet(pGpu, pIntr, arg0, arg1)

// intrReadRegLeaf -> intrReadRegLeaf_TU102 when Intr is enabled.
NvU32 intrReadRegLeaf_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadRegLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadRegLeaf(pGpu, pIntr, arg0, arg1) intrReadRegLeaf_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrReadRegLeaf_HAL(pGpu, pIntr, arg0, arg1) intrReadRegLeaf(pGpu, pIntr, arg0, arg1)

// intrReadRegTop -> intrReadRegTop_TU102 when Intr is enabled.
NvU32 intrReadRegTop_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadRegTop(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadRegTop(pGpu, pIntr, arg0, arg1) intrReadRegTop_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrReadRegTop_HAL(pGpu, pIntr, arg0, arg1) intrReadRegTop(pGpu, pIntr, arg0, arg1)

// intrWriteRegLeafEnSet -> intrWriteRegLeafEnSet_TU102 when Intr is enabled.
void intrWriteRegLeafEnSet_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);


#ifdef __nvoc_intr_h_disabled
static inline void intrWriteRegLeafEnSet(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrWriteRegLeafEnSet(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeafEnSet_TU102(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrWriteRegLeafEnSet_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeafEnSet(pGpu, pIntr, arg0, arg1, arg2)

// intrWriteRegLeafEnClear -> intrWriteRegLeafEnClear_TU102 when enabled.
void intrWriteRegLeafEnClear_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);


#ifdef __nvoc_intr_h_disabled
static inline void intrWriteRegLeafEnClear(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrWriteRegLeafEnClear(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeafEnClear_TU102(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrWriteRegLeafEnClear_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeafEnClear(pGpu, pIntr, arg0, arg1, arg2)

// intrWriteRegLeaf -> intrWriteRegLeaf_TU102 when Intr is enabled.
void intrWriteRegLeaf_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);


#ifdef __nvoc_intr_h_disabled
static inline void intrWriteRegLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrWriteRegLeaf(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeaf_TU102(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrWriteRegLeaf_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeaf(pGpu, pIntr, arg0, arg1, arg2)
1044 
// No-op stub variant: the enabled macro below routes to this, so
// intrRouteInterruptsToSystemFirmware does nothing in this generated
// configuration; the GH100 implementation is declared but not selected.
static inline void intrRouteInterruptsToSystemFirmware_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable) {
    return;
}

void intrRouteInterruptsToSystemFirmware_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable);


#ifdef __nvoc_intr_h_disabled
static inline void intrRouteInterruptsToSystemFirmware(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrRouteInterruptsToSystemFirmware(pGpu, pIntr, bEnable) intrRouteInterruptsToSystemFirmware_b3696a(pGpu, pIntr, bEnable)
#endif //__nvoc_intr_h_disabled

#define intrRouteInterruptsToSystemFirmware_HAL(pGpu, pIntr, bEnable) intrRouteInterruptsToSystemFirmware(pGpu, pIntr, bEnable)

// Assert-and-fail stub variant (suffix 5baef9): asserts, then returns
// NV_ERR_NOT_SUPPORTED via NV_ASSERT_OR_RETURN_PRECOMP.  Selected by the
// enabled macro below, so intrInitDynamicInterruptTable is unsupported in
// this generated configuration.
static inline NV_STATUS intrInitDynamicInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, struct OBJFIFO *arg0, InterruptTable *arg1, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitDynamicInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, struct OBJFIFO *arg0, InterruptTable *arg1, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitDynamicInterruptTable(pGpu, pIntr, arg0, arg1, initFlags) intrInitDynamicInterruptTable_5baef9(pGpu, pIntr, arg0, arg1, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrInitDynamicInterruptTable_HAL(pGpu, pIntr, arg0, arg1, initFlags) intrInitDynamicInterruptTable(pGpu, pIntr, arg0, arg1, initFlags)

// Assert-and-fail stub variant selected by the enabled macro below.
static inline NV_STATUS intrInitAnyInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitAnyInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitAnyInterruptTable(pGpu, pIntr, pIntrTable, initFlags) intrInitAnyInterruptTable_5baef9(pGpu, pIntr, pIntrTable, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrInitAnyInterruptTable_HAL(pGpu, pIntr, pIntrTable, initFlags) intrInitAnyInterruptTable(pGpu, pIntr, pIntrTable, initFlags)

// NOT_SUPPORTED stub variant (suffix 395e98): the enabled macro below
// selects it, so intrInitSubtreeMap returns NV_ERR_NOT_SUPPORTED here even
// though TU102/GH100 implementations are declared.
static inline NV_STATUS intrInitSubtreeMap_395e98(OBJGPU *pGpu, struct Intr *pIntr) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrInitSubtreeMap_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NV_STATUS intrInitSubtreeMap_GH100(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitSubtreeMap(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitSubtreeMap(pGpu, pIntr) intrInitSubtreeMap_395e98(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrInitSubtreeMap_HAL(pGpu, pIntr) intrInitSubtreeMap(pGpu, pIntr)

// intrInitInterruptTable -> intrInitInterruptTable_KERNEL (CPU-RM variant)
// when Intr is enabled.
NV_STATUS intrInitInterruptTable_KERNEL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitInterruptTable(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitInterruptTable(pGpu, pIntr) intrInitInterruptTable_KERNEL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrInitInterruptTable_HAL(pGpu, pIntr) intrInitInterruptTable(pGpu, pIntr)
1127 
// intrGetInterruptTable -> intrGetInterruptTable_IMPL (single non-HAL
// implementation) when Intr is enabled.
NV_STATUS intrGetInterruptTable_IMPL(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable **ppIntrTable);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable **ppIntrTable) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetInterruptTable(pGpu, pIntr, ppIntrTable) intrGetInterruptTable_IMPL(pGpu, pIntr, ppIntrTable)
#endif //__nvoc_intr_h_disabled

#define intrGetInterruptTable_HAL(pGpu, pIntr, ppIntrTable) intrGetInterruptTable(pGpu, pIntr, ppIntrTable)

// intrDestroyInterruptTable -> intrDestroyInterruptTable_IMPL when enabled.
NV_STATUS intrDestroyInterruptTable_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrDestroyInterruptTable(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrDestroyInterruptTable(pGpu, pIntr) intrDestroyInterruptTable_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrDestroyInterruptTable_HAL(pGpu, pIntr) intrDestroyInterruptTable(pGpu, pIntr)

// Assert-and-fail stub variant (suffix 5baef9): asserts and returns
// NV_ERR_NOT_SUPPORTED.  Selected by the enabled macro below; TU102/GA100
// implementations are declared but not selected in this configuration.
static inline NV_STATUS intrGetStaticVFmcEngines_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetStaticVFmcEngines_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount);

NV_STATUS intrGetStaticVFmcEngines_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetStaticVFmcEngines(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetStaticVFmcEngines(pGpu, pIntr, ppMcEngines, pCount) intrGetStaticVFmcEngines_5baef9(pGpu, pIntr, ppMcEngines, pCount)
#endif //__nvoc_intr_h_disabled

#define intrGetStaticVFmcEngines_HAL(pGpu, pIntr, ppMcEngines, pCount) intrGetStaticVFmcEngines(pGpu, pIntr, ppMcEngines, pCount)

// Assert-and-fail stub variant selected by the enabled macro below;
// TU102/GA100/GA102/GH100 implementations declared but not selected here.
static inline NV_STATUS intrGetStaticInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetStaticInterruptTable_TU102(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags);

NV_STATUS intrGetStaticInterruptTable_GA100(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags);

NV_STATUS intrGetStaticInterruptTable_GA102(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags);

NV_STATUS intrGetStaticInterruptTable_GH100(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetStaticInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pTable, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetStaticInterruptTable(pGpu, pIntr, pTable, initFlags) intrGetStaticInterruptTable_5baef9(pGpu, pIntr, pTable, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrGetStaticInterruptTable_HAL(pGpu, pIntr, pTable, initFlags) intrGetStaticInterruptTable(pGpu, pIntr, pTable, initFlags)

// Assert-and-fail stub variant selected by the enabled macro below;
// GM107/GA100 implementations declared but not selected here.
static inline NV_STATUS intrInitGPUHostInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrInitGPUHostInterruptTable_GM107(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags);

NV_STATUS intrInitGPUHostInterruptTable_GA100(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitGPUHostInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitGPUHostInterruptTable(pGpu, pIntr, pIntrTable, initFlags) intrInitGPUHostInterruptTable_5baef9(pGpu, pIntr, pIntrTable, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrInitGPUHostInterruptTable_HAL(pGpu, pIntr, pIntrTable, initFlags) intrInitGPUHostInterruptTable(pGpu, pIntr, pIntrTable, initFlags)

// Assert-and-fail stub variant: intrInitEngineSchedInterruptTable has no
// chip implementation declared here; both build arms are stubs.
static inline NV_STATUS intrInitEngineSchedInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitEngineSchedInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, InterruptTable *pIntrTable) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitEngineSchedInterruptTable(pGpu, pIntr, pIntrTable) intrInitEngineSchedInterruptTable_5baef9(pGpu, pIntr, pIntrTable)
#endif //__nvoc_intr_h_disabled

#define intrInitEngineSchedInterruptTable_HAL(pGpu, pIntr, pIntrTable) intrInitEngineSchedInterruptTable(pGpu, pIntr, pIntrTable)
1235 
// intrServiceStall -> intrServiceStall_IMPL when Intr is enabled;
// assert-only stub when __nvoc_intr_h_disabled is defined.
void intrServiceStall_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStall(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStall(pGpu, pIntr) intrServiceStall_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrServiceStall_HAL(pGpu, pIntr) intrServiceStall(pGpu, pIntr)

// intrServiceStallList -> intrServiceStallList_IMPL when Intr is enabled.
void intrServiceStallList_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStallList(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStallList(pGpu, pIntr, arg0, arg1) intrServiceStallList_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrServiceStallList_HAL(pGpu, pIntr, arg0, arg1) intrServiceStallList(pGpu, pIntr, arg0, arg1)

// intrServiceStallSingle -> intrServiceStallSingle_IMPL when Intr is enabled.
void intrServiceStallSingle_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0, NvBool arg1);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStallSingle(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0, NvBool arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStallSingle(pGpu, pIntr, arg0, arg1) intrServiceStallSingle_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrServiceStallSingle_HAL(pGpu, pIntr, arg0, arg1) intrServiceStallSingle(pGpu, pIntr, arg0, arg1)

// intrProcessDPCQueue -> intrProcessDPCQueue_IMPL when Intr is enabled.
void intrProcessDPCQueue_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrProcessDPCQueue(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrProcessDPCQueue(pGpu, pIntr) intrProcessDPCQueue_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrProcessDPCQueue_HAL(pGpu, pIntr) intrProcessDPCQueue(pGpu, pIntr)

// intrGetIntrMask -> intrGetIntrMask_GP100 when Intr is enabled.
NV_STATUS intrGetIntrMask_GP100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetIntrMask(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrMask(pGpu, pIntr, arg0, arg1) intrGetIntrMask_GP100(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetIntrMask_HAL(pGpu, pIntr, arg0, arg1) intrGetIntrMask(pGpu, pIntr, arg0, arg1)

// Stub variants for intrGetEccIntrMaskOffset: _5baef9 asserts and returns
// NV_ERR_NOT_SUPPORTED (selected by the enabled macro below); _46f6a7
// returns NV_ERR_NOT_SUPPORTED silently; the GP100 implementation is
// declared but not selected in this configuration.
static inline NV_STATUS intrGetEccIntrMaskOffset_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetEccIntrMaskOffset_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1);

static inline NV_STATUS intrGetEccIntrMaskOffset_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetEccIntrMaskOffset(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetEccIntrMaskOffset(pGpu, pIntr, arg0, arg1) intrGetEccIntrMaskOffset_5baef9(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetEccIntrMaskOffset_HAL(pGpu, pIntr, arg0, arg1) intrGetEccIntrMaskOffset(pGpu, pIntr, arg0, arg1)
1323 
1324 static inline NV_STATUS intrGetNvlinkIntrMaskOffset_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
1325     NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1326 }
1327 
1328 NV_STATUS intrGetNvlinkIntrMaskOffset_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1);
1329 
1330 static inline NV_STATUS intrGetNvlinkIntrMaskOffset_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
1331     return NV_ERR_NOT_SUPPORTED;
1332 }
1333 
1334 
1335 #ifdef __nvoc_intr_h_disabled
1336 static inline NV_STATUS intrGetNvlinkIntrMaskOffset(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
1337     NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1338     return NV_ERR_NOT_SUPPORTED;
1339 }
1340 #else //__nvoc_intr_h_disabled
1341 #define intrGetNvlinkIntrMaskOffset(pGpu, pIntr, arg0, arg1) intrGetNvlinkIntrMaskOffset_5baef9(pGpu, pIntr, arg0, arg1)
1342 #endif //__nvoc_intr_h_disabled
1343 
1344 #define intrGetNvlinkIntrMaskOffset_HAL(pGpu, pIntr, arg0, arg1) intrGetNvlinkIntrMaskOffset(pGpu, pIntr, arg0, arg1)
1345 
//
// HAL routing for the virtual-function (SR-IOV VF) ECC/NVLINK interrupt mask
// queries. Chip-suffixed prototypes (_TU102/_GA100/_GH100/...) are the real
// implementations; hex-suffixed inlines are NVOC-generated stubs.
//
static inline NV_STATUS intrGetEccVirtualFunctionIntrMask_5baef9(OBJGPU *pGpu, struct Intr *pIntr, struct Device *pDevice, NvU32 *arg0) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetEccVirtualFunctionIntrMask_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct Device *pDevice, NvU32 *arg0);

NV_STATUS intrGetEccVirtualFunctionIntrMask_GA100(OBJGPU *pGpu, struct Intr *pIntr, struct Device *pDevice, NvU32 *arg0);

NV_STATUS intrGetEccVirtualFunctionIntrMask_GH100(OBJGPU *pGpu, struct Intr *pIntr, struct Device *pDevice, NvU32 *arg0);


#ifdef __nvoc_intr_h_disabled
// Intr engine compiled out: any call asserts and fails.
static inline NV_STATUS intrGetEccVirtualFunctionIntrMask(OBJGPU *pGpu, struct Intr *pIntr, struct Device *pDevice, NvU32 *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetEccVirtualFunctionIntrMask(pGpu, pIntr, pDevice, arg0) intrGetEccVirtualFunctionIntrMask_5baef9(pGpu, pIntr, pDevice, arg0)
#endif //__nvoc_intr_h_disabled

#define intrGetEccVirtualFunctionIntrMask_HAL(pGpu, pIntr, pDevice, arg0) intrGetEccVirtualFunctionIntrMask(pGpu, pIntr, pDevice, arg0)

static inline NV_STATUS intrGetNvlinkVirtualFunctionIntrMask_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetNvlinkVirtualFunctionIntrMask_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1);

NV_STATUS intrGetNvlinkVirtualFunctionIntrMask_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1);

NV_STATUS intrGetNvlinkVirtualFunctionIntrMask_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetNvlinkVirtualFunctionIntrMask(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetNvlinkVirtualFunctionIntrMask(pGpu, pIntr, arg0, arg1) intrGetNvlinkVirtualFunctionIntrMask_5baef9(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetNvlinkVirtualFunctionIntrMask_HAL(pGpu, pIntr, arg0, arg1) intrGetNvlinkVirtualFunctionIntrMask(pGpu, pIntr, arg0, arg1)

// NOTE(review): this NvU32-returning stub returns the NV_ERR_NOT_SUPPORTED
// status code as its mask value — generator-emitted as-is; confirm against
// the NVOC generator before treating the return value as a real mask.
static inline NvU32 intrGetEccVirtualFunctionIntrSmcMaskAll_5baef9(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NvU32 intrGetEccVirtualFunctionIntrSmcMaskAll_GA100(OBJGPU *pGpu, struct Intr *pIntr);

NvU32 intrGetEccVirtualFunctionIntrSmcMaskAll_GA102(OBJGPU *pGpu, struct Intr *pIntr);

// Generated stub: empty mask.
static inline NvU32 intrGetEccVirtualFunctionIntrSmcMaskAll_4a4dee(OBJGPU *pGpu, struct Intr *pIntr) {
    return 0;
}


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetEccVirtualFunctionIntrSmcMaskAll(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetEccVirtualFunctionIntrSmcMaskAll(pGpu, pIntr) intrGetEccVirtualFunctionIntrSmcMaskAll_5baef9(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrGetEccVirtualFunctionIntrSmcMaskAll_HAL(pGpu, pIntr) intrGetEccVirtualFunctionIntrSmcMaskAll(pGpu, pIntr)
1413 
//
// HAL routing for intrRequiresPossibleErrorNotifier / intrReadErrCont.
// Hex-suffixed inlines are NVOC-generated stubs named by a hash of their body.
//
// Generated stub: always NV_FALSE (spelled as a constant-false expression).
static inline NvBool intrRequiresPossibleErrorNotifier_491d52(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines) {
    return ((NvBool)(0 != 0));
}

NvBool intrRequiresPossibleErrorNotifier_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines);

NvBool intrRequiresPossibleErrorNotifier_GA100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines);

NvBool intrRequiresPossibleErrorNotifier_GH100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines);


#ifdef __nvoc_intr_h_disabled
// Intr engine compiled out: any call asserts and fails.
static inline NvBool intrRequiresPossibleErrorNotifier(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrRequiresPossibleErrorNotifier(pGpu, pIntr, pEngines) intrRequiresPossibleErrorNotifier_491d52(pGpu, pIntr, pEngines)
#endif //__nvoc_intr_h_disabled

#define intrRequiresPossibleErrorNotifier_HAL(pGpu, pIntr, pEngines) intrRequiresPossibleErrorNotifier(pGpu, pIntr, pEngines)

// NOTE(review): NvU32-returning stub reuses the NvBool-false body (returns 0).
// Generator-emitted as-is; the effective value is simply 0.
static inline NvU32 intrReadErrCont_491d52(OBJGPU *pGpu, struct Intr *pIntr) {
    return ((NvBool)(0 != 0));
}

NvU32 intrReadErrCont_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadErrCont(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadErrCont(pGpu, pIntr) intrReadErrCont_491d52(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrReadErrCont_HAL(pGpu, pIntr) intrReadErrCont(pGpu, pIntr)
1453 
//
// HAL routing for pending-stall queries, interrupt-enable query, and hub-leaf
// interrupt helpers. Pattern: chip-suffixed prototype = real implementation;
// the guarded macro routes the public name to the build-selected variant.
//
NV_STATUS intrGetPendingStall_GP100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
// Intr engine compiled out: any call asserts and fails.
static inline NV_STATUS intrGetPendingStall(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingStall(pGpu, pIntr, arg0, arg1) intrGetPendingStall_GP100(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingStall_HAL(pGpu, pIntr, arg0, arg1) intrGetPendingStall(pGpu, pIntr, arg0, arg1)

NV_STATUS intrGetPendingStallEngines_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingStallEngines(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingStallEngines(pGpu, pIntr, arg0, arg1) intrGetPendingStallEngines_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingStallEngines_HAL(pGpu, pIntr, arg0, arg1) intrGetPendingStallEngines(pGpu, pIntr, arg0, arg1)

// _IMPL = the single, chip-independent implementation.
NvBool intrIsIntrEnabled_IMPL(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0);


#ifdef __nvoc_intr_h_disabled
static inline NvBool intrIsIntrEnabled(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrIsIntrEnabled(pGpu, pIntr, arg0) intrIsIntrEnabled_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

#define intrIsIntrEnabled_HAL(pGpu, pIntr, arg0) intrIsIntrEnabled(pGpu, pIntr, arg0)

// Generated no-op stub (feature absent on the selected configuration).
static inline void intrSetHubLeafIntr_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1, NvU32 *arg2, struct THREAD_STATE_NODE *arg3) {
    return;
}


#ifdef __nvoc_intr_h_disabled
static inline void intrSetHubLeafIntr(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1, NvU32 *arg2, struct THREAD_STATE_NODE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetHubLeafIntr(pGpu, pIntr, arg0, arg1, arg2, arg3) intrSetHubLeafIntr_b3696a(pGpu, pIntr, arg0, arg1, arg2, arg3)
#endif //__nvoc_intr_h_disabled

#define intrSetHubLeafIntr_HAL(pGpu, pIntr, arg0, arg1, arg2, arg3) intrSetHubLeafIntr(pGpu, pIntr, arg0, arg1, arg2, arg3)

void intrGetHubLeafIntrPending_STUB(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline void intrGetHubLeafIntrPending(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetHubLeafIntrPending(pGpu, pIntr, arg0, arg1) intrGetHubLeafIntrPending_STUB(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetHubLeafIntrPending_HAL(pGpu, pIntr, arg0, arg1) intrGetHubLeafIntrPending(pGpu, pIntr, arg0, arg1)
1523 
//
// Virtual-method dispatch thunks. Each *_DISPATCH inline invokes the
// corresponding __intrXxx__ function pointer stored on the Intr object
// (NVOC's vtable), so the bound implementation is chosen per object.
//
NV_STATUS intrConstructEngine_IMPL(OBJGPU *pGpu, struct Intr *pIntr, ENGDESCRIPTOR arg0);

static inline NV_STATUS intrConstructEngine_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, ENGDESCRIPTOR arg0) {
    return pIntr->__intrConstructEngine__(pGpu, pIntr, arg0);
}

NV_STATUS intrStateInitUnlocked_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

static inline NV_STATUS intrStateInitUnlocked_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrStateInitUnlocked__(pGpu, pIntr);
}

NV_STATUS intrStateInitLocked_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

static inline NV_STATUS intrStateInitLocked_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrStateInitLocked__(pGpu, pIntr);
}

void intrStateDestroy_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

static inline void intrStateDestroy_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    pIntr->__intrStateDestroy__(pGpu, pIntr);
}

NvU32 intrDecodeStallIntrEn_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0);

// Generated stub: decodes to 0 on configurations without this feature.
static inline NvU32 intrDecodeStallIntrEn_4a4dee(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0) {
    return 0;
}

static inline NvU32 intrDecodeStallIntrEn_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0) {
    return pIntr->__intrDecodeStallIntrEn__(pGpu, pIntr, arg0);
}

NvU32 intrGetNonStallBaseVector_TU102(OBJGPU *pGpu, struct Intr *pIntr);

// Generated stub: asserts and yields 0 (no non-stall base vector).
static inline NvU32 intrGetNonStallBaseVector_c067f9(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}

static inline NvU32 intrGetNonStallBaseVector_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrGetNonStallBaseVector__(pGpu, pIntr);
}

NvU64 intrGetUvmSharedLeafEnDisableMask_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU64 intrGetUvmSharedLeafEnDisableMask_GA100(OBJGPU *pGpu, struct Intr *pIntr);

static inline NvU64 intrGetUvmSharedLeafEnDisableMask_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrGetUvmSharedLeafEnDisableMask__(pGpu, pIntr);
}

void intrSetDisplayInterruptEnable_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable, struct THREAD_STATE_NODE *pThreadState);

// Generated no-op stub (no display interrupt on the selected configuration).
static inline void intrSetDisplayInterruptEnable_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable, struct THREAD_STATE_NODE *pThreadState) {
    return;
}

static inline void intrSetDisplayInterruptEnable_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable, struct THREAD_STATE_NODE *pThreadState) {
    pIntr->__intrSetDisplayInterruptEnable__(pGpu, pIntr, bEnable, pThreadState);
}

NvU32 intrReadRegTopEnSet_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);

NvU32 intrReadRegTopEnSet_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);

static inline NvU32 intrReadRegTopEnSet_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    return pIntr->__intrReadRegTopEnSet__(pGpu, pIntr, arg0, arg1);
}

void intrWriteRegTopEnSet_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);

void intrWriteRegTopEnSet_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);

static inline void intrWriteRegTopEnSet_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    pIntr->__intrWriteRegTopEnSet__(pGpu, pIntr, arg0, arg1, arg2);
}

void intrWriteRegTopEnClear_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);

void intrWriteRegTopEnClear_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);

static inline void intrWriteRegTopEnClear_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    pIntr->__intrWriteRegTopEnClear__(pGpu, pIntr, arg0, arg1, arg2);
}
1609 
//
// More vtable dispatch thunks: tree geometry queries, sanity checks,
// state load/unload, and legacy interrupt mask/enable control.
//
NvU32 intrGetNumLeaves_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU32 intrGetNumLeaves_GH100(OBJGPU *pGpu, struct Intr *pIntr);

static inline NvU32 intrGetNumLeaves_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrGetNumLeaves__(pGpu, pIntr);
}

NvU32 intrGetLeafSize_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU32 intrGetLeafSize_GH100(OBJGPU *pGpu, struct Intr *pIntr);

static inline NvU32 intrGetLeafSize_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrGetLeafSize__(pGpu, pIntr);
}

NvU64 intrGetIntrTopNonStallMask_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU64 intrGetIntrTopNonStallMask_GH100(OBJGPU *pGpu, struct Intr *pIntr);

static inline NvU64 intrGetIntrTopNonStallMask_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrGetIntrTopNonStallMask__(pGpu, pIntr);
}

void intrSanityCheckEngineIntrStallVector_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

void intrSanityCheckEngineIntrStallVector_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

// Generated no-op stub: sanity check absent on pre-GA100 configurations.
static inline void intrSanityCheckEngineIntrStallVector_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
    return;
}

static inline void intrSanityCheckEngineIntrStallVector_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
    pIntr->__intrSanityCheckEngineIntrStallVector__(pGpu, pIntr, vector, mcEngine);
}

void intrSanityCheckEngineIntrNotificationVector_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

void intrSanityCheckEngineIntrNotificationVector_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

// Generated no-op stub.
static inline void intrSanityCheckEngineIntrNotificationVector_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
    return;
}

static inline void intrSanityCheckEngineIntrNotificationVector_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
    pIntr->__intrSanityCheckEngineIntrNotificationVector__(pGpu, pIntr, vector, mcEngine);
}

NV_STATUS intrStateLoad_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0);

static inline NV_STATUS intrStateLoad_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0) {
    return pIntr->__intrStateLoad__(pGpu, pIntr, arg0);
}

NV_STATUS intrStateUnload_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0);

static inline NV_STATUS intrStateUnload_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0) {
    return pIntr->__intrStateUnload__(pGpu, pIntr, arg0);
}

NV_STATUS intrSetIntrMask_GP100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);

// Generated stub: interrupt masking not supported on this configuration.
static inline NV_STATUS intrSetIntrMask_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS intrSetIntrMask_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    return pIntr->__intrSetIntrMask__(pGpu, pIntr, arg0, arg1);
}

void intrSetIntrEnInHw_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);

// Generated no-op stub.
static inline void intrSetIntrEnInHw_d44104(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    return;
}

static inline void intrSetIntrEnInHw_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    pIntr->__intrSetIntrEnInHw__(pGpu, pIntr, arg0, arg1);
}

NvU32 intrGetIntrEnFromHw_GP100(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0);

// Generated stub: reports enable value 0.
static inline NvU32 intrGetIntrEnFromHw_b2b553(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    return 0;
}

static inline NvU32 intrGetIntrEnFromHw_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    return pIntr->__intrGetIntrEnFromHw__(pGpu, pIntr, arg0);
}
1699 
//
// Engine-state lifecycle dispatch thunks. The pEngstate/POBJGPU naming
// suggests these virtuals are inherited from the base engine-state class
// (see eng_state.h) — NOTE(review): confirm against the Intr class definition.
//
static inline NV_STATUS intrStatePreLoad_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate, NvU32 arg0) {
    return pEngstate->__intrStatePreLoad__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS intrStatePostUnload_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate, NvU32 arg0) {
    return pEngstate->__intrStatePostUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS intrStatePreUnload_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate, NvU32 arg0) {
    return pEngstate->__intrStatePreUnload__(pGpu, pEngstate, arg0);
}

static inline void intrInitMissing_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate) {
    pEngstate->__intrInitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS intrStatePreInitLocked_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate) {
    return pEngstate->__intrStatePreInitLocked__(pGpu, pEngstate);
}

static inline NV_STATUS intrStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate) {
    return pEngstate->__intrStatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS intrStatePostLoad_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate, NvU32 arg0) {
    return pEngstate->__intrStatePostLoad__(pGpu, pEngstate, arg0);
}

static inline NvBool intrIsPresent_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate) {
    return pEngstate->__intrIsPresent__(pGpu, pEngstate);
}
1731 
// Destructor: NVOC invokes it through the __nvoc_intrDestruct alias.
void intrDestruct_IMPL(struct Intr *pIntr);

#define __nvoc_intrDestruct(pIntr) intrDestruct_IMPL(pIntr)
NV_STATUS intrServiceNonStallBottomHalf_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);

#ifdef __nvoc_intr_h_disabled
// Intr engine compiled out: any call asserts and fails.
static inline NV_STATUS intrServiceNonStallBottomHalf(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrServiceNonStallBottomHalf(pGpu, pIntr, arg0, arg1) intrServiceNonStallBottomHalf_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

NV_STATUS intrServiceNotificationRecords_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineIdx, struct THREAD_STATE_NODE *arg0);

#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrServiceNotificationRecords(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineIdx, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrServiceNotificationRecords(pGpu, pIntr, mcEngineIdx, arg0) intrServiceNotificationRecords_IMPL(pGpu, pIntr, mcEngineIdx, arg0)
#endif //__nvoc_intr_h_disabled
//
// Non-virtual helpers (single _IMPL, no per-chip variants): interrupt-top
// mask accessors, subtree range lookup, vector-table entry registration,
// and stall-interrupt servicing. Each is wrapped by the standard
// __nvoc_intr_h_disabled assert-stub / passthrough-macro pair.
//
NvU64 intrGetIntrTopLegacyStallMask_IMPL(struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
// Intr engine compiled out: any call asserts and fails.
static inline NvU64 intrGetIntrTopLegacyStallMask(struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrTopLegacyStallMask(pIntr) intrGetIntrTopLegacyStallMask_IMPL(pIntr)
#endif //__nvoc_intr_h_disabled

NvU64 intrGetIntrTopLockedMask_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline NvU64 intrGetIntrTopLockedMask(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrTopLockedMask(pGpu, pIntr) intrGetIntrTopLockedMask_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

NV_STATUS intrGetSubtreeRange_IMPL(struct Intr *pIntr, NV2080_INTR_CATEGORY category, NV2080_INTR_CATEGORY_SUBTREE_MAP *pRange);

#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetSubtreeRange(struct Intr *pIntr, NV2080_INTR_CATEGORY category, NV2080_INTR_CATEGORY_SUBTREE_MAP *pRange) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetSubtreeRange(pIntr, category, pRange) intrGetSubtreeRange_IMPL(pIntr, category, pRange)
#endif //__nvoc_intr_h_disabled

NvU64 intrGetIntrTopCategoryMask_IMPL(struct Intr *pIntr, NV2080_INTR_CATEGORY category);

#ifdef __nvoc_intr_h_disabled
static inline NvU64 intrGetIntrTopCategoryMask(struct Intr *pIntr, NV2080_INTR_CATEGORY category) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrTopCategoryMask(pIntr, category) intrGetIntrTopCategoryMask_IMPL(pIntr, category)
#endif //__nvoc_intr_h_disabled

NV_STATUS intrSetInterruptEntry_IMPL(struct Intr *pIntr, INTR_TREE tree, NvU32 vector, const InterruptEntry *pEntry);

#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrSetInterruptEntry(struct Intr *pIntr, INTR_TREE tree, NvU32 vector, const InterruptEntry *pEntry) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrSetInterruptEntry(pIntr, tree, vector, pEntry) intrSetInterruptEntry_IMPL(pIntr, tree, vector, pEntry)
#endif //__nvoc_intr_h_disabled

void intrServiceStallListAllGpusCond_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1);

#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStallListAllGpusCond(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStallListAllGpusCond(pGpu, pIntr, arg0, arg1) intrServiceStallListAllGpusCond_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

void intrServiceStallListDevice_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1);

#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStallListDevice(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStallListDevice(pGpu, pIntr, arg0, arg1) intrServiceStallListDevice_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled
1831 
//
// Non-virtual helpers: interrupt-record servicing, DPC (deferred procedure
// call) queue management, and PMC-interrupt-mask <-> engine-mask conversion.
// Standard __nvoc_intr_h_disabled assert-stub / passthrough-macro pattern.
//
NvU32 intrServiceInterruptRecords_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0, NvBool *arg1);

#ifdef __nvoc_intr_h_disabled
// Intr engine compiled out: any call asserts and fails.
static inline NvU32 intrServiceInterruptRecords(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0, NvBool *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrServiceInterruptRecords(pGpu, pIntr, arg0, arg1) intrServiceInterruptRecords_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

void intrQueueDpc_IMPL(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0, DPCNODE *arg1);

#ifdef __nvoc_intr_h_disabled
static inline void intrQueueDpc(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0, DPCNODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrQueueDpc(pGpu, pIntr, arg0, arg1) intrQueueDpc_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

DPCNODE *intrDequeueDpc_IMPL(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0);

#ifdef __nvoc_intr_h_disabled
static inline DPCNODE *intrDequeueDpc(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NULL;
}
#else //__nvoc_intr_h_disabled
#define intrDequeueDpc(pGpu, pIntr, arg0) intrDequeueDpc_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

NvBool intrIsDpcQueueEmpty_IMPL(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0);

#ifdef __nvoc_intr_h_disabled
static inline NvBool intrIsDpcQueueEmpty(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrIsDpcQueueEmpty(pGpu, pIntr, arg0) intrIsDpcQueueEmpty_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

void intrQueueInterruptBasedDpc_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0);

#ifdef __nvoc_intr_h_disabled
static inline void intrQueueInterruptBasedDpc(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrQueueInterruptBasedDpc(pGpu, pIntr, arg0) intrQueueInterruptBasedDpc_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

NvU32 intrConvertEngineMaskToPmcIntrMask_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0);

#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrConvertEngineMaskToPmcIntrMask(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrConvertEngineMaskToPmcIntrMask(pGpu, pIntr, arg0) intrConvertEngineMaskToPmcIntrMask_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

void intrConvertPmcIntrMaskToEngineMask_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, union MC_ENGINE_BITVECTOR *arg1);

#ifdef __nvoc_intr_h_disabled
static inline void intrConvertPmcIntrMaskToEngineMask(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, union MC_ENGINE_BITVECTOR *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrConvertPmcIntrMaskToEngineMask(pGpu, pIntr, arg0, arg1) intrConvertPmcIntrMaskToEngineMask_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled
1905 
//
// Non-virtual helpers: engine-id -> interrupt-vector lookup and accessors
// for the "unblocked" interrupt-mask bitvector stored on Intr.
//
NvU32 intrGetVectorFromEngineId_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineId, NvBool bNonStall);

#ifdef __nvoc_intr_h_disabled
// Intr engine compiled out: any call asserts and fails.
static inline NvU32 intrGetVectorFromEngineId(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineId, NvBool bNonStall) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetVectorFromEngineId(pGpu, pIntr, mcEngineId, bNonStall) intrGetVectorFromEngineId_IMPL(pGpu, pIntr, mcEngineId, bNonStall)
#endif //__nvoc_intr_h_disabled

NV_STATUS intrGetSmallestNotificationVector_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0);

#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetSmallestNotificationVector(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetSmallestNotificationVector(pGpu, pIntr, arg0) intrGetSmallestNotificationVector_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

void intrSetIntrMaskUnblocked_IMPL(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0);

#ifdef __nvoc_intr_h_disabled
static inline void intrSetIntrMaskUnblocked(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetIntrMaskUnblocked(pIntr, arg0) intrSetIntrMaskUnblocked_IMPL(pIntr, arg0)
#endif //__nvoc_intr_h_disabled

void intrGetIntrMaskUnblocked_IMPL(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0);

#ifdef __nvoc_intr_h_disabled
static inline void intrGetIntrMaskUnblocked(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrMaskUnblocked(pIntr, arg0) intrGetIntrMaskUnblocked_IMPL(pIntr, arg0)
#endif //__nvoc_intr_h_disabled
1947 
//
// Non-virtual getter/setter helpers for interrupt mask flags, default
// interrupt-enable value, and current interrupt-enable value, plus hardware
// snapshot and GMMU interrupt query. Standard assert-stub / passthrough
// pattern under __nvoc_intr_h_disabled.
//
void intrSetIntrMaskFlags_IMPL(struct Intr *pIntr, NvU32 arg0);

#ifdef __nvoc_intr_h_disabled
// Intr engine compiled out: any call asserts and fails.
static inline void intrSetIntrMaskFlags(struct Intr *pIntr, NvU32 arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetIntrMaskFlags(pIntr, arg0) intrSetIntrMaskFlags_IMPL(pIntr, arg0)
#endif //__nvoc_intr_h_disabled

NvU32 intrGetIntrMaskFlags_IMPL(struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetIntrMaskFlags(struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrMaskFlags(pIntr) intrGetIntrMaskFlags_IMPL(pIntr)
#endif //__nvoc_intr_h_disabled

void intrSetDefaultIntrEn_IMPL(struct Intr *pIntr, NvU32 arg0);

#ifdef __nvoc_intr_h_disabled
static inline void intrSetDefaultIntrEn(struct Intr *pIntr, NvU32 arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetDefaultIntrEn(pIntr, arg0) intrSetDefaultIntrEn_IMPL(pIntr, arg0)
#endif //__nvoc_intr_h_disabled

NvU32 intrGetDefaultIntrEn_IMPL(struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetDefaultIntrEn(struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetDefaultIntrEn(pIntr) intrGetDefaultIntrEn_IMPL(pIntr)
#endif //__nvoc_intr_h_disabled

void intrSetIntrEn_IMPL(struct Intr *pIntr, NvU32 arg0);

#ifdef __nvoc_intr_h_disabled
static inline void intrSetIntrEn(struct Intr *pIntr, NvU32 arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetIntrEn(pIntr, arg0) intrSetIntrEn_IMPL(pIntr, arg0)
#endif //__nvoc_intr_h_disabled

NvU32 intrGetIntrEn_IMPL(struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetIntrEn(struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrEn(pIntr) intrGetIntrEn_IMPL(pIntr)
#endif //__nvoc_intr_h_disabled

void intrSaveIntrEn0FromHw_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline void intrSaveIntrEn0FromHw(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSaveIntrEn0FromHw(pGpu, pIntr) intrSaveIntrEn0FromHw_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

void intrGetGmmuInterrupts_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);

#ifdef __nvoc_intr_h_disabled
static inline void intrGetGmmuInterrupts(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetGmmuInterrupts(pGpu, pIntr, arg0, arg1) intrGetGmmuInterrupts_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled
2030 
2031 #undef PRIVATE_FIELD
2032 
2033 
// Mask value used for interrupts that should be masked off (disabled) in the PMC interrupt tree
2035 #define NV_PMC_INTR_INVALID_MASK (0)
2036 
2037 #endif // INTR_H
2038 
2039 #ifdef __cplusplus
2040 } // extern "C"
2041 #endif
2042 
2043 #endif // _G_INTR_NVOC_H_
2044