1 #ifndef _G_INTR_NVOC_H_
2 #define _G_INTR_NVOC_H_
3 #include "nvoc/runtime.h"
4 
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8 
9 /*
10  * SPDX-FileCopyrightText: Copyright (c) 2006-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11  * SPDX-License-Identifier: MIT
12  *
13  * Permission is hereby granted, free of charge, to any person obtaining a
14  * copy of this software and associated documentation files (the "Software"),
15  * to deal in the Software without restriction, including without limitation
16  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17  * and/or sell copies of the Software, and to permit persons to whom the
18  * Software is furnished to do so, subject to the following conditions:
19  *
20  * The above copyright notice and this permission notice shall be included in
21  * all copies or substantial portions of the Software.
22  *
23  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29  * DEALINGS IN THE SOFTWARE.
30  */
31 
32 #include "g_intr_nvoc.h"
33 
34 #ifndef INTR_H
35 #define INTR_H
36 
37 #include "gpu/gpu.h"
38 #include "gpu/eng_state.h"
39 #include "nvoc/utility.h"
40 #include "utils/nvbitvector.h"
41 #include "dev_ctrl_defines.h"
42 #include "kernel/gpu/intr/engine_idx.h"
43 #include "kernel/gpu/intr/intr_service.h"
44 #include "ctrl/ctrl2080/ctrl2080internal.h"
45 
46 //
47 // Interrupt Type
48 //
49 // Abstraction of the disabled/software/hardware enumeration in NV_PMC_INTR_EN_0_INTA
50 // !!! This enumeration must exactly match NV_PMC_INTR_EN_0_INTA !!!
51 //
52 
// Enumerated interrupt modes; values must stay in sync with
// NV_PMC_INTR_EN_0_INTA (see note above).
#define INTERRUPT_TYPE_DISABLED         0
#define INTERRUPT_TYPE_HARDWARE         1
#define INTERRUPT_TYPE_SOFTWARE         2
#define INTERRUPT_TYPE_MULTI            3
#define INTERRUPT_TYPE_MAX              INTERRUPT_TYPE_MULTI

// Mask forms of the above: hardware interrupts occupy bits 30:0, the
// software interrupt uses bit 31, ENABLED is the union of both.
#define INTERRUPT_MASK_DISABLED          0x00000000
#define INTERRUPT_MASK_HARDWARE          0x7fffffff
#define INTERRUPT_MASK_SOFTWARE          0x80000000
#define INTERRUPT_MASK_ENABLED           0xffffffff
63 
64 /**
65  * @brief Each entry corresponds to a top level interrupt
66  */
typedef struct
{
    /** MC_ENGINE_IDX* value */
    NvU16 mcEngine;
    /** Bit in top level PMC interrupt registers */
    NvU32 pmcIntrMask;
    /** Interrupt vector in CTRL interrupt tree (Turing+). For non-host driven
     *  engines, this is their single interrupt vector at top level; while for
     *  host driven engines, this is their stalling interrupt vector
     */
    NvU32 intrVector;
    /** Nonstalling interrupt vector in CTRL interrupt tree (Turing+). Only
     *  valid for host driven engines. NV_INTR_VECTOR_INVALID signifies
     *  unavailable
     */
    NvU32 intrVectorNonStall;
    /** Boolean set to NV_TRUE if Nonstalling interrupt is disabled in CTRL
     *  interrupt tree (Turing+). This may change to Enum in future. We are
     *  moving towards the direction where GSP-RM is fully aware of MC interrupt
     *  table and it provisions a subset of entries to CPU RM.
     */
    NvBool bDisableNonStall;
    // Entries of this type are appended in bulk via POPULATE_INTR_TABLE below.
} INTR_TABLE_ENTRY;
90 
// NOTE(review): name suggests the per-entry cap on distinct interrupts an
// INTR_TABLE_ENTRY can represent — confirm against table-population code.
#define INTR_TABLE_MAX_INTRS_PER_ENTRY       6

//
// The max number of interrupts we can fit in the dynamically populated,
// but statically sized, interrupt table.
//
#define INTR_TABLE_MAX_STATIC_KERNEL_INTRS            17
98 
//
// Appends localCount entries to the interrupt table pTable, advancing
// numEntries (an lvalue) by one per appended entry. Each new entry gets its
// mcEngine/intrVector from the parallel arrays localMcEngineIdxs /
// localIntrVectors; pmcIntrMask and intrVectorNonStall are set to their
// "invalid" sentinels (bDisableNonStall is left untouched).
//
// NOTE: relies on a loop variable `i` already declared at the call site, and
// asserts (does not otherwise guard) that the table capacity localMax is not
// exceeded.
//
// Fixed: the body previously indexed `(intrVectors)[i]`, silently capturing a
// caller-scope variable instead of using the `localIntrVectors` parameter.
//
#define POPULATE_INTR_TABLE(pTable, numEntries, localMcEngineIdxs, localIntrVectors, localCount, localMax) \
NV_ASSERT(numEntries + localCount <= localMax); \
for (i = 0; i < localCount; i++)               \
{                                              \
    (pTable)[(numEntries)].mcEngine           = (localMcEngineIdxs)[i];   \
    (pTable)[(numEntries)].pmcIntrMask        = NV_PMC_INTR_INVALID_MASK; \
    (pTable)[(numEntries)].intrVector         = (localIntrVectors)[i];    \
    (pTable)[(numEntries)].intrVectorNonStall = NV_INTR_VECTOR_INVALID;   \
    numEntries++;                                                         \
}
109 
// Default value for intrStuckThreshold
#define INTR_STUCK_THRESHOLD 1000

// Bit flags selecting which interrupt-table entries to initialize
// (kernel-RM vs physical-RM owned; see numKernelEntries/numPhysicalEntries).
#define INTR_TABLE_INIT_KERNEL (1 << 0)
#define INTR_TABLE_INIT_PHYSICAL (1 << 1)
115 
116 /**
117  * @brief This enum specifies the type of DPC node
118  *      INTERRUPT_BASED_DPC: DPC queued for an interrupt source
119  *      SPECIAL_DPC        : DPC queued within processing of another interrupt
120  *                           source
121  *
122  * Currently only used on Fermi+.
123  */
typedef enum
{
    INTERRUPT_BASED_DPC=0,  // DPC queued for an interrupt source
    SPECIAL_DPC             // DPC queued within processing of another interrupt
} DPCTYPE;
129 
130 /**
131  * @brief This is a structure for a node on the DPC Queue
132  *          dpctype: Type of DPC for processing
133  *          dpcdata: Data required for dpc processing
134  *                   This union will contain dpctype specific data
135  *          pNext  : Pointer to the next DPC node
136  *
137  * Currently only used on Fermi+.
138  */
typedef struct _DPCNODE
{
    DPCTYPE dpctype;            // kind of DPC (see DPCTYPE)
    union _dpcdata              // dpctype-specific payload
    {
        MC_ENGINE_BITVECTOR pendingEngines;
    } dpcdata;

    struct _DPCNODE *pNext;     // singly-linked: next node in the DPC queue
} DPCNODE;
149 
150 /**
151  * @brief This is a structure for the DPC Queue
152  *          numEntries: Number of entries currently on DPC queue (debugging purpose)
153  *          pFront    : Front pointer for the queue
154  *          pRear     : Rear pointer for the queue
155  *
156  * Currently only used on Fermi+.
157  */
typedef struct
{
    NvU32    numEntries;  // current queue depth (debugging aid)
    DPCNODE *pFront;      // dequeue end
    DPCNODE *pRear;       // enqueue end
} DPCQUEUE;
164 
165 // Data related to PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING
typedef struct
{
    NvU32 flags;                    // INTR_MASK_FLAGS_* (see below)
    NvU32 cached;                   // Pascal+, to implement intr mask in SW.
    MC_ENGINE_BITVECTOR engMaskUnblocked;
    MC_ENGINE_BITVECTOR engMaskOrig;
    MC_ENGINE_BITVECTOR engMaskIntrsSeen;
    MC_ENGINE_BITVECTOR engMaskIntrsDisabled;
} INTR_MASK;
175 
176 //
177 // interrupt mask information used for lazy interrupt disable and interrupt
178 // masking for locking.
179 //
typedef struct
{
    NvU32 intrEnable;               // saved interrupt-enable state
    MC_ENGINE_BITVECTOR intrMask;   // saved per-engine interrupt mask
} INTR_MASK_CTX;
185 
186 
187 //
188 // IntrMask Locking Flag Defines
189 //
#define INTR_MASK_FLAGS_ISR_SKIP_MASK_UPDATE     NVBIT(0)

// NVOC access control: fields wrapped in PRIVATE_FIELD() are only directly
// addressable in translation units that define NVOC_INTR_H_PRIVATE_ACCESS_ALLOWED.
#ifdef NVOC_INTR_H_PRIVATE_ACCESS_ALLOWED
#define PRIVATE_FIELD(x) x
#else
#define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
#endif
// NVOC-generated engine object for interrupt management. Layout is generated;
// do not reorder fields by hand.
struct Intr {
    // NVOC boilerplate: RTTI, embedded base class, and ancestor back-pointers.
    const struct NVOC_RTTI *__nvoc_rtti;
    struct OBJENGSTATE __nvoc_base_OBJENGSTATE;
    struct Object *__nvoc_pbase_Object;
    struct OBJENGSTATE *__nvoc_pbase_OBJENGSTATE;
    struct Intr *__nvoc_pbase_Intr;
    // Virtual function table: per-HAL/overridable entry points, invoked
    // through the intr*_DISPATCH macros defined later in this header.
    NV_STATUS (*__intrConstructEngine__)(OBJGPU *, struct Intr *, ENGDESCRIPTOR);
    NV_STATUS (*__intrStateInitUnlocked__)(OBJGPU *, struct Intr *);
    NV_STATUS (*__intrStateInitLocked__)(OBJGPU *, struct Intr *);
    void (*__intrStateDestroy__)(OBJGPU *, struct Intr *);
    NvU32 (*__intrDecodeStallIntrEn__)(OBJGPU *, struct Intr *, NvU32);
    NvU32 (*__intrGetNonStallBaseVector__)(OBJGPU *, struct Intr *);
    NvU64 (*__intrGetUvmSharedLeafEnDisableMask__)(OBJGPU *, struct Intr *);
    void (*__intrSetDisplayInterruptEnable__)(OBJGPU *, struct Intr *, NvBool, struct THREAD_STATE_NODE *);
    NvU32 (*__intrReadRegTopEnSet__)(OBJGPU *, struct Intr *, NvU32, struct THREAD_STATE_NODE *);
    void (*__intrWriteRegTopEnSet__)(OBJGPU *, struct Intr *, NvU32, NvU32, struct THREAD_STATE_NODE *);
    void (*__intrWriteRegTopEnClear__)(OBJGPU *, struct Intr *, NvU32, NvU32, struct THREAD_STATE_NODE *);
    NvU32 (*__intrGetNumLeaves__)(OBJGPU *, struct Intr *);
    NvU32 (*__intrGetLeafSize__)(OBJGPU *, struct Intr *);
    NvU64 (*__intrGetIntrTopNonStallMask__)(OBJGPU *, struct Intr *);
    void (*__intrSanityCheckEngineIntrStallVector__)(OBJGPU *, struct Intr *, NvU32, NvU16);
    void (*__intrSanityCheckEngineIntrNotificationVector__)(OBJGPU *, struct Intr *, NvU32, NvU16);
    NV_STATUS (*__intrStateLoad__)(OBJGPU *, struct Intr *, NvU32);
    NV_STATUS (*__intrStateUnload__)(OBJGPU *, struct Intr *, NvU32);
    NV_STATUS (*__intrSetIntrMask__)(OBJGPU *, struct Intr *, union MC_ENGINE_BITVECTOR *, struct THREAD_STATE_NODE *);
    void (*__intrSetIntrEnInHw__)(OBJGPU *, struct Intr *, NvU32, struct THREAD_STATE_NODE *);
    NvU32 (*__intrGetIntrEnFromHw__)(OBJGPU *, struct Intr *, struct THREAD_STATE_NODE *);
    // OBJENGSTATE virtuals inherited/overridden by Intr.
    NV_STATUS (*__intrStatePreLoad__)(POBJGPU, struct Intr *, NvU32);
    NV_STATUS (*__intrStatePostUnload__)(POBJGPU, struct Intr *, NvU32);
    NV_STATUS (*__intrStatePreUnload__)(POBJGPU, struct Intr *, NvU32);
    void (*__intrInitMissing__)(POBJGPU, struct Intr *);
    NV_STATUS (*__intrStatePreInitLocked__)(POBJGPU, struct Intr *);
    NV_STATUS (*__intrStatePreInitUnlocked__)(POBJGPU, struct Intr *);
    NV_STATUS (*__intrStatePostLoad__)(POBJGPU, struct Intr *, NvU32);
    NvBool (*__intrIsPresent__)(POBJGPU, struct Intr *);
    // PDB properties: boolean feature/workaround flags (aliased by the
    // PDB_PROP_INTR_*_BASE_NAME macros below).
    NvBool PDB_PROP_INTR_ENABLE_DETAILED_LOGS;
    NvBool PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC;
    NvBool PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS;
    NvBool PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET;
    NvBool PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE;
    NvBool PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING;
    NvBool PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING;
    NvBool PDB_PROP_INTR_MASK_SUPPORTED;
    // Interrupt vector / enable bookkeeping.
    NvU32 nonStallPmcIntrMask;
    NvU64 uvmSharedCpuLeafEn;
    NvU64 uvmSharedCpuLeafEnDisableMask;
    NvU32 replayableFaultIntrVector;
    NvU32 accessCntrIntrVector;
    NvU32 displayIntrVector;
    NvU64 intrTopEnMask;
    IntrServiceRecord intrServiceTable[166];
    NvBool bDefaultNonstallNotify;
    // Interrupt table: pIntrTable has intrTableSz entries; pStaticKernelTable
    // is the fixed-size backing store (INTR_TABLE_MAX_STATIC_KERNEL_INTRS).
    NvU32 intrTableSz;
    INTR_TABLE_ENTRY *pIntrTable;
    NV2080_INTR_CATEGORY_SUBTREE_MAP subtreeMap[7];
    INTR_TABLE_ENTRY pStaticKernelTable[17];
    // DPC queue state (see DPCQUEUE above).
    NvBool bDpcStarted;
    union MC_ENGINE_BITVECTOR pmcIntrPending;
    DPCQUEUE dpcQueue;
    NvU32 intrStuckThreshold;
    INTR_MASK intrMask;
    union MC_ENGINE_BITVECTOR helperEngineMask;
    // Cached interrupt-enable register state.
    NvU32 intrEn0;
    NvU32 intrCachedEn0;
    NvU32 intrCachedEnSet;
    NvU32 intrCachedEnClear;
    NvU32 intrEn0Orig;
    NvBool halIntrEnabled;
    NvU32 saveIntrEn0;
    // Table-population progress (kernel vs physical entries).
    NvBool bTablesPopulated;
    NvU32 numPhysicalEntries;
    NvU32 numKernelEntries;
};
270 
// NVOC class registration: typedef guard, stable class id, and the generated
// class definition emitted in the corresponding g_intr_nvoc.c.
#ifndef __NVOC_CLASS_Intr_TYPEDEF__
#define __NVOC_CLASS_Intr_TYPEDEF__
typedef struct Intr Intr;
#endif /* __NVOC_CLASS_Intr_TYPEDEF__ */

#ifndef __nvoc_class_id_Intr
#define __nvoc_class_id_Intr 0xc06e44
#endif /* __nvoc_class_id_Intr */

extern const struct NVOC_CLASS_DEF __nvoc_class_def_Intr;
281 
// Casting helpers: static cast uses the precomputed ancestor pointer;
// dynamic cast goes through NVOC RTTI (and degrades to NULL when the Intr
// engine is compiled out via __nvoc_intr_h_disabled).
#define __staticCast_Intr(pThis) \
    ((pThis)->__nvoc_pbase_Intr)

#ifdef __nvoc_intr_h_disabled
#define __dynamicCast_Intr(pThis) ((Intr*)NULL)
#else //__nvoc_intr_h_disabled
#define __dynamicCast_Intr(pThis) \
    ((Intr*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(Intr)))
#endif //__nvoc_intr_h_disabled

// PDB property aliases: map each PDB_PROP_INTR_* to the struct field (and,
// for IS_MISSING, to the base-class OBJENGSTATE field) that stores it.
#define PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC_BASE_CAST
#define PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC_BASE_NAME PDB_PROP_INTR_HOST_DRIVEN_ENGINES_REMOVED_FROM_PMC
#define PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE_BASE_CAST
#define PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE_BASE_NAME PDB_PROP_INTR_SIMPLIFIED_VBLANK_HANDLING_FOR_CTRL_TREE
#define PDB_PROP_INTR_MASK_SUPPORTED_BASE_CAST
#define PDB_PROP_INTR_MASK_SUPPORTED_BASE_NAME PDB_PROP_INTR_MASK_SUPPORTED
#define PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING_BASE_CAST
#define PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING_BASE_NAME PDB_PROP_INTR_USE_INTR_MASK_FOR_LOCKING
#define PDB_PROP_INTR_IS_MISSING_BASE_CAST __nvoc_base_OBJENGSTATE.
#define PDB_PROP_INTR_IS_MISSING_BASE_NAME PDB_PROP_ENGSTATE_IS_MISSING
#define PDB_PROP_INTR_ENABLE_DETAILED_LOGS_BASE_CAST
#define PDB_PROP_INTR_ENABLE_DETAILED_LOGS_BASE_NAME PDB_PROP_INTR_ENABLE_DETAILED_LOGS
#define PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET_BASE_CAST
#define PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET_BASE_NAME PDB_PROP_INTR_ENUMERATIONS_ON_ENGINE_RESET
#define PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS_BASE_CAST
#define PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS_BASE_NAME PDB_PROP_INTR_READ_ONLY_EVEN_NUMBERED_INTR_LEAF_REGS
#define PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING_BASE_CAST
#define PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING_BASE_NAME PDB_PROP_INTR_DISABLE_PER_INTR_DPC_QUEUEING

// Object construction entry points generated by NVOC.
NV_STATUS __nvoc_objCreateDynamic_Intr(Intr**, Dynamic*, NvU32, va_list);

NV_STATUS __nvoc_objCreate_Intr(Intr**, Dynamic*, NvU32);
#define __objCreate_Intr(ppNewObj, pParent, createFlags) \
    __nvoc_objCreate_Intr((ppNewObj), staticCast((pParent), Dynamic), (createFlags))
316 
// Virtual-call wrappers: each intrFoo(...) (and its _HAL alias, where one
// exists) routes through the corresponding __intrFoo__ function pointer in
// struct Intr via the generated *_DISPATCH helpers.
#define intrConstructEngine(pGpu, pIntr, arg0) intrConstructEngine_DISPATCH(pGpu, pIntr, arg0)
#define intrStateInitUnlocked(pGpu, pIntr) intrStateInitUnlocked_DISPATCH(pGpu, pIntr)
#define intrStateInitLocked(pGpu, pIntr) intrStateInitLocked_DISPATCH(pGpu, pIntr)
#define intrStateDestroy(pGpu, pIntr) intrStateDestroy_DISPATCH(pGpu, pIntr)
#define intrDecodeStallIntrEn(pGpu, pIntr, arg0) intrDecodeStallIntrEn_DISPATCH(pGpu, pIntr, arg0)
#define intrDecodeStallIntrEn_HAL(pGpu, pIntr, arg0) intrDecodeStallIntrEn_DISPATCH(pGpu, pIntr, arg0)
#define intrGetNonStallBaseVector(pGpu, pIntr) intrGetNonStallBaseVector_DISPATCH(pGpu, pIntr)
#define intrGetNonStallBaseVector_HAL(pGpu, pIntr) intrGetNonStallBaseVector_DISPATCH(pGpu, pIntr)
#define intrGetUvmSharedLeafEnDisableMask(pGpu, pIntr) intrGetUvmSharedLeafEnDisableMask_DISPATCH(pGpu, pIntr)
#define intrGetUvmSharedLeafEnDisableMask_HAL(pGpu, pIntr) intrGetUvmSharedLeafEnDisableMask_DISPATCH(pGpu, pIntr)
#define intrSetDisplayInterruptEnable(pGpu, pIntr, bEnable, pThreadState) intrSetDisplayInterruptEnable_DISPATCH(pGpu, pIntr, bEnable, pThreadState)
#define intrSetDisplayInterruptEnable_HAL(pGpu, pIntr, bEnable, pThreadState) intrSetDisplayInterruptEnable_DISPATCH(pGpu, pIntr, bEnable, pThreadState)
#define intrReadRegTopEnSet(pGpu, pIntr, arg0, arg1) intrReadRegTopEnSet_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrReadRegTopEnSet_HAL(pGpu, pIntr, arg0, arg1) intrReadRegTopEnSet_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrWriteRegTopEnSet(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegTopEnSet_DISPATCH(pGpu, pIntr, arg0, arg1, arg2)
#define intrWriteRegTopEnSet_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegTopEnSet_DISPATCH(pGpu, pIntr, arg0, arg1, arg2)
#define intrWriteRegTopEnClear(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegTopEnClear_DISPATCH(pGpu, pIntr, arg0, arg1, arg2)
#define intrWriteRegTopEnClear_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegTopEnClear_DISPATCH(pGpu, pIntr, arg0, arg1, arg2)
#define intrGetNumLeaves(pGpu, pIntr) intrGetNumLeaves_DISPATCH(pGpu, pIntr)
#define intrGetNumLeaves_HAL(pGpu, pIntr) intrGetNumLeaves_DISPATCH(pGpu, pIntr)
#define intrGetLeafSize(pGpu, pIntr) intrGetLeafSize_DISPATCH(pGpu, pIntr)
#define intrGetLeafSize_HAL(pGpu, pIntr) intrGetLeafSize_DISPATCH(pGpu, pIntr)
#define intrGetIntrTopNonStallMask(pGpu, pIntr) intrGetIntrTopNonStallMask_DISPATCH(pGpu, pIntr)
#define intrGetIntrTopNonStallMask_HAL(pGpu, pIntr) intrGetIntrTopNonStallMask_DISPATCH(pGpu, pIntr)
#define intrSanityCheckEngineIntrStallVector(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrStallVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
#define intrSanityCheckEngineIntrStallVector_HAL(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrStallVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
#define intrSanityCheckEngineIntrNotificationVector(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrNotificationVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
#define intrSanityCheckEngineIntrNotificationVector_HAL(pGpu, pIntr, vector, mcEngine) intrSanityCheckEngineIntrNotificationVector_DISPATCH(pGpu, pIntr, vector, mcEngine)
#define intrStateLoad(pGpu, pIntr, arg0) intrStateLoad_DISPATCH(pGpu, pIntr, arg0)
#define intrStateLoad_HAL(pGpu, pIntr, arg0) intrStateLoad_DISPATCH(pGpu, pIntr, arg0)
#define intrStateUnload(pGpu, pIntr, arg0) intrStateUnload_DISPATCH(pGpu, pIntr, arg0)
#define intrStateUnload_HAL(pGpu, pIntr, arg0) intrStateUnload_DISPATCH(pGpu, pIntr, arg0)
#define intrSetIntrMask(pGpu, pIntr, arg0, arg1) intrSetIntrMask_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrSetIntrMask_HAL(pGpu, pIntr, arg0, arg1) intrSetIntrMask_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrSetIntrEnInHw(pGpu, pIntr, arg0, arg1) intrSetIntrEnInHw_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrSetIntrEnInHw_HAL(pGpu, pIntr, arg0, arg1) intrSetIntrEnInHw_DISPATCH(pGpu, pIntr, arg0, arg1)
#define intrGetIntrEnFromHw(pGpu, pIntr, arg0) intrGetIntrEnFromHw_DISPATCH(pGpu, pIntr, arg0)
#define intrGetIntrEnFromHw_HAL(pGpu, pIntr, arg0) intrGetIntrEnFromHw_DISPATCH(pGpu, pIntr, arg0)
// OBJENGSTATE-inherited virtuals (note the pEngstate parameter naming).
#define intrStatePreLoad(pGpu, pEngstate, arg0) intrStatePreLoad_DISPATCH(pGpu, pEngstate, arg0)
#define intrStatePostUnload(pGpu, pEngstate, arg0) intrStatePostUnload_DISPATCH(pGpu, pEngstate, arg0)
#define intrStatePreUnload(pGpu, pEngstate, arg0) intrStatePreUnload_DISPATCH(pGpu, pEngstate, arg0)
#define intrInitMissing(pGpu, pEngstate) intrInitMissing_DISPATCH(pGpu, pEngstate)
#define intrStatePreInitLocked(pGpu, pEngstate) intrStatePreInitLocked_DISPATCH(pGpu, pEngstate)
#define intrStatePreInitUnlocked(pGpu, pEngstate) intrStatePreInitUnlocked_DISPATCH(pGpu, pEngstate)
#define intrStatePostLoad(pGpu, pEngstate, arg0) intrStatePostLoad_DISPATCH(pGpu, pEngstate, arg0)
#define intrIsPresent(pGpu, pEngstate) intrIsPresent_DISPATCH(pGpu, pEngstate)
// FECS event-buffer helpers. Pattern used for the rest of this header: the
// public name binds directly to one implementation via #define; when the Intr
// engine is compiled out (__nvoc_intr_h_disabled) calls hit an asserting stub
// that returns NV_ERR_NOT_SUPPORTED. The _HAL alias forwards to the same call.
NV_STATUS intrCheckFecsEventbufferPending_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCheckFecsEventbufferPending(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCheckFecsEventbufferPending(pGpu, pIntr, arg0, arg1) intrCheckFecsEventbufferPending_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrCheckFecsEventbufferPending_HAL(pGpu, pIntr, arg0, arg1) intrCheckFecsEventbufferPending(pGpu, pIntr, arg0, arg1)

NV_STATUS intrCheckAndServiceFecsEventbuffer_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCheckAndServiceFecsEventbuffer(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCheckAndServiceFecsEventbuffer(pGpu, pIntr, arg0, arg1) intrCheckAndServiceFecsEventbuffer_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrCheckAndServiceFecsEventbuffer_HAL(pGpu, pIntr, arg0, arg1) intrCheckAndServiceFecsEventbuffer(pGpu, pIntr, arg0, arg1)
390 
// intrStateDestroyPhysical: NOTE this build config binds the public name to
// the no-op _56cd7a variant (returns NV_OK); _IMPL is declared but not the
// bound implementation here.
static inline NV_STATUS intrStateDestroyPhysical_56cd7a(OBJGPU *pGpu, struct Intr *pIntr) {
    return NV_OK;
}

NV_STATUS intrStateDestroyPhysical_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrStateDestroyPhysical(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrStateDestroyPhysical(pGpu, pIntr) intrStateDestroyPhysical_56cd7a(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrStateDestroyPhysical_HAL(pGpu, pIntr) intrStateDestroyPhysical(pGpu, pIntr)

// Bug 1470153 WAR: bound to the _b3696a no-op variant in this build config.
static inline void intrSetInterruptMaskBug1470153War_b3696a(OBJGPU *pGpu, struct Intr *pIntr) {
    return;
}


#ifdef __nvoc_intr_h_disabled
static inline void intrSetInterruptMaskBug1470153War(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetInterruptMaskBug1470153War(pGpu, pIntr) intrSetInterruptMaskBug1470153War_b3696a(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrSetInterruptMaskBug1470153War_HAL(pGpu, pIntr) intrSetInterruptMaskBug1470153War(pGpu, pIntr)
423 
// Non-stall interrupt query/service/enable: bound to the TU102 HAL variants.
NV_STATUS intrGetPendingNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingNonStall(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingNonStall(pGpu, pIntr, arg0, arg1) intrGetPendingNonStall_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingNonStall_HAL(pGpu, pIntr, arg0, arg1) intrGetPendingNonStall(pGpu, pIntr, arg0, arg1)

NV_STATUS intrServiceNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrServiceNonStall(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrServiceNonStall(pGpu, pIntr, arg0, arg1) intrServiceNonStall_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrServiceNonStall_HAL(pGpu, pIntr, arg0, arg1) intrServiceNonStall(pGpu, pIntr, arg0, arg1)

NvU32 intrGetNonStallEnable_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetNonStallEnable(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetNonStallEnable(pGpu, pIntr, arg0) intrGetNonStallEnable_TU102(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

#define intrGetNonStallEnable_HAL(pGpu, pIntr, arg0) intrGetNonStallEnable(pGpu, pIntr, arg0)
465 
// Non-stall disable/restore and stall-mode query: TU102 HAL bindings.
void intrDisableNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0);


#ifdef __nvoc_intr_h_disabled
static inline void intrDisableNonStall(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableNonStall(pGpu, pIntr, arg0) intrDisableNonStall_TU102(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

#define intrDisableNonStall_HAL(pGpu, pIntr, arg0) intrDisableNonStall(pGpu, pIntr, arg0)

void intrRestoreNonStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline void intrRestoreNonStall(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrRestoreNonStall(pGpu, pIntr, arg0, arg1) intrRestoreNonStall_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrRestoreNonStall_HAL(pGpu, pIntr, arg0, arg1) intrRestoreNonStall(pGpu, pIntr, arg0, arg1)

void intrGetStallInterruptMode_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *pIntrmode, NvBool *pPending);


#ifdef __nvoc_intr_h_disabled
static inline void intrGetStallInterruptMode(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *pIntrmode, NvBool *pPending) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetStallInterruptMode(pGpu, pIntr, pIntrmode, pPending) intrGetStallInterruptMode_TU102(pGpu, pIntr, pIntrmode, pPending)
#endif //__nvoc_intr_h_disabled

#define intrGetStallInterruptMode_HAL(pGpu, pIntr, pIntrmode, pPending) intrGetStallInterruptMode(pGpu, pIntr, pIntrmode, pPending)
504 
// Stall interrupt-enable encode (GP100 HAL binding).
void intrEncodeStallIntrEn_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrEn, NvU32 *pIntrEnSet, NvU32 *pIntrEnClear);


#ifdef __nvoc_intr_h_disabled
static inline void intrEncodeStallIntrEn(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrEn, NvU32 *pIntrEnSet, NvU32 *pIntrEnClear) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEncodeStallIntrEn(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear) intrEncodeStallIntrEn_GP100(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear)
#endif //__nvoc_intr_h_disabled

#define intrEncodeStallIntrEn_HAL(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear) intrEncodeStallIntrEn(pGpu, pIntr, intrEn, pIntrEnSet, pIntrEnClear)

NV_STATUS intrCheckAndServiceNonReplayableFault_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCheckAndServiceNonReplayableFault(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCheckAndServiceNonReplayableFault(pGpu, pIntr, arg0) intrCheckAndServiceNonReplayableFault_TU102(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

#define intrCheckAndServiceNonReplayableFault_HAL(pGpu, pIntr, arg0) intrCheckAndServiceNonReplayableFault(pGpu, pIntr, arg0)

// intrGetStallBaseVector: three variants are emitted (constant-0, TU102, and
// an asserting stub); NOTE this build config binds the public name to the
// constant-0 _4a4dee variant.
static inline NvU32 intrGetStallBaseVector_4a4dee(OBJGPU *pGpu, struct Intr *pIntr) {
    return 0;
}

NvU32 intrGetStallBaseVector_TU102(OBJGPU *pGpu, struct Intr *pIntr);

static inline NvU32 intrGetStallBaseVector_c067f9(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetStallBaseVector(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetStallBaseVector(pGpu, pIntr) intrGetStallBaseVector_4a4dee(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrGetStallBaseVector_HAL(pGpu, pIntr) intrGetStallBaseVector(pGpu, pIntr)
553 
// Leaf-level and top-level non-stall enable/disable: TU102 HAL bindings.
void intrEnableLeaf_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector);


#ifdef __nvoc_intr_h_disabled
static inline void intrEnableLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEnableLeaf(pGpu, pIntr, intrVector) intrEnableLeaf_TU102(pGpu, pIntr, intrVector)
#endif //__nvoc_intr_h_disabled

#define intrEnableLeaf_HAL(pGpu, pIntr, intrVector) intrEnableLeaf(pGpu, pIntr, intrVector)

void intrDisableLeaf_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector);


#ifdef __nvoc_intr_h_disabled
static inline void intrDisableLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrVector) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableLeaf(pGpu, pIntr, intrVector) intrDisableLeaf_TU102(pGpu, pIntr, intrVector)
#endif //__nvoc_intr_h_disabled

#define intrDisableLeaf_HAL(pGpu, pIntr, intrVector) intrDisableLeaf(pGpu, pIntr, intrVector)

void intrEnableTopNonstall_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline void intrEnableTopNonstall(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEnableTopNonstall(pGpu, pIntr, pThreadState) intrEnableTopNonstall_TU102(pGpu, pIntr, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrEnableTopNonstall_HAL(pGpu, pIntr, pThreadState) intrEnableTopNonstall(pGpu, pIntr, pThreadState)

void intrDisableTopNonstall_TU102(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline void intrDisableTopNonstall(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableTopNonstall(pGpu, pIntr, pThreadState) intrDisableTopNonstall_TU102(pGpu, pIntr, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrDisableTopNonstall_HAL(pGpu, pIntr, pThreadState) intrDisableTopNonstall(pGpu, pIntr, pThreadState)
605 
// Stall mode set and leaf-vector clearing (TU102 HAL bindings).
void intrSetStall_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrType, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline void intrSetStall(OBJGPU *pGpu, struct Intr *pIntr, NvU32 intrType, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetStall(pGpu, pIntr, intrType, pThreadState) intrSetStall_TU102(pGpu, pIntr, intrType, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrSetStall_HAL(pGpu, pIntr, intrType, pThreadState) intrSetStall(pGpu, pIntr, intrType, pThreadState)

void intrClearLeafVector_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline void intrClearLeafVector(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrClearLeafVector(pGpu, pIntr, vector, pThreadState) intrClearLeafVector_TU102(pGpu, pIntr, vector, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrClearLeafVector_HAL(pGpu, pIntr, vector, pThreadState) intrClearLeafVector(pGpu, pIntr, vector, pThreadState)

// intrClearCpuLeafVector: NOTE this build config binds to the no-op _b3696a
// variant; the GH100 implementation is declared but not bound here.
static inline void intrClearCpuLeafVector_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState) {
    return;
}

void intrClearCpuLeafVector_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline void intrClearCpuLeafVector(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrClearCpuLeafVector(pGpu, pIntr, vector, pThreadState) intrClearCpuLeafVector_b3696a(pGpu, pIntr, vector, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrClearCpuLeafVector_HAL(pGpu, pIntr, vector, pThreadState) intrClearCpuLeafVector(pGpu, pIntr, vector, pThreadState)
648 
649 static inline void intrWriteCpuRegLeaf_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
650     return;
651 }
652 
653 void intrWriteCpuRegLeaf_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);
654 
655 
656 #ifdef __nvoc_intr_h_disabled
657 static inline void intrWriteCpuRegLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
658     NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
659 }
660 #else //__nvoc_intr_h_disabled
661 #define intrWriteCpuRegLeaf(pGpu, pIntr, arg0, arg1, arg2) intrWriteCpuRegLeaf_b3696a(pGpu, pIntr, arg0, arg1, arg2)
662 #endif //__nvoc_intr_h_disabled
663 
664 #define intrWriteCpuRegLeaf_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteCpuRegLeaf(pGpu, pIntr, arg0, arg1, arg2)
665 
// HAL dispatch unit: intrIsVectorPending (dispatches to _TU102).
// Disabled stub asserts and returns NV_FALSE.
NvBool intrIsVectorPending_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline NvBool intrIsVectorPending(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrIsVectorPending(pGpu, pIntr, vector, pThreadState) intrIsVectorPending_TU102(pGpu, pIntr, vector, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrIsVectorPending_HAL(pGpu, pIntr, vector, pThreadState) intrIsVectorPending(pGpu, pIntr, vector, pThreadState)

// HAL dispatch unit: intrSetStallSWIntr (dispatches to _TU102).
// Disabled stub asserts and returns NV_ERR_NOT_SUPPORTED.
NV_STATUS intrSetStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrSetStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrSetStallSWIntr(pGpu, pIntr) intrSetStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrSetStallSWIntr_HAL(pGpu, pIntr) intrSetStallSWIntr(pGpu, pIntr)

// HAL dispatch unit: intrClearStallSWIntr (dispatches to _TU102).
NV_STATUS intrClearStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrClearStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrClearStallSWIntr(pGpu, pIntr) intrClearStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrClearStallSWIntr_HAL(pGpu, pIntr) intrClearStallSWIntr(pGpu, pIntr)

// HAL dispatch unit: intrEnableStallSWIntr (dispatches to _TU102).
void intrEnableStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrEnableStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrEnableStallSWIntr(pGpu, pIntr) intrEnableStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrEnableStallSWIntr_HAL(pGpu, pIntr) intrEnableStallSWIntr(pGpu, pIntr)

// HAL dispatch unit: intrDisableStallSWIntr (dispatches to _TU102).
void intrDisableStallSWIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrDisableStallSWIntr(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDisableStallSWIntr(pGpu, pIntr) intrDisableStallSWIntr_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrDisableStallSWIntr_HAL(pGpu, pIntr) intrDisableStallSWIntr(pGpu, pIntr)
733 
// HAL dispatch unit: intrServiceVirtual (dispatches to _TU102).
void intrServiceVirtual_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceVirtual(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceVirtual(pGpu, pIntr) intrServiceVirtual_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrServiceVirtual_HAL(pGpu, pIntr) intrServiceVirtual(pGpu, pIntr)

// HAL dispatch unit: intrResetIntrRegistersForVF.
// Default wrapper dispatches to the _b3696a no-op; a TU102 impl also exists.
static inline void intrResetIntrRegistersForVF_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    return;
}

void intrResetIntrRegistersForVF_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid);


#ifdef __nvoc_intr_h_disabled
static inline void intrResetIntrRegistersForVF(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrResetIntrRegistersForVF(pGpu, pIntr, gfid) intrResetIntrRegistersForVF_b3696a(pGpu, pIntr, gfid)
#endif //__nvoc_intr_h_disabled

#define intrResetIntrRegistersForVF_HAL(pGpu, pIntr, gfid) intrResetIntrRegistersForVF(pGpu, pIntr, gfid)

// HAL dispatch unit: intrSaveIntrRegValue.
// The _46f6a7 variant unconditionally returns NV_ERR_NOT_SUPPORTED (no
// assert); the default wrapper dispatches to it. A TU102 impl also exists.
static inline NV_STATUS intrSaveIntrRegValue_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1, NvU32 *arg2) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrSaveIntrRegValue_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1, NvU32 *arg2);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrSaveIntrRegValue(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1, NvU32 *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrSaveIntrRegValue(pGpu, pIntr, arg0, arg1, arg2) intrSaveIntrRegValue_46f6a7(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrSaveIntrRegValue_HAL(pGpu, pIntr, arg0, arg1, arg2) intrSaveIntrRegValue(pGpu, pIntr, arg0, arg1, arg2)

// HAL dispatch unit: intrRestoreIntrRegValue (default: _46f6a7 not-supported).
static inline NV_STATUS intrRestoreIntrRegValue_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, NvU32 *arg2) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrRestoreIntrRegValue_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, NvU32 *arg2);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrRestoreIntrRegValue(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, NvU32 *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrRestoreIntrRegValue(pGpu, pIntr, arg0, arg1, arg2) intrRestoreIntrRegValue_46f6a7(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrRestoreIntrRegValue_HAL(pGpu, pIntr, arg0, arg1, arg2) intrRestoreIntrRegValue(pGpu, pIntr, arg0, arg1, arg2)
799 
// HAL dispatch unit: intrTriggerCpuDoorbellForVF (default: _46f6a7
// not-supported variant; a TU102 impl also exists).
static inline NV_STATUS intrTriggerCpuDoorbellForVF_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrTriggerCpuDoorbellForVF_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrTriggerCpuDoorbellForVF(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrTriggerCpuDoorbellForVF(pGpu, pIntr, gfid) intrTriggerCpuDoorbellForVF_46f6a7(pGpu, pIntr, gfid)
#endif //__nvoc_intr_h_disabled

#define intrTriggerCpuDoorbellForVF_HAL(pGpu, pIntr, gfid) intrTriggerCpuDoorbellForVF(pGpu, pIntr, gfid)

// HAL dispatch unit: intrTriggerPrivDoorbell (dispatches to _TU102).
NV_STATUS intrTriggerPrivDoorbell_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrTriggerPrivDoorbell(OBJGPU *pGpu, struct Intr *pIntr, NvU32 gfid) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrTriggerPrivDoorbell(pGpu, pIntr, gfid) intrTriggerPrivDoorbell_TU102(pGpu, pIntr, gfid)
#endif //__nvoc_intr_h_disabled

#define intrTriggerPrivDoorbell_HAL(pGpu, pIntr, gfid) intrTriggerPrivDoorbell(pGpu, pIntr, gfid)

// HAL dispatch unit: intrRetriggerTopLevel (dispatches to _TU102).
void intrRetriggerTopLevel_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrRetriggerTopLevel(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrRetriggerTopLevel(pGpu, pIntr) intrRetriggerTopLevel_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrRetriggerTopLevel_HAL(pGpu, pIntr) intrRetriggerTopLevel(pGpu, pIntr)

// HAL dispatch unit: intrGetLeafStatus (dispatches to _TU102).
NV_STATUS intrGetLeafStatus_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetLeafStatus(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetLeafStatus(pGpu, pIntr, arg0, arg1) intrGetLeafStatus_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetLeafStatus_HAL(pGpu, pIntr, arg0, arg1) intrGetLeafStatus(pGpu, pIntr, arg0, arg1)
858 
// HAL dispatch unit: intrGetPendingDisplayIntr (dispatches to _TU102).
NV_STATUS intrGetPendingDisplayIntr_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines, struct THREAD_STATE_NODE *pThreadState);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingDisplayIntr(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines, struct THREAD_STATE_NODE *pThreadState) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingDisplayIntr(pGpu, pIntr, pEngines, pThreadState) intrGetPendingDisplayIntr_TU102(pGpu, pIntr, pEngines, pThreadState)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingDisplayIntr_HAL(pGpu, pIntr, pEngines, pThreadState) intrGetPendingDisplayIntr(pGpu, pIntr, pEngines, pThreadState)

// HAL dispatch unit: intrDumpState (dispatches to _TU102).
void intrDumpState_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrDumpState(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrDumpState(pGpu, pIntr) intrDumpState_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrDumpState_HAL(pGpu, pIntr) intrDumpState(pGpu, pIntr)

// HAL dispatch unit: intrCacheIntrFields (dispatches to _TU102).
NV_STATUS intrCacheIntrFields_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrCacheIntrFields(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrCacheIntrFields(pGpu, pIntr) intrCacheIntrFields_TU102(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrCacheIntrFields_HAL(pGpu, pIntr) intrCacheIntrFields(pGpu, pIntr)

// HAL dispatch unit: intrReadRegLeafEnSet (dispatches to _TU102).
// Disabled stub asserts and returns 0.
NvU32 intrReadRegLeafEnSet_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadRegLeafEnSet(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadRegLeafEnSet(pGpu, pIntr, arg0, arg1) intrReadRegLeafEnSet_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrReadRegLeafEnSet_HAL(pGpu, pIntr, arg0, arg1) intrReadRegLeafEnSet(pGpu, pIntr, arg0, arg1)
913 
// HAL dispatch unit: intrReadRegLeaf (dispatches to _TU102).
NvU32 intrReadRegLeaf_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadRegLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadRegLeaf(pGpu, pIntr, arg0, arg1) intrReadRegLeaf_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrReadRegLeaf_HAL(pGpu, pIntr, arg0, arg1) intrReadRegLeaf(pGpu, pIntr, arg0, arg1)

// HAL dispatch unit: intrReadRegTop (dispatches to _TU102).
NvU32 intrReadRegTop_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadRegTop(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadRegTop(pGpu, pIntr, arg0, arg1) intrReadRegTop_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrReadRegTop_HAL(pGpu, pIntr, arg0, arg1) intrReadRegTop(pGpu, pIntr, arg0, arg1)

// HAL dispatch unit: intrWriteRegLeafEnSet (dispatches to _TU102).
void intrWriteRegLeafEnSet_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);


#ifdef __nvoc_intr_h_disabled
static inline void intrWriteRegLeafEnSet(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrWriteRegLeafEnSet(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeafEnSet_TU102(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrWriteRegLeafEnSet_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeafEnSet(pGpu, pIntr, arg0, arg1, arg2)

// HAL dispatch unit: intrWriteRegLeafEnClear (dispatches to _TU102).
void intrWriteRegLeafEnClear_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);


#ifdef __nvoc_intr_h_disabled
static inline void intrWriteRegLeafEnClear(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrWriteRegLeafEnClear(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeafEnClear_TU102(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrWriteRegLeafEnClear_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeafEnClear(pGpu, pIntr, arg0, arg1, arg2)

// HAL dispatch unit: intrWriteRegLeaf (dispatches to _TU102).
void intrWriteRegLeaf_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);


#ifdef __nvoc_intr_h_disabled
static inline void intrWriteRegLeaf(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrWriteRegLeaf(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeaf_TU102(pGpu, pIntr, arg0, arg1, arg2)
#endif //__nvoc_intr_h_disabled

#define intrWriteRegLeaf_HAL(pGpu, pIntr, arg0, arg1, arg2) intrWriteRegLeaf(pGpu, pIntr, arg0, arg1, arg2)
980 
// HAL dispatch unit: intrRouteInterruptsToSystemFirmware.
// Default wrapper dispatches to the _b3696a no-op; a GH100 impl also exists.
static inline void intrRouteInterruptsToSystemFirmware_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable) {
    return;
}

void intrRouteInterruptsToSystemFirmware_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable);


#ifdef __nvoc_intr_h_disabled
static inline void intrRouteInterruptsToSystemFirmware(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrRouteInterruptsToSystemFirmware(pGpu, pIntr, bEnable) intrRouteInterruptsToSystemFirmware_b3696a(pGpu, pIntr, bEnable)
#endif //__nvoc_intr_h_disabled

#define intrRouteInterruptsToSystemFirmware_HAL(pGpu, pIntr, bEnable) intrRouteInterruptsToSystemFirmware(pGpu, pIntr, bEnable)

// HAL dispatch unit: intrInitDynamicInterruptTable.
// The _5baef9 variant asserts (precompiled assert-or-return) and returns
// NV_ERR_NOT_SUPPORTED; the default wrapper dispatches to it.
static inline NV_STATUS intrInitDynamicInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, struct OBJFIFO *arg0, INTR_TABLE_ENTRY *arg1, NvU32 arg2, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitDynamicInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, struct OBJFIFO *arg0, INTR_TABLE_ENTRY *arg1, NvU32 arg2, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitDynamicInterruptTable(pGpu, pIntr, arg0, arg1, arg2, initFlags) intrInitDynamicInterruptTable_5baef9(pGpu, pIntr, arg0, arg1, arg2, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrInitDynamicInterruptTable_HAL(pGpu, pIntr, arg0, arg1, arg2, initFlags) intrInitDynamicInterruptTable(pGpu, pIntr, arg0, arg1, arg2, initFlags)

// HAL dispatch unit: intrInitAnyInterruptTable (default: _5baef9 assert/
// not-supported variant).
static inline NV_STATUS intrInitAnyInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY **ppIntrTable, NvU32 *pIntrTableSz, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitAnyInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY **ppIntrTable, NvU32 *pIntrTableSz, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitAnyInterruptTable(pGpu, pIntr, ppIntrTable, pIntrTableSz, initFlags) intrInitAnyInterruptTable_5baef9(pGpu, pIntr, ppIntrTable, pIntrTableSz, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrInitAnyInterruptTable_HAL(pGpu, pIntr, ppIntrTable, pIntrTableSz, initFlags) intrInitAnyInterruptTable(pGpu, pIntr, ppIntrTable, pIntrTableSz, initFlags)
1029 
// HAL dispatch unit: intrInitSubtreeMap.
// The _395e98 variant returns NV_ERR_NOT_SUPPORTED without asserting; the
// default wrapper dispatches to it. TU102 and GH100 impls also exist.
static inline NV_STATUS intrInitSubtreeMap_395e98(OBJGPU *pGpu, struct Intr *pIntr) {
    return NV_ERR_NOT_SUPPORTED;
}

NV_STATUS intrInitSubtreeMap_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NV_STATUS intrInitSubtreeMap_GH100(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitSubtreeMap(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitSubtreeMap(pGpu, pIntr) intrInitSubtreeMap_395e98(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrInitSubtreeMap_HAL(pGpu, pIntr) intrInitSubtreeMap(pGpu, pIntr)

// HAL dispatch unit: intrInitInterruptTable (dispatches to _KERNEL).
NV_STATUS intrInitInterruptTable_KERNEL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitInterruptTable(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitInterruptTable(pGpu, pIntr) intrInitInterruptTable_KERNEL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrInitInterruptTable_HAL(pGpu, pIntr) intrInitInterruptTable(pGpu, pIntr)

// HAL dispatch unit: intrGetInterruptTable (dispatches to _IMPL).
NV_STATUS intrGetInterruptTable_IMPL(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY **arg0, NvU32 *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY **arg0, NvU32 *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetInterruptTable(pGpu, pIntr, arg0, arg1) intrGetInterruptTable_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetInterruptTable_HAL(pGpu, pIntr, arg0, arg1) intrGetInterruptTable(pGpu, pIntr, arg0, arg1)

// HAL dispatch unit: intrDestroyInterruptTable (dispatches to _IMPL).
NV_STATUS intrDestroyInterruptTable_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrDestroyInterruptTable(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrDestroyInterruptTable(pGpu, pIntr) intrDestroyInterruptTable_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrDestroyInterruptTable_HAL(pGpu, pIntr) intrDestroyInterruptTable(pGpu, pIntr)
1091 
// HAL dispatch unit: intrGetStaticVFmcEngines (default: _5baef9 assert/
// not-supported variant; TU102 and GA100 impls also exist).
static inline NV_STATUS intrGetStaticVFmcEngines_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetStaticVFmcEngines_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount);

NV_STATUS intrGetStaticVFmcEngines_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetStaticVFmcEngines(OBJGPU *pGpu, struct Intr *pIntr, NvU16 **ppMcEngines, NvU32 *pCount) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetStaticVFmcEngines(pGpu, pIntr, ppMcEngines, pCount) intrGetStaticVFmcEngines_5baef9(pGpu, pIntr, ppMcEngines, pCount)
#endif //__nvoc_intr_h_disabled

#define intrGetStaticVFmcEngines_HAL(pGpu, pIntr, ppMcEngines, pCount) intrGetStaticVFmcEngines(pGpu, pIntr, ppMcEngines, pCount)

// HAL dispatch unit: intrGetStaticInterruptTable (default: _5baef9 variant;
// TU102/GA100/GA102/GH100 impls also exist).
static inline NV_STATUS intrGetStaticInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *pTable, NvU32 *pCount, NvU32 maxCount, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetStaticInterruptTable_TU102(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *pTable, NvU32 *pCount, NvU32 maxCount, NvU32 initFlags);

NV_STATUS intrGetStaticInterruptTable_GA100(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *pTable, NvU32 *pCount, NvU32 maxCount, NvU32 initFlags);

NV_STATUS intrGetStaticInterruptTable_GA102(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *pTable, NvU32 *pCount, NvU32 maxCount, NvU32 initFlags);

NV_STATUS intrGetStaticInterruptTable_GH100(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *pTable, NvU32 *pCount, NvU32 maxCount, NvU32 initFlags);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetStaticInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *pTable, NvU32 *pCount, NvU32 maxCount, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetStaticInterruptTable(pGpu, pIntr, pTable, pCount, maxCount, initFlags) intrGetStaticInterruptTable_5baef9(pGpu, pIntr, pTable, pCount, maxCount, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrGetStaticInterruptTable_HAL(pGpu, pIntr, pTable, pCount, maxCount, initFlags) intrGetStaticInterruptTable(pGpu, pIntr, pTable, pCount, maxCount, initFlags)

// HAL dispatch unit: intrGetGPUHostInterruptTableSize.
// NOTE(review): the _5baef9 stub returns NV_ERR_NOT_SUPPORTED through
// NV_ASSERT_OR_RETURN_PRECOMP even though this function's return type is
// NvU32 (a table size), so a caller on this path receives a nonzero error
// code as a "size". This mirrors the generated pattern used elsewhere in the
// file, but looks type-confused — confirm against the NVOC generator output
// before relying on the returned value on this path.
static inline NvU32 intrGetGPUHostInterruptTableSize_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NvU32 intrGetGPUHostInterruptTableSize_GM107(OBJGPU *pGpu, struct Intr *pIntr, NvU32 initFlags);

NvU32 intrGetGPUHostInterruptTableSize_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 initFlags);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetGPUHostInterruptTableSize(OBJGPU *pGpu, struct Intr *pIntr, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetGPUHostInterruptTableSize(pGpu, pIntr, initFlags) intrGetGPUHostInterruptTableSize_5baef9(pGpu, pIntr, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrGetGPUHostInterruptTableSize_HAL(pGpu, pIntr, initFlags) intrGetGPUHostInterruptTableSize(pGpu, pIntr, initFlags)
1155 
// HAL dispatch unit: intrInitGPUHostInterruptTable (default: _5baef9 assert/
// not-supported variant; GM107 and GA100 impls also exist).
static inline NV_STATUS intrInitGPUHostInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *arg0, NvU32 arg1, NvU32 initFlags) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrInitGPUHostInterruptTable_GM107(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *arg0, NvU32 arg1, NvU32 initFlags);

NV_STATUS intrInitGPUHostInterruptTable_GA100(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *arg0, NvU32 arg1, NvU32 initFlags);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitGPUHostInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *arg0, NvU32 arg1, NvU32 initFlags) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitGPUHostInterruptTable(pGpu, pIntr, arg0, arg1, initFlags) intrInitGPUHostInterruptTable_5baef9(pGpu, pIntr, arg0, arg1, initFlags)
#endif //__nvoc_intr_h_disabled

#define intrInitGPUHostInterruptTable_HAL(pGpu, pIntr, arg0, arg1, initFlags) intrInitGPUHostInterruptTable(pGpu, pIntr, arg0, arg1, initFlags)

// HAL dispatch unit: intrInitEngineSchedInterruptTable (only the _5baef9
// assert/not-supported variant exists in this view).
static inline NV_STATUS intrInitEngineSchedInterruptTable_5baef9(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *arg0, NvU32 arg1) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrInitEngineSchedInterruptTable(OBJGPU *pGpu, struct Intr *pIntr, INTR_TABLE_ENTRY *arg0, NvU32 arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrInitEngineSchedInterruptTable(pGpu, pIntr, arg0, arg1) intrInitEngineSchedInterruptTable_5baef9(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrInitEngineSchedInterruptTable_HAL(pGpu, pIntr, arg0, arg1) intrInitEngineSchedInterruptTable(pGpu, pIntr, arg0, arg1)

// HAL dispatch unit: intrServiceStall (dispatches to _IMPL).
void intrServiceStall_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStall(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStall(pGpu, pIntr) intrServiceStall_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrServiceStall_HAL(pGpu, pIntr) intrServiceStall(pGpu, pIntr)

// HAL dispatch unit: intrServiceStallList (dispatches to _IMPL).
void intrServiceStallList_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStallList(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStallList(pGpu, pIntr, arg0, arg1) intrServiceStallList_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrServiceStallList_HAL(pGpu, pIntr, arg0, arg1) intrServiceStallList(pGpu, pIntr, arg0, arg1)

// HAL dispatch unit: intrServiceStallSingle (dispatches to _IMPL).
void intrServiceStallSingle_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0, NvBool arg1);


#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStallSingle(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0, NvBool arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStallSingle(pGpu, pIntr, arg0, arg1) intrServiceStallSingle_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrServiceStallSingle_HAL(pGpu, pIntr, arg0, arg1) intrServiceStallSingle(pGpu, pIntr, arg0, arg1)
1230 
// HAL dispatch unit: intrProcessDPCQueue (dispatches to _IMPL).
void intrProcessDPCQueue_IMPL(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline void intrProcessDPCQueue(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrProcessDPCQueue(pGpu, pIntr) intrProcessDPCQueue_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrProcessDPCQueue_HAL(pGpu, pIntr) intrProcessDPCQueue(pGpu, pIntr)

// HAL dispatch unit: intrGetIntrMask (dispatches to _GP100).
NV_STATUS intrGetIntrMask_GP100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetIntrMask(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrMask(pGpu, pIntr, arg0, arg1) intrGetIntrMask_GP100(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetIntrMask_HAL(pGpu, pIntr, arg0, arg1) intrGetIntrMask(pGpu, pIntr, arg0, arg1)

// HAL dispatch unit: intrGetEccIntrMaskOffset.
// Default wrapper dispatches to the _5baef9 assert/not-supported variant;
// a GP100 impl and a silent _46f6a7 not-supported variant also exist.
static inline NV_STATUS intrGetEccIntrMaskOffset_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetEccIntrMaskOffset_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1);

static inline NV_STATUS intrGetEccIntrMaskOffset_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetEccIntrMaskOffset(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetEccIntrMaskOffset(pGpu, pIntr, arg0, arg1) intrGetEccIntrMaskOffset_5baef9(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetEccIntrMaskOffset_HAL(pGpu, pIntr, arg0, arg1) intrGetEccIntrMaskOffset(pGpu, pIntr, arg0, arg1)

// HAL dispatch unit: intrGetNvlinkIntrMaskOffset (same variant layout as
// intrGetEccIntrMaskOffset above).
static inline NV_STATUS intrGetNvlinkIntrMaskOffset_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetNvlinkIntrMaskOffset_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1);

static inline NV_STATUS intrGetNvlinkIntrMaskOffset_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
    return NV_ERR_NOT_SUPPORTED;
}


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetNvlinkIntrMaskOffset(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0, NvU32 *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetNvlinkIntrMaskOffset(pGpu, pIntr, arg0, arg1) intrGetNvlinkIntrMaskOffset_5baef9(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetNvlinkIntrMaskOffset_HAL(pGpu, pIntr, arg0, arg1) intrGetNvlinkIntrMaskOffset(pGpu, pIntr, arg0, arg1)
1301 
1302 static inline NV_STATUS intrGetEccVirtualFunctionIntrMask_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvHandle arg0, NvU32 *arg1) {
1303     NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
1304 }
1305 
1306 NV_STATUS intrGetEccVirtualFunctionIntrMask_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvHandle arg0, NvU32 *arg1);
1307 
1308 NV_STATUS intrGetEccVirtualFunctionIntrMask_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvHandle arg0, NvU32 *arg1);
1309 
1310 NV_STATUS intrGetEccVirtualFunctionIntrMask_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvHandle arg0, NvU32 *arg1);
1311 
1312 
1313 #ifdef __nvoc_intr_h_disabled
1314 static inline NV_STATUS intrGetEccVirtualFunctionIntrMask(OBJGPU *pGpu, struct Intr *pIntr, NvHandle arg0, NvU32 *arg1) {
1315     NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1316     return NV_ERR_NOT_SUPPORTED;
1317 }
1318 #else //__nvoc_intr_h_disabled
1319 #define intrGetEccVirtualFunctionIntrMask(pGpu, pIntr, arg0, arg1) intrGetEccVirtualFunctionIntrMask_5baef9(pGpu, pIntr, arg0, arg1)
1320 #endif //__nvoc_intr_h_disabled
1321 
1322 #define intrGetEccVirtualFunctionIntrMask_HAL(pGpu, pIntr, arg0, arg1) intrGetEccVirtualFunctionIntrMask(pGpu, pIntr, arg0, arg1)
1323 
// Returns (via arg1) the NVLink interrupt mask for the virtual function
// selected by arg0. Same NVOC stub/per-chip/disabled/_HAL pattern as above.
static inline NV_STATUS intrGetNvlinkVirtualFunctionIntrMask_5baef9(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NV_STATUS intrGetNvlinkVirtualFunctionIntrMask_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1);

NV_STATUS intrGetNvlinkVirtualFunctionIntrMask_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1);

NV_STATUS intrGetNvlinkVirtualFunctionIntrMask_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetNvlinkVirtualFunctionIntrMask(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetNvlinkVirtualFunctionIntrMask(pGpu, pIntr, arg0, arg1) intrGetNvlinkVirtualFunctionIntrMask_5baef9(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetNvlinkVirtualFunctionIntrMask_HAL(pGpu, pIntr, arg0, arg1) intrGetNvlinkVirtualFunctionIntrMask(pGpu, pIntr, arg0, arg1)

// Returns the all-SMC-partition ECC VF interrupt mask (a bitmask, 0 when
// not applicable).
// NOTE(review): this is an NvU32-returning function, but the assert stub
// returns the status code NV_ERR_NOT_SUPPORTED as the mask value on the
// release-build path. Generated code; callers of the _5baef9 variant get a
// nonzero "mask" that is actually an error code — verify against the NVOC
// generator before depending on the value.
static inline NvU32 intrGetEccVirtualFunctionIntrSmcMaskAll_5baef9(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, NV_ERR_NOT_SUPPORTED);
}

NvU32 intrGetEccVirtualFunctionIntrSmcMaskAll_GA100(OBJGPU *pGpu, struct Intr *pIntr);

NvU32 intrGetEccVirtualFunctionIntrSmcMaskAll_GA102(OBJGPU *pGpu, struct Intr *pIntr);

// Stub for chips with no SMC ECC VF mask: empty mask.
static inline NvU32 intrGetEccVirtualFunctionIntrSmcMaskAll_4a4dee(OBJGPU *pGpu, struct Intr *pIntr) {
    return 0;
}


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetEccVirtualFunctionIntrSmcMaskAll(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetEccVirtualFunctionIntrSmcMaskAll(pGpu, pIntr) intrGetEccVirtualFunctionIntrSmcMaskAll_5baef9(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrGetEccVirtualFunctionIntrSmcMaskAll_HAL(pGpu, pIntr) intrGetEccVirtualFunctionIntrSmcMaskAll(pGpu, pIntr)
1369 
// Whether servicing the given engine set requires raising a "possible
// error" notifier. Default stub answers NV_FALSE.
static inline NvBool intrRequiresPossibleErrorNotifier_491d52(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines) {
    // Generated spelling of NV_FALSE.
    return ((NvBool)(0 != 0));
}

NvBool intrRequiresPossibleErrorNotifier_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines);

NvBool intrRequiresPossibleErrorNotifier_GA100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines);

NvBool intrRequiresPossibleErrorNotifier_GH100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines);


#ifdef __nvoc_intr_h_disabled
static inline NvBool intrRequiresPossibleErrorNotifier(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *pEngines) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrRequiresPossibleErrorNotifier(pGpu, pIntr, pEngines) intrRequiresPossibleErrorNotifier_491d52(pGpu, pIntr, pEngines)
#endif //__nvoc_intr_h_disabled

#define intrRequiresPossibleErrorNotifier_HAL(pGpu, pIntr, pEngines) intrRequiresPossibleErrorNotifier(pGpu, pIntr, pEngines)

// Reads the ERR_CONT register value (TU102+); default stub yields 0.
// NOTE(review): NvU32-returning function with an NvBool-shaped stub body —
// appears to be the generated "return NV_FALSE" body reused here. The value
// is 0 either way, but confirm against the NVOC generator output.
static inline NvU32 intrReadErrCont_491d52(OBJGPU *pGpu, struct Intr *pIntr) {
    return ((NvBool)(0 != 0));
}

NvU32 intrReadErrCont_TU102(OBJGPU *pGpu, struct Intr *pIntr);


#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrReadErrCont(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrReadErrCont(pGpu, pIntr) intrReadErrCont_491d52(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

#define intrReadErrCont_HAL(pGpu, pIntr) intrReadErrCont(pGpu, pIntr)
1409 
// Fills arg0 with the set of engines that currently have a pending stall
// interrupt (GP100+ implementation; arg1 is the caller's thread state).
NV_STATUS intrGetPendingStall_GP100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingStall(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingStall(pGpu, pIntr, arg0, arg1) intrGetPendingStall_GP100(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingStall_HAL(pGpu, pIntr, arg0, arg1) intrGetPendingStall(pGpu, pIntr, arg0, arg1)

// Fills arg0 with the engines whose stall interrupts are pending (TU102+).
NV_STATUS intrGetPendingStallEngines_TU102(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetPendingStallEngines(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetPendingStallEngines(pGpu, pIntr, arg0, arg1) intrGetPendingStallEngines_TU102(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetPendingStallEngines_HAL(pGpu, pIntr, arg0, arg1) intrGetPendingStallEngines(pGpu, pIntr, arg0, arg1)

// Whether interrupts are currently enabled (shared _IMPL, not per-chip).
NvBool intrIsIntrEnabled_IMPL(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0);


#ifdef __nvoc_intr_h_disabled
static inline NvBool intrIsIntrEnabled(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrIsIntrEnabled(pGpu, pIntr, arg0) intrIsIntrEnabled_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

#define intrIsIntrEnabled_HAL(pGpu, pIntr, arg0) intrIsIntrEnabled(pGpu, pIntr, arg0)

// No-op stub (_b3696a = "return;"): no hub leaf interrupt programming on
// configurations using this body.
static inline void intrSetHubLeafIntr_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1, NvU32 *arg2, struct THREAD_STATE_NODE *arg3) {
    return;
}


#ifdef __nvoc_intr_h_disabled
static inline void intrSetHubLeafIntr(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 *arg1, NvU32 *arg2, struct THREAD_STATE_NODE *arg3) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetHubLeafIntr(pGpu, pIntr, arg0, arg1, arg2, arg3) intrSetHubLeafIntr_b3696a(pGpu, pIntr, arg0, arg1, arg2, arg3)
#endif //__nvoc_intr_h_disabled

#define intrSetHubLeafIntr_HAL(pGpu, pIntr, arg0, arg1, arg2, arg3) intrSetHubLeafIntr(pGpu, pIntr, arg0, arg1, arg2, arg3)

// Collects pending hub leaf interrupts into arg0 (explicit _STUB variant).
void intrGetHubLeafIntrPending_STUB(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);


#ifdef __nvoc_intr_h_disabled
static inline void intrGetHubLeafIntrPending(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetHubLeafIntrPending(pGpu, pIntr, arg0, arg1) intrGetHubLeafIntrPending_STUB(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

#define intrGetHubLeafIntrPending_HAL(pGpu, pIntr, arg0, arg1) intrGetHubLeafIntrPending(pGpu, pIntr, arg0, arg1)
1479 
//
// _DISPATCH inlines below are NVOC virtual-call thunks: each forwards
// through the corresponding function pointer installed in the Intr object
// (pIntr->__intr...__), so the per-chip implementation bound at object
// construction time is invoked.
//

// Engine-state constructor for the Intr engine.
NV_STATUS intrConstructEngine_IMPL(OBJGPU *pGpu, struct Intr *pIntr, ENGDESCRIPTOR arg0);

static inline NV_STATUS intrConstructEngine_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, ENGDESCRIPTOR arg0) {
    return pIntr->__intrConstructEngine__(pGpu, pIntr, arg0);
}

// State-init without the GPU lock held.
NV_STATUS intrStateInitUnlocked_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

static inline NV_STATUS intrStateInitUnlocked_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrStateInitUnlocked__(pGpu, pIntr);
}

// State-init with the GPU lock held.
NV_STATUS intrStateInitLocked_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

static inline NV_STATUS intrStateInitLocked_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrStateInitLocked__(pGpu, pIntr);
}

// Tears down engine state (void: no failure path).
void intrStateDestroy_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

static inline void intrStateDestroy_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    pIntr->__intrStateDestroy__(pGpu, pIntr);
}

// Decodes a raw stall-interrupt-enable register value.
NvU32 intrDecodeStallIntrEn_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0);

// Stub (_4a4dee = "return 0").
static inline NvU32 intrDecodeStallIntrEn_4a4dee(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0) {
    return 0;
}

static inline NvU32 intrDecodeStallIntrEn_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0) {
    return pIntr->__intrDecodeStallIntrEn__(pGpu, pIntr, arg0);
}

// Base interrupt vector of the non-stall tree.
NvU32 intrGetNonStallBaseVector_TU102(OBJGPU *pGpu, struct Intr *pIntr);

// Assert stub (_c067f9): asserts in checked builds, returns 0 otherwise.
static inline NvU32 intrGetNonStallBaseVector_c067f9(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_OR_RETURN_PRECOMP(0, 0);
}

static inline NvU32 intrGetNonStallBaseVector_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrGetNonStallBaseVector__(pGpu, pIntr);
}

// 64-bit enable/disable mask for the UVM shared interrupt leaf.
NvU64 intrGetUvmSharedLeafEnDisableMask_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU64 intrGetUvmSharedLeafEnDisableMask_GA100(OBJGPU *pGpu, struct Intr *pIntr);

static inline NvU64 intrGetUvmSharedLeafEnDisableMask_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrGetUvmSharedLeafEnDisableMask__(pGpu, pIntr);
}

// Enables/disables the display interrupt.
void intrSetDisplayInterruptEnable_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable, struct THREAD_STATE_NODE *pThreadState);

// No-op stub (_b3696a) for configurations without display.
static inline void intrSetDisplayInterruptEnable_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable, struct THREAD_STATE_NODE *pThreadState) {
    return;
}

static inline void intrSetDisplayInterruptEnable_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvBool bEnable, struct THREAD_STATE_NODE *pThreadState) {
    pIntr->__intrSetDisplayInterruptEnable__(pGpu, pIntr, bEnable, pThreadState);
}

// Reads the TOP_EN_SET register for leaf/top index arg0.
NvU32 intrReadRegTopEnSet_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);

NvU32 intrReadRegTopEnSet_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);

static inline NvU32 intrReadRegTopEnSet_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    return pIntr->__intrReadRegTopEnSet__(pGpu, pIntr, arg0, arg1);
}

// Writes value arg1 to the TOP_EN_SET register at index arg0.
void intrWriteRegTopEnSet_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);

void intrWriteRegTopEnSet_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);

static inline void intrWriteRegTopEnSet_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    pIntr->__intrWriteRegTopEnSet__(pGpu, pIntr, arg0, arg1, arg2);
}

// Writes value arg1 to the TOP_EN_CLEAR register at index arg0.
void intrWriteRegTopEnClear_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);

void intrWriteRegTopEnClear_GA102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2);

static inline void intrWriteRegTopEnClear_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, NvU32 arg1, struct THREAD_STATE_NODE *arg2) {
    pIntr->__intrWriteRegTopEnClear__(pGpu, pIntr, arg0, arg1, arg2);
}
1565 
// Number of interrupt leaf registers on this chip.
NvU32 intrGetNumLeaves_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU32 intrGetNumLeaves_GH100(OBJGPU *pGpu, struct Intr *pIntr);

static inline NvU32 intrGetNumLeaves_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrGetNumLeaves__(pGpu, pIntr);
}

// Size of an interrupt leaf on this chip.
NvU32 intrGetLeafSize_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU32 intrGetLeafSize_GH100(OBJGPU *pGpu, struct Intr *pIntr);

static inline NvU32 intrGetLeafSize_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrGetLeafSize__(pGpu, pIntr);
}

// 64-bit top-level mask covering the non-stall interrupt subtrees.
NvU64 intrGetIntrTopNonStallMask_TU102(OBJGPU *pGpu, struct Intr *pIntr);

NvU64 intrGetIntrTopNonStallMask_GH100(OBJGPU *pGpu, struct Intr *pIntr);

static inline NvU64 intrGetIntrTopNonStallMask_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr) {
    return pIntr->__intrGetIntrTopNonStallMask__(pGpu, pIntr);
}

// Sanity-checks that `vector` is the expected stall vector for `mcEngine`.
void intrSanityCheckEngineIntrStallVector_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

void intrSanityCheckEngineIntrStallVector_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

// No-op stub for chips without this check.
static inline void intrSanityCheckEngineIntrStallVector_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
    return;
}

static inline void intrSanityCheckEngineIntrStallVector_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
    pIntr->__intrSanityCheckEngineIntrStallVector__(pGpu, pIntr, vector, mcEngine);
}

// Sanity-checks that `vector` is the expected notification (non-stall)
// vector for `mcEngine`.
void intrSanityCheckEngineIntrNotificationVector_GA100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

void intrSanityCheckEngineIntrNotificationVector_GH100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine);

// No-op stub for chips without this check.
static inline void intrSanityCheckEngineIntrNotificationVector_b3696a(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
    return;
}

static inline void intrSanityCheckEngineIntrNotificationVector_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 vector, NvU16 mcEngine) {
    pIntr->__intrSanityCheckEngineIntrNotificationVector__(pGpu, pIntr, vector, mcEngine);
}

// Engine-state load/unload hooks (arg0 carries the state-transition flags).
NV_STATUS intrStateLoad_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0);

static inline NV_STATUS intrStateLoad_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0) {
    return pIntr->__intrStateLoad__(pGpu, pIntr, arg0);
}

NV_STATUS intrStateUnload_TU102(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0);

static inline NV_STATUS intrStateUnload_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0) {
    return pIntr->__intrStateUnload__(pGpu, pIntr, arg0);
}

// Applies interrupt mask arg0 (GP100+); stub reports not-supported.
NV_STATUS intrSetIntrMask_GP100(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);

static inline NV_STATUS intrSetIntrMask_46f6a7(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    return NV_ERR_NOT_SUPPORTED;
}

static inline NV_STATUS intrSetIntrMask_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    return pIntr->__intrSetIntrMask__(pGpu, pIntr, arg0, arg1);
}

// Programs the interrupt-enable value arg0 into hardware; _d44104 is the
// generated no-op body.
void intrSetIntrEnInHw_GP100(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1);

static inline void intrSetIntrEnInHw_d44104(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    return;
}

static inline void intrSetIntrEnInHw_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, struct THREAD_STATE_NODE *arg1) {
    pIntr->__intrSetIntrEnInHw__(pGpu, pIntr, arg0, arg1);
}

// Reads the current interrupt-enable value from hardware; _b2b553 is the
// generated "return 0" body.
NvU32 intrGetIntrEnFromHw_GP100(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0);

static inline NvU32 intrGetIntrEnFromHw_b2b553(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    return 0;
}

static inline NvU32 intrGetIntrEnFromHw_DISPATCH(OBJGPU *pGpu, struct Intr *pIntr, struct THREAD_STATE_NODE *arg0) {
    return pIntr->__intrGetIntrEnFromHw__(pGpu, pIntr, arg0);
}
1655 
//
// OBJENGSTATE lifecycle hooks inherited by Intr: each thunk dispatches
// through the engine-state vtable entry on the Intr object. arg0, where
// present, carries the state-transition flags.
//
static inline NV_STATUS intrStatePreLoad_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate, NvU32 arg0) {
    return pEngstate->__intrStatePreLoad__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS intrStatePostUnload_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate, NvU32 arg0) {
    return pEngstate->__intrStatePostUnload__(pGpu, pEngstate, arg0);
}

static inline NV_STATUS intrStatePreUnload_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate, NvU32 arg0) {
    return pEngstate->__intrStatePreUnload__(pGpu, pEngstate, arg0);
}

static inline void intrInitMissing_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate) {
    pEngstate->__intrInitMissing__(pGpu, pEngstate);
}

static inline NV_STATUS intrStatePreInitLocked_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate) {
    return pEngstate->__intrStatePreInitLocked__(pGpu, pEngstate);
}

static inline NV_STATUS intrStatePreInitUnlocked_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate) {
    return pEngstate->__intrStatePreInitUnlocked__(pGpu, pEngstate);
}

static inline NV_STATUS intrStatePostLoad_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate, NvU32 arg0) {
    return pEngstate->__intrStatePostLoad__(pGpu, pEngstate, arg0);
}

// Whether the Intr engine is present on this GPU.
static inline NvBool intrIsPresent_DISPATCH(POBJGPU pGpu, struct Intr *pEngstate) {
    return pEngstate->__intrIsPresent__(pGpu, pEngstate);
}
1687 
//
// Non-virtual _IMPL helpers. When __nvoc_intr_h_disabled is defined the
// public name becomes an assert-and-fail inline; otherwise it is a direct
// macro alias for the _IMPL function.
//

// Object destructor, invoked by NVOC via the __nvoc_intrDestruct alias.
void intrDestruct_IMPL(struct Intr *pIntr);

#define __nvoc_intrDestruct(pIntr) intrDestruct_IMPL(pIntr)
// Services the non-stall interrupt bottom half for the engines in arg0.
NV_STATUS intrServiceNonStallBottomHalf_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);

#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrServiceNonStallBottomHalf(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrServiceNonStallBottomHalf(pGpu, pIntr, arg0, arg1) intrServiceNonStallBottomHalf_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

// Services notification (non-stall) records for the engine mcEngineIdx.
NV_STATUS intrServiceNotificationRecords_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineIdx, struct THREAD_STATE_NODE *arg0);

#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrServiceNotificationRecords(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineIdx, struct THREAD_STATE_NODE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrServiceNotificationRecords(pGpu, pIntr, mcEngineIdx, arg0) intrServiceNotificationRecords_IMPL(pGpu, pIntr, mcEngineIdx, arg0)
#endif //__nvoc_intr_h_disabled

// 64-bit legacy stall mask for the interrupt top level.
NvU64 intrGetIntrTopLegacyStallMask_IMPL(struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline NvU64 intrGetIntrTopLegacyStallMask(struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrTopLegacyStallMask(pIntr) intrGetIntrTopLegacyStallMask_IMPL(pIntr)
#endif //__nvoc_intr_h_disabled

// 64-bit mask of top-level interrupts handled while locked.
NvU64 intrGetIntrTopLockedMask_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline NvU64 intrGetIntrTopLockedMask(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrTopLockedMask(pGpu, pIntr) intrGetIntrTopLockedMask_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled

// Looks up the subtree range (pRange) for an interrupt category.
NV_STATUS intrGetSubtreeRange_IMPL(struct Intr *pIntr, NV2080_INTR_CATEGORY category, NV2080_INTR_CATEGORY_SUBTREE_MAP *pRange);

#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetSubtreeRange(struct Intr *pIntr, NV2080_INTR_CATEGORY category, NV2080_INTR_CATEGORY_SUBTREE_MAP *pRange) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetSubtreeRange(pIntr, category, pRange) intrGetSubtreeRange_IMPL(pIntr, category, pRange)
#endif //__nvoc_intr_h_disabled

// 64-bit top-level mask covering the given interrupt category.
NvU64 intrGetIntrTopCategoryMask_IMPL(struct Intr *pIntr, NV2080_INTR_CATEGORY category);

#ifdef __nvoc_intr_h_disabled
static inline NvU64 intrGetIntrTopCategoryMask(struct Intr *pIntr, NV2080_INTR_CATEGORY category) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrTopCategoryMask(pIntr, category) intrGetIntrTopCategoryMask_IMPL(pIntr, category)
#endif //__nvoc_intr_h_disabled
1756 
// Conditionally services the stall-interrupt list (arg0) across all GPUs;
// arg1 is a boolean condition flag (semantics defined at the _IMPL).
void intrServiceStallListAllGpusCond_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1);

#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStallListAllGpusCond(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStallListAllGpusCond(pGpu, pIntr, arg0, arg1) intrServiceStallListAllGpusCond_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

// Services the stall-interrupt list (arg0) for a single device.
void intrServiceStallListDevice_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1);

#ifdef __nvoc_intr_h_disabled
static inline void intrServiceStallListDevice(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, NvBool arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrServiceStallListDevice(pGpu, pIntr, arg0, arg1) intrServiceStallListDevice_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

// Services interrupt records for engine arg0; arg1 is an output flag.
NvU32 intrServiceInterruptRecords_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0, NvBool *arg1);

#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrServiceInterruptRecords(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0, NvBool *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrServiceInterruptRecords(pGpu, pIntr, arg0, arg1) intrServiceInterruptRecords_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

// Enqueues DPC node arg1 onto queue arg0.
void intrQueueDpc_IMPL(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0, DPCNODE *arg1);

#ifdef __nvoc_intr_h_disabled
static inline void intrQueueDpc(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0, DPCNODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrQueueDpc(pGpu, pIntr, arg0, arg1) intrQueueDpc_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

// Pops the next DPC node from queue arg0 (NULL stub when disabled).
DPCNODE *intrDequeueDpc_IMPL(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0);

#ifdef __nvoc_intr_h_disabled
static inline DPCNODE *intrDequeueDpc(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NULL;
}
#else //__nvoc_intr_h_disabled
#define intrDequeueDpc(pGpu, pIntr, arg0) intrDequeueDpc_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

// Whether DPC queue arg0 is empty.
NvBool intrIsDpcQueueEmpty_IMPL(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0);

#ifdef __nvoc_intr_h_disabled
static inline NvBool intrIsDpcQueueEmpty(OBJGPU *pGpu, struct Intr *pIntr, DPCQUEUE *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_FALSE;
}
#else //__nvoc_intr_h_disabled
#define intrIsDpcQueueEmpty(pGpu, pIntr, arg0) intrIsDpcQueueEmpty_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

// Queues an interrupt-driven DPC for engine arg0.
void intrQueueInterruptBasedDpc_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0);

#ifdef __nvoc_intr_h_disabled
static inline void intrQueueInterruptBasedDpc(OBJGPU *pGpu, struct Intr *pIntr, NvU16 arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrQueueInterruptBasedDpc(pGpu, pIntr, arg0) intrQueueInterruptBasedDpc_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled
1829 
// Converts an engine bitvector (arg0) into a PMC interrupt-mask word.
NvU32 intrConvertEngineMaskToPmcIntrMask_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0);

#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrConvertEngineMaskToPmcIntrMask(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrConvertEngineMaskToPmcIntrMask(pGpu, pIntr, arg0) intrConvertEngineMaskToPmcIntrMask_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

// Inverse conversion: PMC interrupt mask (arg0) to engine bitvector (arg1).
void intrConvertPmcIntrMaskToEngineMask_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, union MC_ENGINE_BITVECTOR *arg1);

#ifdef __nvoc_intr_h_disabled
static inline void intrConvertPmcIntrMaskToEngineMask(OBJGPU *pGpu, struct Intr *pIntr, NvU32 arg0, union MC_ENGINE_BITVECTOR *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrConvertPmcIntrMaskToEngineMask(pGpu, pIntr, arg0, arg1) intrConvertPmcIntrMaskToEngineMask_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled

// Looks up the interrupt vector for mcEngineId; bNonStall selects the
// non-stall (notification) vector vs. the stall vector.
NvU32 intrGetVectorFromEngineId_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineId, NvBool bNonStall);

#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetVectorFromEngineId(OBJGPU *pGpu, struct Intr *pIntr, NvU16 mcEngineId, NvBool bNonStall) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetVectorFromEngineId(pGpu, pIntr, mcEngineId, bNonStall) intrGetVectorFromEngineId_IMPL(pGpu, pIntr, mcEngineId, bNonStall)
#endif //__nvoc_intr_h_disabled

// Writes the smallest notification interrupt vector into *arg0.
NV_STATUS intrGetSmallestNotificationVector_IMPL(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0);

#ifdef __nvoc_intr_h_disabled
static inline NV_STATUS intrGetSmallestNotificationVector(OBJGPU *pGpu, struct Intr *pIntr, NvU32 *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return NV_ERR_NOT_SUPPORTED;
}
#else //__nvoc_intr_h_disabled
#define intrGetSmallestNotificationVector(pGpu, pIntr, arg0) intrGetSmallestNotificationVector_IMPL(pGpu, pIntr, arg0)
#endif //__nvoc_intr_h_disabled

// Setter/getter pair for the "unblocked" interrupt-mask bitvector kept on
// the Intr object (no GPU access — note these take no OBJGPU).
void intrSetIntrMaskUnblocked_IMPL(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0);

#ifdef __nvoc_intr_h_disabled
static inline void intrSetIntrMaskUnblocked(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetIntrMaskUnblocked(pIntr, arg0) intrSetIntrMaskUnblocked_IMPL(pIntr, arg0)
#endif //__nvoc_intr_h_disabled

void intrGetIntrMaskUnblocked_IMPL(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0);

#ifdef __nvoc_intr_h_disabled
static inline void intrGetIntrMaskUnblocked(struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrMaskUnblocked(pIntr, arg0) intrGetIntrMaskUnblocked_IMPL(pIntr, arg0)
#endif //__nvoc_intr_h_disabled

// Setter/getter pair for the interrupt-mask flags word.
void intrSetIntrMaskFlags_IMPL(struct Intr *pIntr, NvU32 arg0);

#ifdef __nvoc_intr_h_disabled
static inline void intrSetIntrMaskFlags(struct Intr *pIntr, NvU32 arg0) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSetIntrMaskFlags(pIntr, arg0) intrSetIntrMaskFlags_IMPL(pIntr, arg0)
#endif //__nvoc_intr_h_disabled

NvU32 intrGetIntrMaskFlags_IMPL(struct Intr *pIntr);

#ifdef __nvoc_intr_h_disabled
static inline NvU32 intrGetIntrMaskFlags(struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
    return 0;
}
#else //__nvoc_intr_h_disabled
#define intrGetIntrMaskFlags(pIntr) intrGetIntrMaskFlags_IMPL(pIntr)
#endif //__nvoc_intr_h_disabled
1913 
1914 void intrSetDefaultIntrEn_IMPL(struct Intr *pIntr, NvU32 arg0);
1915 
1916 #ifdef __nvoc_intr_h_disabled
1917 static inline void intrSetDefaultIntrEn(struct Intr *pIntr, NvU32 arg0) {
1918     NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1919 }
1920 #else //__nvoc_intr_h_disabled
1921 #define intrSetDefaultIntrEn(pIntr, arg0) intrSetDefaultIntrEn_IMPL(pIntr, arg0)
1922 #endif //__nvoc_intr_h_disabled
1923 
1924 NvU32 intrGetDefaultIntrEn_IMPL(struct Intr *pIntr);
1925 
1926 #ifdef __nvoc_intr_h_disabled
1927 static inline NvU32 intrGetDefaultIntrEn(struct Intr *pIntr) {
1928     NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1929     return 0;
1930 }
1931 #else //__nvoc_intr_h_disabled
1932 #define intrGetDefaultIntrEn(pIntr) intrGetDefaultIntrEn_IMPL(pIntr)
1933 #endif //__nvoc_intr_h_disabled
1934 
1935 void intrSetIntrEn_IMPL(struct Intr *pIntr, NvU32 arg0);
1936 
1937 #ifdef __nvoc_intr_h_disabled
1938 static inline void intrSetIntrEn(struct Intr *pIntr, NvU32 arg0) {
1939     NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1940 }
1941 #else //__nvoc_intr_h_disabled
1942 #define intrSetIntrEn(pIntr, arg0) intrSetIntrEn_IMPL(pIntr, arg0)
1943 #endif //__nvoc_intr_h_disabled
1944 
1945 NvU32 intrGetIntrEn_IMPL(struct Intr *pIntr);
1946 
1947 #ifdef __nvoc_intr_h_disabled
1948 static inline NvU32 intrGetIntrEn(struct Intr *pIntr) {
1949     NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
1950     return 0;
1951 }
1952 #else //__nvoc_intr_h_disabled
1953 #define intrGetIntrEn(pIntr) intrGetIntrEn_IMPL(pIntr)
1954 #endif //__nvoc_intr_h_disabled
1955 
// Capture the interrupt-enable-0 state from hardware into the Intr object.
// (Name suggests it reads NV_PMC_INTR_EN_0 — confirm in the _IMPL source.)
void intrSaveIntrEn0FromHw_IMPL(OBJGPU *pGpu, struct Intr *pIntr);

// Stub that asserts when Intr is compiled out; forwards to _IMPL otherwise.
#ifdef __nvoc_intr_h_disabled
static inline void intrSaveIntrEn0FromHw(OBJGPU *pGpu, struct Intr *pIntr) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrSaveIntrEn0FromHw(pGpu, pIntr) intrSaveIntrEn0FromHw_IMPL(pGpu, pIntr)
#endif //__nvoc_intr_h_disabled
1965 
// Query pending GMMU interrupts into the engine bitvector arg0, under the
// interrupt context described by arg1.
void intrGetGmmuInterrupts_IMPL(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1);

// Stub that asserts when Intr is compiled out; forwards to _IMPL otherwise.
#ifdef __nvoc_intr_h_disabled
static inline void intrGetGmmuInterrupts(OBJGPU *pGpu, struct Intr *pIntr, union MC_ENGINE_BITVECTOR *arg0, struct THREAD_STATE_NODE *arg1) {
    NV_ASSERT_FAILED_PRECOMP("Intr was disabled!");
}
#else //__nvoc_intr_h_disabled
#define intrGetGmmuInterrupts(pGpu, pIntr, arg0, arg1) intrGetGmmuInterrupts_IMPL(pGpu, pIntr, arg0, arg1)
#endif //__nvoc_intr_h_disabled
1975 
1976 #undef PRIVATE_FIELD
1977 
1978 
// Empty (all-zero) mask value used for interrupts that should be masked off
// in the PMC interrupt tree.
#define NV_PMC_INTR_INVALID_MASK (0)
1981 
//
// INTR_WRITE_TABLE: append 'entry' at index 'count' of 'pTable' if there is
// capacity (count < maxCount); otherwise record NV_ERR_BUFFER_TOO_SMALL in
// 'status'. 'count' is incremented on every invocation, including overflow,
// so after a series of writes it holds the total number of entries required
// (a caller can use it to size a retry buffer).
//
// Macro hygiene: every use of every argument is parenthesized so expression
// arguments (e.g. 'count' passed as 'base + i', or 'status' as '*pStatus')
// expand correctly. 'count' and 'status' must still be modifiable lvalues,
// and 'count' is evaluated more than once — do not pass expressions with
// side effects.
//
#define INTR_WRITE_TABLE(status, pTable, maxCount, count, entry)             \
    do {                                                                     \
        if ((count) < (maxCount))                                            \
        {                                                                    \
            (pTable)[(count)] = (entry);                                     \
        }                                                                    \
        else                                                                 \
        {                                                                    \
            (status) = NV_ERR_BUFFER_TOO_SMALL;                              \
        }                                                                    \
        (count) += 1;                                                        \
    } while(0)
1994 
1995 #endif // INTR_H
1996 
1997 #ifdef __cplusplus
1998 } // extern "C"
1999 #endif
2000 #endif // _G_INTR_NVOC_H_
2001