1 #ifndef _G_KERNEL_CHANNEL_NVOC_H_
2 #define _G_KERNEL_CHANNEL_NVOC_H_
3 #include "nvoc/runtime.h"
4 
5 #ifdef __cplusplus
6 extern "C" {
7 #endif
8 
9 /*
10  * SPDX-FileCopyrightText: Copyright (c) 2020-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
11  * SPDX-License-Identifier: MIT
12  *
13  * Permission is hereby granted, free of charge, to any person obtaining a
14  * copy of this software and associated documentation files (the "Software"),
15  * to deal in the Software without restriction, including without limitation
16  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
17  * and/or sell copies of the Software, and to permit persons to whom the
18  * Software is furnished to do so, subject to the following conditions:
19  *
20  * The above copyright notice and this permission notice shall be included in
21  * all copies or substantial portions of the Software.
22  *
23  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
26  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
28  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
29  * DEALINGS IN THE SOFTWARE.
30  */
31 
32 #include "g_kernel_channel_nvoc.h"
33 
34 #ifndef KERNEL_CHANNEL_H
35 #define KERNEL_CHANNEL_H
36 
37 #include "core/core.h"
38 #include "os/os.h"
39 #include "resserv/resserv.h"
40 #include "nvoc/prelude.h"
41 #include "gpu/gpu_resource.h"
42 #include "kernel/gpu/gpu_engine_type.h"
43 #include "kernel/gpu/fifo/kernel_ctxshare.h"
44 #include "kernel/gpu/fifo/kernel_fifo.h"
45 #include "kernel/gpu/gr/kernel_graphics_context.h"
46 #include "kernel/gpu/intr/intr_service.h"
47 #include "kernel/gpu/mig_mgr/kernel_mig_manager.h"
48 
49 #include "ctrl/ctrl0090.h"
50 #include "ctrl/ctrl208f/ctrl208ffifo.h"
51 #include "ctrl/ctrl506f.h"
52 #include "ctrl/ctrl906f.h"
53 #include "ctrl/ctrla06f.h"
54 #include "ctrl/ctrla16f.h"
55 #include "ctrl/ctrla26f.h"
56 #include "ctrl/ctrlb06f.h"
57 #include "ctrl/ctrlc06f.h"
58 #include "ctrl/ctrlc36f.h"
59 #include "ctrl/ctrlc56f.h"
60 
61 struct OBJGPU;
62 
63 #ifndef __NVOC_CLASS_OBJGPU_TYPEDEF__
64 #define __NVOC_CLASS_OBJGPU_TYPEDEF__
65 typedef struct OBJGPU OBJGPU;
66 #endif /* __NVOC_CLASS_OBJGPU_TYPEDEF__ */
67 
68 #ifndef __nvoc_class_id_OBJGPU
69 #define __nvoc_class_id_OBJGPU 0x7ef3cb
70 #endif /* __nvoc_class_id_OBJGPU */
71 
72 
73 struct UserInfo;
74 
75 #ifndef __NVOC_CLASS_UserInfo_TYPEDEF__
76 #define __NVOC_CLASS_UserInfo_TYPEDEF__
77 typedef struct UserInfo UserInfo;
78 #endif /* __NVOC_CLASS_UserInfo_TYPEDEF__ */
79 
80 #ifndef __nvoc_class_id_UserInfo
81 #define __nvoc_class_id_UserInfo 0x21d236
82 #endif /* __nvoc_class_id_UserInfo */
83 
84 
85 /*!
86  * @brief Type of hErrorContext or hEccErrorContext
87  *
88  * This is RPCed to GSP in #NV_CHANNEL_ALLOC_PARAMS.internalFlags
89  * along with the actual memdesc in
90  * #NV_CHANNEL_ALLOC_PARAMS.errorNotifierMem and
91  * #NV_CHANNEL_ALLOC_PARAMS.eccErrorNotifierMem.
92  */
93 typedef enum {
94     /*!
95      * Initial state as passed in NV_CHANNEL_ALLOC_PARAMS by
96      * kernel CPU-RM clients.
97      */
98     ERROR_NOTIFIER_TYPE_UNKNOWN = 0,
99     /*! @brief Error notifier is explicitly not set.
100      *
101      * The corresponding hErrorContext or hEccErrorContext must be
102      * NV01_NULL_OBJECT.
103      */
104     ERROR_NOTIFIER_TYPE_NONE,
105     /*! @brief Error notifier is a ContextDma */
106     ERROR_NOTIFIER_TYPE_CTXDMA,
107     /*! @brief Error notifier is a NvNotification array in sysmem/vidmem */
108     ERROR_NOTIFIER_TYPE_MEMORY
109 } ErrorNotifierType;
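
/*!
 * Illustrative sketch only (not part of the generated NVOC API): a caller-side
 * check matching the documented constraint that ERROR_NOTIFIER_TYPE_NONE
 * requires the corresponding handle to be NV01_NULL_OBJECT. The helper name is
 * hypothetical.
 *
 * @code
 *   static NV_STATUS
 *   exampleValidateErrorNotifier(ErrorNotifierType type, NvHandle hErrorCtx)
 *   {
 *       // Hypothetical helper: NONE must pair with a null handle.
 *       if (type == ERROR_NOTIFIER_TYPE_NONE && hErrorCtx != NV01_NULL_OBJECT)
 *           return NV_ERR_INVALID_ARGUMENT;
 *       return NV_OK;
 *   }
 * @endcode
 */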
110 
111 //
112 // Iterates over the ChannelDescendants on a channel
113 // Uses an RS_ORDERED_ITERATOR and filters it by EngineID / ClassID
114 //
115 typedef struct {
116     RS_ORDERED_ITERATOR rsIter;
117     RM_ENGINE_TYPE engineID;
118     NvU32 classID;
119 } KernelChannelChildIterator;
120 
121 typedef enum
122 {
123     CHANNEL_CLASS_TYPE_DMA,
124     CHANNEL_CLASS_TYPE_GPFIFO,
125 } CHANNEL_CLASS_TYPE;
126 
127 //
128 // Channel class info structure.
129 //
// Filled in by the CliGetChannelClassInfo() routine.
131 //
132 typedef struct
133 {
    NvU32 notifiersMaxCount;    // maximum number of notifiers for the class
    NvU32 eventActionDisable;   // event disable action cmd value
    NvU32 eventActionSingle;    // event single-shot enable action cmd value
    NvU32 eventActionRepeat;    // event repeat enable action cmd value
    NvU32 rcNotifierIndex;      // RC notifier index (differs per channel class)
139     CHANNEL_CLASS_TYPE classType;
140 } CLI_CHANNEL_CLASS_INFO;
141 
142 void CliGetChannelClassInfo(NvU32, CLI_CHANNEL_CLASS_INFO*);
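
/*!
 * Illustrative usage sketch (hypothetical caller): query the per-class channel
 * info for a channel class and pick the single-shot event action value. The
 * variable names are placeholders.
 *
 * @code
 *   CLI_CHANNEL_CLASS_INFO classInfo;
 *
 *   // channelClass holds the external class id of the channel object.
 *   CliGetChannelClassInfo(channelClass, &classInfo);
 *
 *   if (classInfo.classType == CHANNEL_CLASS_TYPE_GPFIFO)
 *       eventAction = classInfo.eventActionSingle;
 * @endcode
 */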
143 
144 /*!
145  * This structure represents an iterator for all objects
 * with a given class number or engine tag on a channel or TSG.
 * It is created by @ref kchannelGetChildIterOverGroup.
148  */
149 typedef struct
150 {
151     NvU32 engDesc;
152     NvU32 classNum;
153 
154     //
    // During iteration, a copy of the current channel/TSG node is tracked,
    // along with the next object node to resume iterating from.
157     //
158     CHANNEL_NODE channelNode;
159     KernelChannelChildIterator kchannelIter;
160 } KernelChannelChildIterOverGroup;
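
/*!
 * Illustrative iteration sketch. @ref kchannelGetChildIterOverGroup is the
 * constructor named above; the advance helper shown here
 * (kchannelGetNextChildOverGroup) and the exact signatures are assumptions for
 * illustration only and may differ from the actual declarations.
 *
 * @code
 *   KernelChannelChildIterOverGroup it;
 *   ChannelDescendant *pChild;
 *
 *   kchannelGetChildIterOverGroup(pKernelChannel, classNum, engDesc, &it);
 *   for (pChild = kchannelGetNextChildOverGroup(&it);
 *        pChild != NULL;
 *        pChild = kchannelGetNextChildOverGroup(&it))
 *   {
 *       // pChild is an object on the channel/TSG matching classNum / engDesc.
 *   }
 * @endcode
 */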
161 
162 typedef struct _def_instance_block
163 {
164     MEMORY_DESCRIPTOR  *pInstanceBlockDesc;
165     MEMORY_DESCRIPTOR  *pRamfcDesc;
166     /*!
     * Used only for the Suspend/Resume RM internal channel.
     * Will be moved to the Host context RL info list.
169      */
170     MEMORY_DESCRIPTOR  *pRLMemDesc;
171 } FIFO_INSTANCE_BLOCK;
172 
173 /* Bitfields in NV_CHANNEL_ALLOC_PARAMS.internalFlags */
174 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE                       1:0
175 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE_USER                  0x0
176 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE_ADMIN                 0x1
177 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_PRIVILEGE_KERNEL                0x2
178 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE             3:2
179 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_UNKNOWN     ERROR_NOTIFIER_TYPE_UNKNOWN
180 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_NONE        ERROR_NOTIFIER_TYPE_NONE
181 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_CTXDMA      ERROR_NOTIFIER_TYPE_CTXDMA
182 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ERROR_NOTIFIER_TYPE_MEMORY      ERROR_NOTIFIER_TYPE_MEMORY
183 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE         5:4
184 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_UNKNOWN ERROR_NOTIFIER_TYPE_UNKNOWN
185 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_NONE    ERROR_NOTIFIER_TYPE_NONE
186 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_CTXDMA  ERROR_NOTIFIER_TYPE_CTXDMA
187 #define NV_KERNELCHANNEL_ALLOC_INTERNALFLAGS_ECC_ERROR_NOTIFIER_TYPE_MEMORY  ERROR_NOTIFIER_TYPE_MEMORY
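
/*!
 * Illustrative sketch of packing/unpacking these fields with the standard DRF
 * helpers from nvmisc.h (FLD_SET_DRF, FLD_SET_DRF_NUM, DRF_VAL); the actual RM
 * call sites may differ.
 *
 * @code
 *   NvU32 internalFlags = 0;
 *
 *   // Mark the channel as kernel-privileged and the error notifier as a
 *   // memory-backed NvNotification array.
 *   internalFlags = FLD_SET_DRF(_KERNELCHANNEL, _ALLOC_INTERNALFLAGS,
 *                               _PRIVILEGE, _KERNEL, internalFlags);
 *   internalFlags = FLD_SET_DRF_NUM(_KERNELCHANNEL, _ALLOC_INTERNALFLAGS,
 *                                   _ERROR_NOTIFIER_TYPE,
 *                                   ERROR_NOTIFIER_TYPE_MEMORY, internalFlags);
 *
 *   // Decode on the receiving side.
 *   ErrorNotifierType notifierType = (ErrorNotifierType)
 *       DRF_VAL(_KERNELCHANNEL, _ALLOC_INTERNALFLAGS,
 *               _ERROR_NOTIFIER_TYPE, internalFlags);
 * @endcode
 */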
188 
189 /*!
190  * Class for the kernel side of a Channel object.
191  */
192 #ifdef NVOC_KERNEL_CHANNEL_H_PRIVATE_ACCESS_ALLOWED
193 #define PRIVATE_FIELD(x) x
194 #else
195 #define PRIVATE_FIELD(x) NVOC_PRIVATE_FIELD(x)
196 #endif
197 struct KernelChannel {
198     const struct NVOC_RTTI *__nvoc_rtti;
199     struct GpuResource __nvoc_base_GpuResource;
200     struct Notifier __nvoc_base_Notifier;
201     struct Object *__nvoc_pbase_Object;
202     struct RsResource *__nvoc_pbase_RsResource;
203     struct RmResourceCommon *__nvoc_pbase_RmResourceCommon;
204     struct RmResource *__nvoc_pbase_RmResource;
205     struct GpuResource *__nvoc_pbase_GpuResource;
206     struct INotifier *__nvoc_pbase_INotifier;
207     struct Notifier *__nvoc_pbase_Notifier;
208     struct KernelChannel *__nvoc_pbase_KernelChannel;
209     NV_STATUS (*__kchannelMap__)(struct KernelChannel *, CALL_CONTEXT *, struct RS_CPU_MAP_PARAMS *, RsCpuMapping *);
210     NV_STATUS (*__kchannelUnmap__)(struct KernelChannel *, CALL_CONTEXT *, RsCpuMapping *);
211     NV_STATUS (*__kchannelGetMapAddrSpace__)(struct KernelChannel *, CALL_CONTEXT *, NvU32, NV_ADDRESS_SPACE *);
212     NV_STATUS (*__kchannelGetMemInterMapParams__)(struct KernelChannel *, RMRES_MEM_INTER_MAP_PARAMS *);
213     NV_STATUS (*__kchannelCheckMemInterUnmap__)(struct KernelChannel *, NvBool);
214     NV_STATUS (*__kchannelCreateUserMemDesc__)(struct OBJGPU *, struct KernelChannel *);
215     NvBool (*__kchannelIsUserdAddrSizeValid__)(struct KernelChannel *, NvU32, NvU32);
216     NV_STATUS (*__kchannelCtrlCmdResetIsolatedChannel__)(struct KernelChannel *, NV506F_CTRL_CMD_RESET_ISOLATED_CHANNEL_PARAMS *);
217     NV_STATUS (*__kchannelCtrlCmdGetClassEngineid__)(struct KernelChannel *, NV906F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
218     NV_STATUS (*__kchannelCtrlCmdResetChannel__)(struct KernelChannel *, NV906F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
219     NV_STATUS (*__kchannelCtrlCmdGetDeferRCState__)(struct KernelChannel *, NV906F_CTRL_CMD_GET_DEFER_RC_STATE_PARAMS *);
220     NV_STATUS (*__kchannelCtrlCmdGetMmuFaultInfo__)(struct KernelChannel *, NV906F_CTRL_GET_MMU_FAULT_INFO_PARAMS *);
221     NV_STATUS (*__kchannelCtrlCmdEventSetNotification__)(struct KernelChannel *, NV906F_CTRL_EVENT_SET_NOTIFICATION_PARAMS *);
222     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidA06F__)(struct KernelChannel *, NVA06F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
223     NV_STATUS (*__kchannelCtrlCmdResetChannelA06F__)(struct KernelChannel *, NVA06F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
224     NV_STATUS (*__kchannelCtrlCmdGpFifoSchedule__)(struct KernelChannel *, NVA06F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
225     NV_STATUS (*__kchannelCtrlCmdBind__)(struct KernelChannel *, NVA06F_CTRL_BIND_PARAMS *);
226     NV_STATUS (*__kchannelCtrlCmdGetMmuFaultInfoA06F__)(struct KernelChannel *, NVA06F_CTRL_GET_MMU_FAULT_INFO_PARAMS *);
227     NV_STATUS (*__kchannelCtrlCmdSetErrorNotifier__)(struct KernelChannel *, NVA06F_CTRL_SET_ERROR_NOTIFIER_PARAMS *);
228     NV_STATUS (*__kchannelCtrlCmdSetInterleaveLevel__)(struct KernelChannel *, NVA06F_CTRL_INTERLEAVE_LEVEL_PARAMS *);
229     NV_STATUS (*__kchannelCtrlCmdRestartRunlist__)(struct KernelChannel *, NVA06F_CTRL_RESTART_RUNLIST_PARAMS *);
230     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidA16F__)(struct KernelChannel *, NVA16F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
231     NV_STATUS (*__kchannelCtrlCmdResetChannelA16F__)(struct KernelChannel *, NVA16F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
232     NV_STATUS (*__kchannelCtrlCmdGpFifoScheduleA16F__)(struct KernelChannel *, NVA16F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
233     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidA26F__)(struct KernelChannel *, NVA26F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
234     NV_STATUS (*__kchannelCtrlCmdResetChannelA26F__)(struct KernelChannel *, NVA26F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
235     NV_STATUS (*__kchannelFCtrlCmdGpFifoScheduleA26F__)(struct KernelChannel *, NVA26F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
236     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidB06F__)(struct KernelChannel *, NVB06F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
237     NV_STATUS (*__kchannelCtrlCmdResetChannelB06F__)(struct KernelChannel *, NVB06F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
238     NV_STATUS (*__kchannelCtrlCmdGpFifoScheduleB06F__)(struct KernelChannel *, NVB06F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
239     NV_STATUS (*__kchannelCtrlCmdBindB06F__)(struct KernelChannel *, NVB06F_CTRL_BIND_PARAMS *);
240     NV_STATUS (*__kchannelCtrlCmdGetEngineCtxSize__)(struct KernelChannel *, NVB06F_CTRL_GET_ENGINE_CTX_SIZE_PARAMS *);
241     NV_STATUS (*__kchannelCtrlCmdGetEngineCtxData__)(struct KernelChannel *, NVB06F_CTRL_GET_ENGINE_CTX_DATA_PARAMS *);
242     NV_STATUS (*__kchannelCtrlCmdMigrateEngineCtxData__)(struct KernelChannel *, NVB06F_CTRL_MIGRATE_ENGINE_CTX_DATA_PARAMS *);
243     NV_STATUS (*__kchannelCtrlCmdGetEngineCtxState__)(struct KernelChannel *, NVB06F_CTRL_GET_ENGINE_CTX_STATE_PARAMS *);
244     NV_STATUS (*__kchannelCtrlCmdGetChannelHwState__)(struct KernelChannel *, NVB06F_CTRL_GET_CHANNEL_HW_STATE_PARAMS *);
245     NV_STATUS (*__kchannelCtrlCmdSetChannelHwState__)(struct KernelChannel *, NVB06F_CTRL_SET_CHANNEL_HW_STATE_PARAMS *);
246     NV_STATUS (*__kchannelCtrlCmdSaveEngineCtxData__)(struct KernelChannel *, NVB06F_CTRL_SAVE_ENGINE_CTX_DATA_PARAMS *);
247     NV_STATUS (*__kchannelCtrlCmdRestoreEngineCtxData__)(struct KernelChannel *, NVB06F_CTRL_RESTORE_ENGINE_CTX_DATA_PARAMS *);
248     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidC06F__)(struct KernelChannel *, NVC06F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
249     NV_STATUS (*__kchannelCtrlCmdResetChannelC06F__)(struct KernelChannel *, NVC06F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
250     NV_STATUS (*__kchannelCtrlCmdGpFifoScheduleC06F__)(struct KernelChannel *, NVC06F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
251     NV_STATUS (*__kchannelCtrlCmdBindC06F__)(struct KernelChannel *, NVC06F_CTRL_BIND_PARAMS *);
252     NV_STATUS (*__kchannelCtrlCmdGetClassEngineidC36F__)(struct KernelChannel *, NVC36F_CTRL_GET_CLASS_ENGINEID_PARAMS *);
253     NV_STATUS (*__kchannelCtrlCmdResetChannelC36F__)(struct KernelChannel *, NVC36F_CTRL_CMD_RESET_CHANNEL_PARAMS *);
254     NV_STATUS (*__kchannelCtrlCmdGpFifoScheduleC36F__)(struct KernelChannel *, NVC36F_CTRL_GPFIFO_SCHEDULE_PARAMS *);
255     NV_STATUS (*__kchannelCtrlCmdBindC36F__)(struct KernelChannel *, NVC36F_CTRL_BIND_PARAMS *);
256     NV_STATUS (*__kchannelCtrlCmdGpfifoGetWorkSubmitToken__)(struct KernelChannel *, NVC36F_CTRL_CMD_GPFIFO_GET_WORK_SUBMIT_TOKEN_PARAMS *);
257     NV_STATUS (*__kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer__)(struct KernelChannel *, NVC36F_CTRL_GPFIFO_UPDATE_FAULT_METHOD_BUFFER_PARAMS *);
258     NV_STATUS (*__kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex__)(struct KernelChannel *, NVC36F_CTRL_GPFIFO_SET_WORK_SUBMIT_TOKEN_NOTIF_INDEX_PARAMS *);
259     NV_STATUS (*__kchannelCtrlCmdStopChannel__)(struct KernelChannel *, NVA06F_CTRL_STOP_CHANNEL_PARAMS *);
260     NV_STATUS (*__kchannelCtrlGetTpcPartitionMode__)(struct KernelChannel *, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *);
261     NV_STATUS (*__kchannelCtrlSetTpcPartitionMode__)(struct KernelChannel *, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *);
262     NV_STATUS (*__kchannelCtrlGetMMUDebugMode__)(struct KernelChannel *, NV0090_CTRL_GET_MMU_DEBUG_MODE_PARAMS *);
263     NV_STATUS (*__kchannelCtrlProgramVidmemPromote__)(struct KernelChannel *, NV0090_CTRL_PROGRAM_VIDMEM_PROMOTE_PARAMS *);
264     NvBool (*__kchannelShareCallback__)(struct KernelChannel *, struct RsClient *, struct RsResourceRef *, RS_SHARE_POLICY *);
265     NV_STATUS (*__kchannelGetOrAllocNotifShare__)(struct KernelChannel *, NvHandle, NvHandle, struct NotifShare **);
266     NV_STATUS (*__kchannelMapTo__)(struct KernelChannel *, RS_RES_MAP_TO_PARAMS *);
267     void (*__kchannelSetNotificationShare__)(struct KernelChannel *, struct NotifShare *);
268     NvU32 (*__kchannelGetRefCount__)(struct KernelChannel *);
269     void (*__kchannelAddAdditionalDependants__)(struct RsClient *, struct KernelChannel *, RsResourceRef *);
270     NV_STATUS (*__kchannelControl_Prologue__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
271     NV_STATUS (*__kchannelGetRegBaseOffsetAndSize__)(struct KernelChannel *, struct OBJGPU *, NvU32 *, NvU32 *);
272     NV_STATUS (*__kchannelInternalControlForward__)(struct KernelChannel *, NvU32, void *, NvU32);
273     NV_STATUS (*__kchannelUnmapFrom__)(struct KernelChannel *, RS_RES_UNMAP_FROM_PARAMS *);
274     void (*__kchannelControl_Epilogue__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
275     NV_STATUS (*__kchannelControlLookup__)(struct KernelChannel *, struct RS_RES_CONTROL_PARAMS_INTERNAL *, const struct NVOC_EXPORTED_METHOD_DEF **);
276     NvHandle (*__kchannelGetInternalObjectHandle__)(struct KernelChannel *);
277     NV_STATUS (*__kchannelControl__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
278     NV_STATUS (*__kchannelGetMemoryMappingDescriptor__)(struct KernelChannel *, struct MEMORY_DESCRIPTOR **);
279     NV_STATUS (*__kchannelControlFilter__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
280     NV_STATUS (*__kchannelUnregisterEvent__)(struct KernelChannel *, NvHandle, NvHandle, NvHandle, NvHandle);
281     NV_STATUS (*__kchannelControlSerialization_Prologue__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
282     NvBool (*__kchannelCanCopy__)(struct KernelChannel *);
283     void (*__kchannelPreDestruct__)(struct KernelChannel *);
284     NV_STATUS (*__kchannelIsDuplicate__)(struct KernelChannel *, NvHandle, NvBool *);
285     void (*__kchannelControlSerialization_Epilogue__)(struct KernelChannel *, struct CALL_CONTEXT *, struct RS_RES_CONTROL_PARAMS_INTERNAL *);
286     PEVENTNOTIFICATION *(*__kchannelGetNotificationListPtr__)(struct KernelChannel *);
287     struct NotifShare *(*__kchannelGetNotificationShare__)(struct KernelChannel *);
288     NvBool (*__kchannelAccessCallback__)(struct KernelChannel *, struct RsClient *, void *, RsAccessRight);
289     NvU16 nextObjectClassID;
290     struct KernelChannel *pNextBindKernelChannel;
291     NvHandle hErrorContext;
292     MEMORY_DESCRIPTOR *pErrContextMemDesc;
293     ErrorNotifierType errorContextType;
294     NvU64 errorContextOffset;
295     NvHandle hEccErrorContext;
296     MEMORY_DESCRIPTOR *pEccErrContextMemDesc;
297     ErrorNotifierType eccErrorContextType;
298     NvU64 eccErrorContextOffset;
299     struct UserInfo *pUserInfo;
300     NvHandle hVASpace;
301     struct OBJVASPACE *pVAS;
302     NvHandle hKernelGraphicsContext;
303     NvU8 privilegeLevel;
304     NvU32 runlistId;
305     NvU32 ChID;
306     struct KernelChannelGroupApi *pKernelChannelGroupApi;
307     struct KernelCtxShareApi *pKernelCtxShareApi;
308     NvU32 refCount;
309     NvBool bIsContextBound;
310     FIFO_INSTANCE_BLOCK *pFifoHalData[8];
311     MEMORY_DESCRIPTOR *pInstSubDeviceMemDesc[8];
312     MEMORY_DESCRIPTOR *pUserdSubDeviceMemDesc[8];
313     NvBool bClientAllocatedUserD;
314     NvU32 swState[8];
315     NvU32 ProcessID;
316     NvU32 SubProcessID;
317     NvU32 bcStateCurrent;
318     NvU32 notifyIndex[2];
319     NvU32 *pNotifyActions;
320     NvU64 userdLength;
321     NvBool bSkipCtxBufferAlloc;
322     NvU32 subctxId;
323     NvU32 cid;
324     struct MIG_INSTANCE_REF partitionRef;
325     NvU32 runqueue;
326     RM_ENGINE_TYPE engineType;
327 };
328 
329 #ifndef __NVOC_CLASS_KernelChannel_TYPEDEF__
330 #define __NVOC_CLASS_KernelChannel_TYPEDEF__
331 typedef struct KernelChannel KernelChannel;
332 #endif /* __NVOC_CLASS_KernelChannel_TYPEDEF__ */
333 
334 #ifndef __nvoc_class_id_KernelChannel
335 #define __nvoc_class_id_KernelChannel 0x5d8d70
336 #endif /* __nvoc_class_id_KernelChannel */
337 
338 extern const struct NVOC_CLASS_DEF __nvoc_class_def_KernelChannel;
339 
340 #define __staticCast_KernelChannel(pThis) \
341     ((pThis)->__nvoc_pbase_KernelChannel)
342 
343 #ifdef __nvoc_kernel_channel_h_disabled
344 #define __dynamicCast_KernelChannel(pThis) ((KernelChannel*)NULL)
345 #else //__nvoc_kernel_channel_h_disabled
346 #define __dynamicCast_KernelChannel(pThis) \
347     ((KernelChannel*)__nvoc_dynamicCast(staticCast((pThis), Dynamic), classInfo(KernelChannel)))
348 #endif //__nvoc_kernel_channel_h_disabled
349 
350 
351 NV_STATUS __nvoc_objCreateDynamic_KernelChannel(KernelChannel**, Dynamic*, NvU32, va_list);
352 
353 NV_STATUS __nvoc_objCreate_KernelChannel(KernelChannel**, Dynamic*, NvU32, CALL_CONTEXT * arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL * arg_pParams);
354 #define __objCreate_KernelChannel(ppNewObj, pParent, createFlags, arg_pCallContext, arg_pParams) \
355     __nvoc_objCreate_KernelChannel((ppNewObj), staticCast((pParent), Dynamic), (createFlags), arg_pCallContext, arg_pParams)
356 
357 #define kchannelMap(pKernelChannel, pCallContext, pParams, pCpuMapping) kchannelMap_DISPATCH(pKernelChannel, pCallContext, pParams, pCpuMapping)
358 #define kchannelUnmap(pKernelChannel, pCallContext, pCpuMapping) kchannelUnmap_DISPATCH(pKernelChannel, pCallContext, pCpuMapping)
359 #define kchannelGetMapAddrSpace(pKernelChannel, pCallContext, mapFlags, pAddrSpace) kchannelGetMapAddrSpace_DISPATCH(pKernelChannel, pCallContext, mapFlags, pAddrSpace)
360 #define kchannelGetMemInterMapParams(pKernelChannel, pParams) kchannelGetMemInterMapParams_DISPATCH(pKernelChannel, pParams)
361 #define kchannelCheckMemInterUnmap(pKernelChannel, bSubdeviceHandleProvided) kchannelCheckMemInterUnmap_DISPATCH(pKernelChannel, bSubdeviceHandleProvided)
362 #define kchannelCreateUserMemDesc(pGpu, arg0) kchannelCreateUserMemDesc_DISPATCH(pGpu, arg0)
363 #define kchannelCreateUserMemDesc_HAL(pGpu, arg0) kchannelCreateUserMemDesc_DISPATCH(pGpu, arg0)
364 #define kchannelIsUserdAddrSizeValid(pKernelChannel, userdAddrLo, userdAddrHi) kchannelIsUserdAddrSizeValid_DISPATCH(pKernelChannel, userdAddrLo, userdAddrHi)
365 #define kchannelIsUserdAddrSizeValid_HAL(pKernelChannel, userdAddrLo, userdAddrHi) kchannelIsUserdAddrSizeValid_DISPATCH(pKernelChannel, userdAddrLo, userdAddrHi)
366 #define kchannelCtrlCmdResetIsolatedChannel(pKernelChannel, pResetParams) kchannelCtrlCmdResetIsolatedChannel_DISPATCH(pKernelChannel, pResetParams)
367 #define kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineid_DISPATCH(pKernelChannel, pParams)
368 #define kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannel_DISPATCH(pKernelChannel, pResetChannelParams)
369 #define kchannelCtrlCmdGetDeferRCState(pKernelChannel, pStateParams) kchannelCtrlCmdGetDeferRCState_DISPATCH(pKernelChannel, pStateParams)
370 #define kchannelCtrlCmdGetMmuFaultInfo(pKernelChannel, pFaultInfoParams) kchannelCtrlCmdGetMmuFaultInfo_DISPATCH(pKernelChannel, pFaultInfoParams)
371 #define kchannelCtrlCmdEventSetNotification(pKernelChannel, pSetEventParams) kchannelCtrlCmdEventSetNotification_DISPATCH(pKernelChannel, pSetEventParams)
372 #define kchannelCtrlCmdGetClassEngineidA06F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidA06F_DISPATCH(pKernelChannel, pParams)
373 #define kchannelCtrlCmdResetChannelA06F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelA06F_DISPATCH(pKernelChannel, pResetChannelParams)
374 #define kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoSchedule_DISPATCH(pKernelChannel, pSchedParams)
375 #define kchannelCtrlCmdBind(pKernelChannel, pParams) kchannelCtrlCmdBind_DISPATCH(pKernelChannel, pParams)
376 #define kchannelCtrlCmdGetMmuFaultInfoA06F(pKernelChannel, pFaultInfoParams) kchannelCtrlCmdGetMmuFaultInfoA06F_DISPATCH(pKernelChannel, pFaultInfoParams)
377 #define kchannelCtrlCmdSetErrorNotifier(pKernelChannel, pSetErrorNotifierParams) kchannelCtrlCmdSetErrorNotifier_DISPATCH(pKernelChannel, pSetErrorNotifierParams)
378 #define kchannelCtrlCmdSetInterleaveLevel(pKernelChannel, pParams) kchannelCtrlCmdSetInterleaveLevel_DISPATCH(pKernelChannel, pParams)
379 #define kchannelCtrlCmdRestartRunlist(pKernelChannel, pParams) kchannelCtrlCmdRestartRunlist_DISPATCH(pKernelChannel, pParams)
380 #define kchannelCtrlCmdGetClassEngineidA16F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidA16F_DISPATCH(pKernelChannel, pParams)
381 #define kchannelCtrlCmdResetChannelA16F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelA16F_DISPATCH(pKernelChannel, pResetChannelParams)
382 #define kchannelCtrlCmdGpFifoScheduleA16F(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoScheduleA16F_DISPATCH(pKernelChannel, pSchedParams)
383 #define kchannelCtrlCmdGetClassEngineidA26F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidA26F_DISPATCH(pKernelChannel, pParams)
384 #define kchannelCtrlCmdResetChannelA26F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelA26F_DISPATCH(pKernelChannel, pResetChannelParams)
385 #define kchannelFCtrlCmdGpFifoScheduleA26F(pKernelChannel, pSchedParams) kchannelFCtrlCmdGpFifoScheduleA26F_DISPATCH(pKernelChannel, pSchedParams)
386 #define kchannelCtrlCmdGetClassEngineidB06F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidB06F_DISPATCH(pKernelChannel, pParams)
387 #define kchannelCtrlCmdResetChannelB06F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelB06F_DISPATCH(pKernelChannel, pResetChannelParams)
388 #define kchannelCtrlCmdGpFifoScheduleB06F(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoScheduleB06F_DISPATCH(pKernelChannel, pSchedParams)
389 #define kchannelCtrlCmdBindB06F(pKernelChannel, pParams) kchannelCtrlCmdBindB06F_DISPATCH(pKernelChannel, pParams)
390 #define kchannelCtrlCmdGetEngineCtxSize(pKernelChannel, pCtxSizeParams) kchannelCtrlCmdGetEngineCtxSize_DISPATCH(pKernelChannel, pCtxSizeParams)
391 #define kchannelCtrlCmdGetEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdGetEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
392 #define kchannelCtrlCmdMigrateEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdMigrateEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
393 #define kchannelCtrlCmdGetEngineCtxState(pKernelChannel, pCtxStateParams) kchannelCtrlCmdGetEngineCtxState_DISPATCH(pKernelChannel, pCtxStateParams)
394 #define kchannelCtrlCmdGetChannelHwState(pKernelChannel, pParams) kchannelCtrlCmdGetChannelHwState_DISPATCH(pKernelChannel, pParams)
395 #define kchannelCtrlCmdSetChannelHwState(pKernelChannel, pParams) kchannelCtrlCmdSetChannelHwState_DISPATCH(pKernelChannel, pParams)
396 #define kchannelCtrlCmdSaveEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdSaveEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
397 #define kchannelCtrlCmdRestoreEngineCtxData(pKernelChannel, pCtxBuffParams) kchannelCtrlCmdRestoreEngineCtxData_DISPATCH(pKernelChannel, pCtxBuffParams)
398 #define kchannelCtrlCmdGetClassEngineidC06F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidC06F_DISPATCH(pKernelChannel, pParams)
399 #define kchannelCtrlCmdResetChannelC06F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelC06F_DISPATCH(pKernelChannel, pResetChannelParams)
400 #define kchannelCtrlCmdGpFifoScheduleC06F(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoScheduleC06F_DISPATCH(pKernelChannel, pSchedParams)
401 #define kchannelCtrlCmdBindC06F(pKernelChannel, pParams) kchannelCtrlCmdBindC06F_DISPATCH(pKernelChannel, pParams)
402 #define kchannelCtrlCmdGetClassEngineidC36F(pKernelChannel, pParams) kchannelCtrlCmdGetClassEngineidC36F_DISPATCH(pKernelChannel, pParams)
403 #define kchannelCtrlCmdResetChannelC36F(pKernelChannel, pResetChannelParams) kchannelCtrlCmdResetChannelC36F_DISPATCH(pKernelChannel, pResetChannelParams)
404 #define kchannelCtrlCmdGpFifoScheduleC36F(pKernelChannel, pSchedParams) kchannelCtrlCmdGpFifoScheduleC36F_DISPATCH(pKernelChannel, pSchedParams)
405 #define kchannelCtrlCmdBindC36F(pKernelChannel, pParams) kchannelCtrlCmdBindC36F_DISPATCH(pKernelChannel, pParams)
406 #define kchannelCtrlCmdGpfifoGetWorkSubmitToken(pKernelChannel, pTokenParams) kchannelCtrlCmdGpfifoGetWorkSubmitToken_DISPATCH(pKernelChannel, pTokenParams)
407 #define kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer(pKernelChannel, pFaultMthdBufferParams) kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer_DISPATCH(pKernelChannel, pFaultMthdBufferParams)
408 #define kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex(pKernelChannel, pParams) kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex_DISPATCH(pKernelChannel, pParams)
409 #define kchannelCtrlCmdStopChannel(pKernelChannel, pStopChannelParams) kchannelCtrlCmdStopChannel_DISPATCH(pKernelChannel, pStopChannelParams)
410 #define kchannelCtrlGetTpcPartitionMode(pKernelChannel, pParams) kchannelCtrlGetTpcPartitionMode_DISPATCH(pKernelChannel, pParams)
411 #define kchannelCtrlSetTpcPartitionMode(pKernelChannel, pParams) kchannelCtrlSetTpcPartitionMode_DISPATCH(pKernelChannel, pParams)
412 #define kchannelCtrlGetMMUDebugMode(pKernelChannel, pParams) kchannelCtrlGetMMUDebugMode_DISPATCH(pKernelChannel, pParams)
413 #define kchannelCtrlProgramVidmemPromote(pKernelChannel, pParams) kchannelCtrlProgramVidmemPromote_DISPATCH(pKernelChannel, pParams)
414 #define kchannelShareCallback(pGpuResource, pInvokingClient, pParentRef, pSharePolicy) kchannelShareCallback_DISPATCH(pGpuResource, pInvokingClient, pParentRef, pSharePolicy)
415 #define kchannelGetOrAllocNotifShare(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare) kchannelGetOrAllocNotifShare_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare)
416 #define kchannelMapTo(pResource, pParams) kchannelMapTo_DISPATCH(pResource, pParams)
417 #define kchannelSetNotificationShare(pNotifier, pNotifShare) kchannelSetNotificationShare_DISPATCH(pNotifier, pNotifShare)
418 #define kchannelGetRefCount(pResource) kchannelGetRefCount_DISPATCH(pResource)
419 #define kchannelAddAdditionalDependants(pClient, pResource, pReference) kchannelAddAdditionalDependants_DISPATCH(pClient, pResource, pReference)
420 #define kchannelControl_Prologue(pResource, pCallContext, pParams) kchannelControl_Prologue_DISPATCH(pResource, pCallContext, pParams)
421 #define kchannelGetRegBaseOffsetAndSize(pGpuResource, pGpu, pOffset, pSize) kchannelGetRegBaseOffsetAndSize_DISPATCH(pGpuResource, pGpu, pOffset, pSize)
422 #define kchannelInternalControlForward(pGpuResource, command, pParams, size) kchannelInternalControlForward_DISPATCH(pGpuResource, command, pParams, size)
423 #define kchannelUnmapFrom(pResource, pParams) kchannelUnmapFrom_DISPATCH(pResource, pParams)
424 #define kchannelControl_Epilogue(pResource, pCallContext, pParams) kchannelControl_Epilogue_DISPATCH(pResource, pCallContext, pParams)
425 #define kchannelControlLookup(pResource, pParams, ppEntry) kchannelControlLookup_DISPATCH(pResource, pParams, ppEntry)
426 #define kchannelGetInternalObjectHandle(pGpuResource) kchannelGetInternalObjectHandle_DISPATCH(pGpuResource)
427 #define kchannelControl(pGpuResource, pCallContext, pParams) kchannelControl_DISPATCH(pGpuResource, pCallContext, pParams)
428 #define kchannelGetMemoryMappingDescriptor(pRmResource, ppMemDesc) kchannelGetMemoryMappingDescriptor_DISPATCH(pRmResource, ppMemDesc)
429 #define kchannelControlFilter(pResource, pCallContext, pParams) kchannelControlFilter_DISPATCH(pResource, pCallContext, pParams)
430 #define kchannelUnregisterEvent(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent) kchannelUnregisterEvent_DISPATCH(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent)
431 #define kchannelControlSerialization_Prologue(pResource, pCallContext, pParams) kchannelControlSerialization_Prologue_DISPATCH(pResource, pCallContext, pParams)
432 #define kchannelCanCopy(pResource) kchannelCanCopy_DISPATCH(pResource)
433 #define kchannelPreDestruct(pResource) kchannelPreDestruct_DISPATCH(pResource)
434 #define kchannelIsDuplicate(pResource, hMemory, pDuplicate) kchannelIsDuplicate_DISPATCH(pResource, hMemory, pDuplicate)
435 #define kchannelControlSerialization_Epilogue(pResource, pCallContext, pParams) kchannelControlSerialization_Epilogue_DISPATCH(pResource, pCallContext, pParams)
436 #define kchannelGetNotificationListPtr(pNotifier) kchannelGetNotificationListPtr_DISPATCH(pNotifier)
437 #define kchannelGetNotificationShare(pNotifier) kchannelGetNotificationShare_DISPATCH(pNotifier)
438 #define kchannelAccessCallback(pResource, pInvokingClient, pAllocParams, accessRight) kchannelAccessCallback_DISPATCH(pResource, pInvokingClient, pAllocParams, accessRight)
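//
// The wrapper macros above route each call to the corresponding _DISPATCH
// inline defined later in this header, which calls through the per-object
// function pointer installed by NVOC, e.g. (illustrative):
//   kchannelMap(pKernelChannel, pCallContext, pParams, pCpuMapping)
//     -> kchannelMap_DISPATCH(pKernelChannel, pCallContext, pParams, pCpuMapping)
//     -> pKernelChannel->__kchannelMap__(pKernelChannel, pCallContext, pParams, pCpuMapping)
//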
439 NV_STATUS kchannelNotifyRc_IMPL(struct KernelChannel *pKernelChannel);
440 
441 
442 #ifdef __nvoc_kernel_channel_h_disabled
443 static inline NV_STATUS kchannelNotifyRc(struct KernelChannel *pKernelChannel) {
444     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
445     return NV_ERR_NOT_SUPPORTED;
446 }
447 #else //__nvoc_kernel_channel_h_disabled
448 #define kchannelNotifyRc(pKernelChannel) kchannelNotifyRc_IMPL(pKernelChannel)
449 #endif //__nvoc_kernel_channel_h_disabled
450 
451 #define kchannelNotifyRc_HAL(pKernelChannel) kchannelNotifyRc(pKernelChannel)
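
//
// The pattern above repeats for every kchannel* entry point below: when
// __nvoc_kernel_channel_h_disabled is defined, the name resolves to an inline
// stub that asserts and returns an error/default value; otherwise it is a
// macro forwarding to the _IMPL or HAL-specific (_GM107, _GV100, ...)
// implementation, with a matching *_HAL alias.
//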
452 
453 NvBool kchannelIsSchedulable_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
454 
455 
456 #ifdef __nvoc_kernel_channel_h_disabled
457 static inline NvBool kchannelIsSchedulable(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
458     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
459     return NV_FALSE;
460 }
461 #else //__nvoc_kernel_channel_h_disabled
462 #define kchannelIsSchedulable(pGpu, pKernelChannel) kchannelIsSchedulable_IMPL(pGpu, pKernelChannel)
463 #endif //__nvoc_kernel_channel_h_disabled
464 
465 #define kchannelIsSchedulable_HAL(pGpu, pKernelChannel) kchannelIsSchedulable(pGpu, pKernelChannel)
466 
467 NV_STATUS kchannelAllocMem_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 Flags, NvU32 verifFlags);
468 
469 
470 #ifdef __nvoc_kernel_channel_h_disabled
471 static inline NV_STATUS kchannelAllocMem(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 Flags, NvU32 verifFlags) {
472     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
473     return NV_ERR_NOT_SUPPORTED;
474 }
475 #else //__nvoc_kernel_channel_h_disabled
476 #define kchannelAllocMem(pGpu, pKernelChannel, Flags, verifFlags) kchannelAllocMem_GM107(pGpu, pKernelChannel, Flags, verifFlags)
477 #endif //__nvoc_kernel_channel_h_disabled
478 
479 #define kchannelAllocMem_HAL(pGpu, pKernelChannel, Flags, verifFlags) kchannelAllocMem(pGpu, pKernelChannel, Flags, verifFlags)
480 
481 void kchannelDestroyMem_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
482 
483 
484 #ifdef __nvoc_kernel_channel_h_disabled
485 static inline void kchannelDestroyMem(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
486     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
487 }
488 #else //__nvoc_kernel_channel_h_disabled
489 #define kchannelDestroyMem(pGpu, pKernelChannel) kchannelDestroyMem_GM107(pGpu, pKernelChannel)
490 #endif //__nvoc_kernel_channel_h_disabled
491 
492 #define kchannelDestroyMem_HAL(pGpu, pKernelChannel) kchannelDestroyMem(pGpu, pKernelChannel)
493 
494 NV_STATUS kchannelGetChannelPhysicalState_KERNEL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NV208F_CTRL_FIFO_GET_CHANNEL_STATE_PARAMS *pChannelStateParams);
495 
496 
497 #ifdef __nvoc_kernel_channel_h_disabled
498 static inline NV_STATUS kchannelGetChannelPhysicalState(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NV208F_CTRL_FIFO_GET_CHANNEL_STATE_PARAMS *pChannelStateParams) {
499     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
500     return NV_ERR_NOT_SUPPORTED;
501 }
502 #else //__nvoc_kernel_channel_h_disabled
503 #define kchannelGetChannelPhysicalState(pGpu, pKernelChannel, pChannelStateParams) kchannelGetChannelPhysicalState_KERNEL(pGpu, pKernelChannel, pChannelStateParams)
504 #endif //__nvoc_kernel_channel_h_disabled
505 
506 #define kchannelGetChannelPhysicalState_HAL(pGpu, pKernelChannel, pChannelStateParams) kchannelGetChannelPhysicalState(pGpu, pKernelChannel, pChannelStateParams)
507 
508 static inline NvU32 kchannelEmbedRunlistIDForSMC_13cd8d(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
509     NV_ASSERT_PRECOMP(0);
510     return 0;
511 }
512 
513 
514 #ifdef __nvoc_kernel_channel_h_disabled
515 static inline NvU32 kchannelEmbedRunlistIDForSMC(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
516     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
517     return 0;
518 }
519 #else //__nvoc_kernel_channel_h_disabled
520 #define kchannelEmbedRunlistIDForSMC(pGpu, pKernelChannel) kchannelEmbedRunlistIDForSMC_13cd8d(pGpu, pKernelChannel)
521 #endif //__nvoc_kernel_channel_h_disabled
522 
523 #define kchannelEmbedRunlistIDForSMC_HAL(pGpu, pKernelChannel) kchannelEmbedRunlistIDForSMC(pGpu, pKernelChannel)
524 
525 NV_STATUS kchannelAllocHwID_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle hClient, NvU32 Flags, NvU32 verifFlags2, NvU32 ChID);
526 
527 
528 #ifdef __nvoc_kernel_channel_h_disabled
529 static inline NV_STATUS kchannelAllocHwID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle hClient, NvU32 Flags, NvU32 verifFlags2, NvU32 ChID) {
530     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
531     return NV_ERR_NOT_SUPPORTED;
532 }
533 #else //__nvoc_kernel_channel_h_disabled
534 #define kchannelAllocHwID(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID) kchannelAllocHwID_GM107(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID)
535 #endif //__nvoc_kernel_channel_h_disabled
536 
537 #define kchannelAllocHwID_HAL(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID) kchannelAllocHwID(pGpu, pKernelChannel, hClient, Flags, verifFlags2, ChID)
538 
539 NV_STATUS kchannelFreeHwID_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
540 
541 
542 #ifdef __nvoc_kernel_channel_h_disabled
543 static inline NV_STATUS kchannelFreeHwID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
544     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
545     return NV_ERR_NOT_SUPPORTED;
546 }
547 #else //__nvoc_kernel_channel_h_disabled
548 #define kchannelFreeHwID(pGpu, pKernelChannel) kchannelFreeHwID_GM107(pGpu, pKernelChannel)
549 #endif //__nvoc_kernel_channel_h_disabled
550 
551 #define kchannelFreeHwID_HAL(pGpu, pKernelChannel) kchannelFreeHwID(pGpu, pKernelChannel)
552 
553 NV_STATUS kchannelGetUserdInfo_GM107(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU64 *userBase, NvU64 *offset, NvU64 *length);
554 
555 
556 #ifdef __nvoc_kernel_channel_h_disabled
557 static inline NV_STATUS kchannelGetUserdInfo(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU64 *userBase, NvU64 *offset, NvU64 *length) {
558     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
559     return NV_ERR_NOT_SUPPORTED;
560 }
561 #else //__nvoc_kernel_channel_h_disabled
562 #define kchannelGetUserdInfo(pGpu, arg0, userBase, offset, length) kchannelGetUserdInfo_GM107(pGpu, arg0, userBase, offset, length)
563 #endif //__nvoc_kernel_channel_h_disabled
564 
565 #define kchannelGetUserdInfo_HAL(pGpu, arg0, userBase, offset, length) kchannelGetUserdInfo(pGpu, arg0, userBase, offset, length)
566 
567 NV_STATUS kchannelGetUserdBar1MapOffset_GM107(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU64 *bar1Offset, NvU32 *bar1MapSize);
568 
569 
570 #ifdef __nvoc_kernel_channel_h_disabled
571 static inline NV_STATUS kchannelGetUserdBar1MapOffset(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU64 *bar1Offset, NvU32 *bar1MapSize) {
572     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
573     return NV_ERR_NOT_SUPPORTED;
574 }
575 #else //__nvoc_kernel_channel_h_disabled
576 #define kchannelGetUserdBar1MapOffset(pGpu, arg0, bar1Offset, bar1MapSize) kchannelGetUserdBar1MapOffset_GM107(pGpu, arg0, bar1Offset, bar1MapSize)
577 #endif //__nvoc_kernel_channel_h_disabled
578 
579 #define kchannelGetUserdBar1MapOffset_HAL(pGpu, arg0, bar1Offset, bar1MapSize) kchannelGetUserdBar1MapOffset(pGpu, arg0, bar1Offset, bar1MapSize)
580 
581 NV_STATUS kchannelCreateUserdMemDescBc_GV100(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle arg0, NvHandle *arg1, NvU64 *arg2);
582 
583 
584 #ifdef __nvoc_kernel_channel_h_disabled
585 static inline NV_STATUS kchannelCreateUserdMemDescBc(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle arg0, NvHandle *arg1, NvU64 *arg2) {
586     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
587     return NV_ERR_NOT_SUPPORTED;
588 }
589 #else //__nvoc_kernel_channel_h_disabled
590 #define kchannelCreateUserdMemDescBc(pGpu, pKernelChannel, arg0, arg1, arg2) kchannelCreateUserdMemDescBc_GV100(pGpu, pKernelChannel, arg0, arg1, arg2)
591 #endif //__nvoc_kernel_channel_h_disabled
592 
593 #define kchannelCreateUserdMemDescBc_HAL(pGpu, pKernelChannel, arg0, arg1, arg2) kchannelCreateUserdMemDescBc(pGpu, pKernelChannel, arg0, arg1, arg2)
594 
595 NV_STATUS kchannelCreateUserdMemDesc_GV100(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvHandle arg1, NvHandle arg2, NvU64 arg3, NvU64 *arg4, NvU32 *arg5);
596 
597 
598 #ifdef __nvoc_kernel_channel_h_disabled
599 static inline NV_STATUS kchannelCreateUserdMemDesc(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvHandle arg1, NvHandle arg2, NvU64 arg3, NvU64 *arg4, NvU32 *arg5) {
600     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
601     return NV_ERR_NOT_SUPPORTED;
602 }
603 #else //__nvoc_kernel_channel_h_disabled
604 #define kchannelCreateUserdMemDesc(pGpu, arg0, arg1, arg2, arg3, arg4, arg5) kchannelCreateUserdMemDesc_GV100(pGpu, arg0, arg1, arg2, arg3, arg4, arg5)
605 #endif //__nvoc_kernel_channel_h_disabled
606 
607 #define kchannelCreateUserdMemDesc_HAL(pGpu, arg0, arg1, arg2, arg3, arg4, arg5) kchannelCreateUserdMemDesc(pGpu, arg0, arg1, arg2, arg3, arg4, arg5)
608 
609 void kchannelDestroyUserdMemDesc_GV100(struct OBJGPU *pGpu, struct KernelChannel *arg0);
610 
611 
612 #ifdef __nvoc_kernel_channel_h_disabled
613 static inline void kchannelDestroyUserdMemDesc(struct OBJGPU *pGpu, struct KernelChannel *arg0) {
614     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
615 }
616 #else //__nvoc_kernel_channel_h_disabled
617 #define kchannelDestroyUserdMemDesc(pGpu, arg0) kchannelDestroyUserdMemDesc_GV100(pGpu, arg0)
618 #endif //__nvoc_kernel_channel_h_disabled
619 
620 #define kchannelDestroyUserdMemDesc_HAL(pGpu, arg0) kchannelDestroyUserdMemDesc(pGpu, arg0)
621 
622 NV_STATUS kchannelGetEngine_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 *engDesc);
623 
624 
625 #ifdef __nvoc_kernel_channel_h_disabled
626 static inline NV_STATUS kchannelGetEngine(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 *engDesc) {
627     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
628     return NV_ERR_NOT_SUPPORTED;
629 }
630 #else //__nvoc_kernel_channel_h_disabled
631 #define kchannelGetEngine(pGpu, pKernelChannel, engDesc) kchannelGetEngine_GM107(pGpu, pKernelChannel, engDesc)
632 #endif //__nvoc_kernel_channel_h_disabled
633 
634 #define kchannelGetEngine_HAL(pGpu, pKernelChannel, engDesc) kchannelGetEngine(pGpu, pKernelChannel, engDesc)
635 
636 static inline NV_STATUS kchannelFwdToInternalCtrl_56cd7a(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 internalCmd, RmCtrlParams *pRmCtrlParams) {
637     return NV_OK;
638 }
639 
640 
641 #ifdef __nvoc_kernel_channel_h_disabled
642 static inline NV_STATUS kchannelFwdToInternalCtrl(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 internalCmd, RmCtrlParams *pRmCtrlParams) {
643     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
644     return NV_ERR_NOT_SUPPORTED;
645 }
646 #else //__nvoc_kernel_channel_h_disabled
647 #define kchannelFwdToInternalCtrl(pGpu, pKernelChannel, internalCmd, pRmCtrlParams) kchannelFwdToInternalCtrl_56cd7a(pGpu, pKernelChannel, internalCmd, pRmCtrlParams)
648 #endif //__nvoc_kernel_channel_h_disabled
649 
650 #define kchannelFwdToInternalCtrl_HAL(pGpu, pKernelChannel, internalCmd, pRmCtrlParams) kchannelFwdToInternalCtrl(pGpu, pKernelChannel, internalCmd, pRmCtrlParams)
651 
652 static inline NV_STATUS kchannelAllocChannel_56cd7a(struct KernelChannel *pKernelChannel, NV_CHANNEL_ALLOC_PARAMS *pChannelGpfifoParams) {
653     return NV_OK;
654 }
655 
656 
657 #ifdef __nvoc_kernel_channel_h_disabled
658 static inline NV_STATUS kchannelAllocChannel(struct KernelChannel *pKernelChannel, NV_CHANNEL_ALLOC_PARAMS *pChannelGpfifoParams) {
659     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
660     return NV_ERR_NOT_SUPPORTED;
661 }
662 #else //__nvoc_kernel_channel_h_disabled
663 #define kchannelAllocChannel(pKernelChannel, pChannelGpfifoParams) kchannelAllocChannel_56cd7a(pKernelChannel, pChannelGpfifoParams)
664 #endif //__nvoc_kernel_channel_h_disabled
665 
666 #define kchannelAllocChannel_HAL(pKernelChannel, pChannelGpfifoParams) kchannelAllocChannel(pKernelChannel, pChannelGpfifoParams)
667 
668 static inline NvBool kchannelIsValid_cbe027(struct KernelChannel *pKernelChannel) {
669     return ((NvBool)(0 == 0));
670 }
671 
672 
673 #ifdef __nvoc_kernel_channel_h_disabled
674 static inline NvBool kchannelIsValid(struct KernelChannel *pKernelChannel) {
675     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
676     return NV_FALSE;
677 }
678 #else //__nvoc_kernel_channel_h_disabled
679 #define kchannelIsValid(pKernelChannel) kchannelIsValid_cbe027(pKernelChannel)
680 #endif //__nvoc_kernel_channel_h_disabled
681 
682 #define kchannelIsValid_HAL(pKernelChannel) kchannelIsValid(pKernelChannel)
683 
684 NV_STATUS kchannelGetClassEngineID_GM107(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle handle, NvU32 *classEngineID, NvU32 *classID, RM_ENGINE_TYPE *rmEngineID);
685 
686 
687 #ifdef __nvoc_kernel_channel_h_disabled
688 static inline NV_STATUS kchannelGetClassEngineID(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvHandle handle, NvU32 *classEngineID, NvU32 *classID, RM_ENGINE_TYPE *rmEngineID) {
689     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
690     return NV_ERR_NOT_SUPPORTED;
691 }
692 #else //__nvoc_kernel_channel_h_disabled
693 #define kchannelGetClassEngineID(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID) kchannelGetClassEngineID_GM107(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID)
694 #endif //__nvoc_kernel_channel_h_disabled
695 
696 #define kchannelGetClassEngineID_HAL(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID) kchannelGetClassEngineID(pGpu, pKernelChannel, handle, classEngineID, classID, rmEngineID)
697 
698 NV_STATUS kchannelEnableVirtualContext_GM107(struct KernelChannel *arg0);
699 
700 
701 #ifdef __nvoc_kernel_channel_h_disabled
702 static inline NV_STATUS kchannelEnableVirtualContext(struct KernelChannel *arg0) {
703     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
704     return NV_ERR_NOT_SUPPORTED;
705 }
706 #else //__nvoc_kernel_channel_h_disabled
707 #define kchannelEnableVirtualContext(arg0) kchannelEnableVirtualContext_GM107(arg0)
708 #endif //__nvoc_kernel_channel_h_disabled
709 
710 #define kchannelEnableVirtualContext_HAL(arg0) kchannelEnableVirtualContext(arg0)
711 
712 NV_STATUS kchannelMap_IMPL(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping);
713 
714 static inline NV_STATUS kchannelMap_DISPATCH(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, struct RS_CPU_MAP_PARAMS *pParams, RsCpuMapping *pCpuMapping) {
715     return pKernelChannel->__kchannelMap__(pKernelChannel, pCallContext, pParams, pCpuMapping);
716 }
717 
718 NV_STATUS kchannelUnmap_IMPL(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping);
719 
720 static inline NV_STATUS kchannelUnmap_DISPATCH(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, RsCpuMapping *pCpuMapping) {
721     return pKernelChannel->__kchannelUnmap__(pKernelChannel, pCallContext, pCpuMapping);
722 }
723 
724 NV_STATUS kchannelGetMapAddrSpace_IMPL(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace);
725 
726 static inline NV_STATUS kchannelGetMapAddrSpace_DISPATCH(struct KernelChannel *pKernelChannel, CALL_CONTEXT *pCallContext, NvU32 mapFlags, NV_ADDRESS_SPACE *pAddrSpace) {
727     return pKernelChannel->__kchannelGetMapAddrSpace__(pKernelChannel, pCallContext, mapFlags, pAddrSpace);
728 }
729 
730 NV_STATUS kchannelGetMemInterMapParams_IMPL(struct KernelChannel *pKernelChannel, RMRES_MEM_INTER_MAP_PARAMS *pParams);
731 
732 static inline NV_STATUS kchannelGetMemInterMapParams_DISPATCH(struct KernelChannel *pKernelChannel, RMRES_MEM_INTER_MAP_PARAMS *pParams) {
733     return pKernelChannel->__kchannelGetMemInterMapParams__(pKernelChannel, pParams);
734 }
735 
736 NV_STATUS kchannelCheckMemInterUnmap_IMPL(struct KernelChannel *pKernelChannel, NvBool bSubdeviceHandleProvided);
737 
738 static inline NV_STATUS kchannelCheckMemInterUnmap_DISPATCH(struct KernelChannel *pKernelChannel, NvBool bSubdeviceHandleProvided) {
739     return pKernelChannel->__kchannelCheckMemInterUnmap__(pKernelChannel, bSubdeviceHandleProvided);
740 }
741 
742 NV_STATUS kchannelCreateUserMemDesc_GM107(struct OBJGPU *pGpu, struct KernelChannel *arg0);
743 
744 NV_STATUS kchannelCreateUserMemDesc_GA10B(struct OBJGPU *pGpu, struct KernelChannel *arg0);
745 
746 static inline NV_STATUS kchannelCreateUserMemDesc_DISPATCH(struct OBJGPU *pGpu, struct KernelChannel *arg0) {
747     return arg0->__kchannelCreateUserMemDesc__(pGpu, arg0);
748 }
749 
750 NvBool kchannelIsUserdAddrSizeValid_GV100(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi);
751 
752 NvBool kchannelIsUserdAddrSizeValid_GA100(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi);
753 
754 NvBool kchannelIsUserdAddrSizeValid_GH100(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi);
755 
756 static inline NvBool kchannelIsUserdAddrSizeValid_DISPATCH(struct KernelChannel *pKernelChannel, NvU32 userdAddrLo, NvU32 userdAddrHi) {
757     return pKernelChannel->__kchannelIsUserdAddrSizeValid__(pKernelChannel, userdAddrLo, userdAddrHi);
758 }
759 
760 NV_STATUS kchannelCtrlCmdResetIsolatedChannel_IMPL(struct KernelChannel *pKernelChannel, NV506F_CTRL_CMD_RESET_ISOLATED_CHANNEL_PARAMS *pResetParams);
761 
762 static inline NV_STATUS kchannelCtrlCmdResetIsolatedChannel_DISPATCH(struct KernelChannel *pKernelChannel, NV506F_CTRL_CMD_RESET_ISOLATED_CHANNEL_PARAMS *pResetParams) {
763     return pKernelChannel->__kchannelCtrlCmdResetIsolatedChannel__(pKernelChannel, pResetParams);
764 }
765 
766 NV_STATUS kchannelCtrlCmdGetClassEngineid_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams);
767 
768 static inline NV_STATUS kchannelCtrlCmdGetClassEngineid_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
769     return pKernelChannel->__kchannelCtrlCmdGetClassEngineid__(pKernelChannel, pParams);
770 }
771 
772 NV_STATUS kchannelCtrlCmdResetChannel_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams);
773 
774 static inline NV_STATUS kchannelCtrlCmdResetChannel_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
775     return pKernelChannel->__kchannelCtrlCmdResetChannel__(pKernelChannel, pResetChannelParams);
776 }
777 
778 NV_STATUS kchannelCtrlCmdGetDeferRCState_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_GET_DEFER_RC_STATE_PARAMS *pStateParams);
779 
780 static inline NV_STATUS kchannelCtrlCmdGetDeferRCState_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_CMD_GET_DEFER_RC_STATE_PARAMS *pStateParams) {
781     return pKernelChannel->__kchannelCtrlCmdGetDeferRCState__(pKernelChannel, pStateParams);
782 }
783 
784 NV_STATUS kchannelCtrlCmdGetMmuFaultInfo_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_MMU_FAULT_INFO_PARAMS *pFaultInfoParams);
785 
786 static inline NV_STATUS kchannelCtrlCmdGetMmuFaultInfo_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_GET_MMU_FAULT_INFO_PARAMS *pFaultInfoParams) {
787     return pKernelChannel->__kchannelCtrlCmdGetMmuFaultInfo__(pKernelChannel, pFaultInfoParams);
788 }
789 
790 NV_STATUS kchannelCtrlCmdEventSetNotification_IMPL(struct KernelChannel *pKernelChannel, NV906F_CTRL_EVENT_SET_NOTIFICATION_PARAMS *pSetEventParams);
791 
792 static inline NV_STATUS kchannelCtrlCmdEventSetNotification_DISPATCH(struct KernelChannel *pKernelChannel, NV906F_CTRL_EVENT_SET_NOTIFICATION_PARAMS *pSetEventParams) {
793     return pKernelChannel->__kchannelCtrlCmdEventSetNotification__(pKernelChannel, pSetEventParams);
794 }
795 
796 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA06F_6a9a13(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
797     return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
798 }
799 
800 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA06F_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
801     return pKernelChannel->__kchannelCtrlCmdGetClassEngineidA06F__(pKernelChannel, pParams);
802 }
803 
804 static inline NV_STATUS kchannelCtrlCmdResetChannelA06F_ef73a1(struct KernelChannel *pKernelChannel, NVA06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
805     return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
806 }
807 
808 static inline NV_STATUS kchannelCtrlCmdResetChannelA06F_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
809     return pKernelChannel->__kchannelCtrlCmdResetChannelA06F__(pKernelChannel, pResetChannelParams);
810 }
811 
812 NV_STATUS kchannelCtrlCmdGpFifoSchedule_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams);
813 
814 static inline NV_STATUS kchannelCtrlCmdGpFifoSchedule_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
815     return pKernelChannel->__kchannelCtrlCmdGpFifoSchedule__(pKernelChannel, pSchedParams);
816 }
817 
818 NV_STATUS kchannelCtrlCmdBind_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_BIND_PARAMS *pParams);
819 
820 static inline NV_STATUS kchannelCtrlCmdBind_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_BIND_PARAMS *pParams) {
821     return pKernelChannel->__kchannelCtrlCmdBind__(pKernelChannel, pParams);
822 }
823 
824 static inline NV_STATUS kchannelCtrlCmdGetMmuFaultInfoA06F_a7f9ac(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GET_MMU_FAULT_INFO_PARAMS *pFaultInfoParams) {
825     return kchannelCtrlCmdGetMmuFaultInfo(pKernelChannel, pFaultInfoParams);
826 }
827 
828 static inline NV_STATUS kchannelCtrlCmdGetMmuFaultInfoA06F_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_GET_MMU_FAULT_INFO_PARAMS *pFaultInfoParams) {
829     return pKernelChannel->__kchannelCtrlCmdGetMmuFaultInfoA06F__(pKernelChannel, pFaultInfoParams);
830 }
831 
832 NV_STATUS kchannelCtrlCmdSetErrorNotifier_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_SET_ERROR_NOTIFIER_PARAMS *pSetErrorNotifierParams);
833 
834 static inline NV_STATUS kchannelCtrlCmdSetErrorNotifier_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_SET_ERROR_NOTIFIER_PARAMS *pSetErrorNotifierParams) {
835     return pKernelChannel->__kchannelCtrlCmdSetErrorNotifier__(pKernelChannel, pSetErrorNotifierParams);
836 }
837 
838 NV_STATUS kchannelCtrlCmdSetInterleaveLevel_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_INTERLEAVE_LEVEL_PARAMS *pParams);
839 
840 static inline NV_STATUS kchannelCtrlCmdSetInterleaveLevel_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_INTERLEAVE_LEVEL_PARAMS *pParams) {
841     return pKernelChannel->__kchannelCtrlCmdSetInterleaveLevel__(pKernelChannel, pParams);
842 }
843 
844 NV_STATUS kchannelCtrlCmdRestartRunlist_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_RESTART_RUNLIST_PARAMS *pParams);
845 
846 static inline NV_STATUS kchannelCtrlCmdRestartRunlist_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_RESTART_RUNLIST_PARAMS *pParams) {
847     return pKernelChannel->__kchannelCtrlCmdRestartRunlist__(pKernelChannel, pParams);
848 }
849 
850 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA16F_6a9a13(struct KernelChannel *pKernelChannel, NVA16F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
851     return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
852 }
853 
854 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA16F_DISPATCH(struct KernelChannel *pKernelChannel, NVA16F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
855     return pKernelChannel->__kchannelCtrlCmdGetClassEngineidA16F__(pKernelChannel, pParams);
856 }
857 
858 static inline NV_STATUS kchannelCtrlCmdResetChannelA16F_ef73a1(struct KernelChannel *pKernelChannel, NVA16F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
859     return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
860 }
861 
862 static inline NV_STATUS kchannelCtrlCmdResetChannelA16F_DISPATCH(struct KernelChannel *pKernelChannel, NVA16F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
863     return pKernelChannel->__kchannelCtrlCmdResetChannelA16F__(pKernelChannel, pResetChannelParams);
864 }
865 
866 static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleA16F_6546a6(struct KernelChannel *pKernelChannel, NVA16F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
867     return kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams);
868 }
869 
870 static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleA16F_DISPATCH(struct KernelChannel *pKernelChannel, NVA16F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
871     return pKernelChannel->__kchannelCtrlCmdGpFifoScheduleA16F__(pKernelChannel, pSchedParams);
872 }
873 
874 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA26F_6a9a13(struct KernelChannel *pKernelChannel, NVA26F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
875     return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
876 }
877 
878 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidA26F_DISPATCH(struct KernelChannel *pKernelChannel, NVA26F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
879     return pKernelChannel->__kchannelCtrlCmdGetClassEngineidA26F__(pKernelChannel, pParams);
880 }
881 
882 static inline NV_STATUS kchannelCtrlCmdResetChannelA26F_ef73a1(struct KernelChannel *pKernelChannel, NVA26F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
883     return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
884 }
885 
886 static inline NV_STATUS kchannelCtrlCmdResetChannelA26F_DISPATCH(struct KernelChannel *pKernelChannel, NVA26F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
887     return pKernelChannel->__kchannelCtrlCmdResetChannelA26F__(pKernelChannel, pResetChannelParams);
888 }
889 
890 static inline NV_STATUS kchannelFCtrlCmdGpFifoScheduleA26F_6546a6(struct KernelChannel *pKernelChannel, NVA26F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
891     return kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams);
892 }
893 
894 static inline NV_STATUS kchannelFCtrlCmdGpFifoScheduleA26F_DISPATCH(struct KernelChannel *pKernelChannel, NVA26F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
895     return pKernelChannel->__kchannelFCtrlCmdGpFifoScheduleA26F__(pKernelChannel, pSchedParams);
896 }
897 
898 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidB06F_6a9a13(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
899     return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
900 }
901 
902 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidB06F_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
903     return pKernelChannel->__kchannelCtrlCmdGetClassEngineidB06F__(pKernelChannel, pParams);
904 }
905 
906 static inline NV_STATUS kchannelCtrlCmdResetChannelB06F_ef73a1(struct KernelChannel *pKernelChannel, NVB06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
907     return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
908 }
909 
910 static inline NV_STATUS kchannelCtrlCmdResetChannelB06F_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
911     return pKernelChannel->__kchannelCtrlCmdResetChannelB06F__(pKernelChannel, pResetChannelParams);
912 }
913 
914 static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleB06F_6546a6(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
915     return kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams);
916 }
917 
918 static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleB06F_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
919     return pKernelChannel->__kchannelCtrlCmdGpFifoScheduleB06F__(pKernelChannel, pSchedParams);
920 }
921 
922 static inline NV_STATUS kchannelCtrlCmdBindB06F_2c1c21(struct KernelChannel *pKernelChannel, NVB06F_CTRL_BIND_PARAMS *pParams) {
923     return kchannelCtrlCmdBind(pKernelChannel, pParams);
924 }
925 
926 static inline NV_STATUS kchannelCtrlCmdBindB06F_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_BIND_PARAMS *pParams) {
927     return pKernelChannel->__kchannelCtrlCmdBindB06F__(pKernelChannel, pParams);
928 }
929 
930 NV_STATUS kchannelCtrlCmdGetEngineCtxSize_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_SIZE_PARAMS *pCtxSizeParams);
931 
932 static inline NV_STATUS kchannelCtrlCmdGetEngineCtxSize_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_SIZE_PARAMS *pCtxSizeParams) {
933     return pKernelChannel->__kchannelCtrlCmdGetEngineCtxSize__(pKernelChannel, pCtxSizeParams);
934 }
935 
936 NV_STATUS kchannelCtrlCmdGetEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);
937 
938 static inline NV_STATUS kchannelCtrlCmdGetEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
939     return pKernelChannel->__kchannelCtrlCmdGetEngineCtxData__(pKernelChannel, pCtxBuffParams);
940 }
941 
942 NV_STATUS kchannelCtrlCmdMigrateEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_MIGRATE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);
943 
944 static inline NV_STATUS kchannelCtrlCmdMigrateEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_MIGRATE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
945     return pKernelChannel->__kchannelCtrlCmdMigrateEngineCtxData__(pKernelChannel, pCtxBuffParams);
946 }
947 
948 NV_STATUS kchannelCtrlCmdGetEngineCtxState_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_STATE_PARAMS *pCtxStateParams);
949 
950 static inline NV_STATUS kchannelCtrlCmdGetEngineCtxState_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_ENGINE_CTX_STATE_PARAMS *pCtxStateParams) {
951     return pKernelChannel->__kchannelCtrlCmdGetEngineCtxState__(pKernelChannel, pCtxStateParams);
952 }
953 
954 NV_STATUS kchannelCtrlCmdGetChannelHwState_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_CHANNEL_HW_STATE_PARAMS *pParams);
955 
956 static inline NV_STATUS kchannelCtrlCmdGetChannelHwState_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_GET_CHANNEL_HW_STATE_PARAMS *pParams) {
957     return pKernelChannel->__kchannelCtrlCmdGetChannelHwState__(pKernelChannel, pParams);
958 }
959 
960 NV_STATUS kchannelCtrlCmdSetChannelHwState_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SET_CHANNEL_HW_STATE_PARAMS *pParams);
961 
962 static inline NV_STATUS kchannelCtrlCmdSetChannelHwState_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SET_CHANNEL_HW_STATE_PARAMS *pParams) {
963     return pKernelChannel->__kchannelCtrlCmdSetChannelHwState__(pKernelChannel, pParams);
964 }
965 
966 NV_STATUS kchannelCtrlCmdSaveEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SAVE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);
967 
968 static inline NV_STATUS kchannelCtrlCmdSaveEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_SAVE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
969     return pKernelChannel->__kchannelCtrlCmdSaveEngineCtxData__(pKernelChannel, pCtxBuffParams);
970 }
971 
972 NV_STATUS kchannelCtrlCmdRestoreEngineCtxData_IMPL(struct KernelChannel *pKernelChannel, NVB06F_CTRL_RESTORE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams);
973 
974 static inline NV_STATUS kchannelCtrlCmdRestoreEngineCtxData_DISPATCH(struct KernelChannel *pKernelChannel, NVB06F_CTRL_RESTORE_ENGINE_CTX_DATA_PARAMS *pCtxBuffParams) {
975     return pKernelChannel->__kchannelCtrlCmdRestoreEngineCtxData__(pKernelChannel, pCtxBuffParams);
976 }
977 
978 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidC06F_6a9a13(struct KernelChannel *pKernelChannel, NVC06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
979     return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
980 }
981 
982 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidC06F_DISPATCH(struct KernelChannel *pKernelChannel, NVC06F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
983     return pKernelChannel->__kchannelCtrlCmdGetClassEngineidC06F__(pKernelChannel, pParams);
984 }
985 
986 static inline NV_STATUS kchannelCtrlCmdResetChannelC06F_ef73a1(struct KernelChannel *pKernelChannel, NVC06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
987     return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
988 }
989 
990 static inline NV_STATUS kchannelCtrlCmdResetChannelC06F_DISPATCH(struct KernelChannel *pKernelChannel, NVC06F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
991     return pKernelChannel->__kchannelCtrlCmdResetChannelC06F__(pKernelChannel, pResetChannelParams);
992 }
993 
994 static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleC06F_6546a6(struct KernelChannel *pKernelChannel, NVC06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
995     return kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams);
996 }
997 
998 static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleC06F_DISPATCH(struct KernelChannel *pKernelChannel, NVC06F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
999     return pKernelChannel->__kchannelCtrlCmdGpFifoScheduleC06F__(pKernelChannel, pSchedParams);
1000 }
1001 
1002 static inline NV_STATUS kchannelCtrlCmdBindC06F_2c1c21(struct KernelChannel *pKernelChannel, NVC06F_CTRL_BIND_PARAMS *pParams) {
1003     return kchannelCtrlCmdBind(pKernelChannel, pParams);
1004 }
1005 
1006 static inline NV_STATUS kchannelCtrlCmdBindC06F_DISPATCH(struct KernelChannel *pKernelChannel, NVC06F_CTRL_BIND_PARAMS *pParams) {
1007     return pKernelChannel->__kchannelCtrlCmdBindC06F__(pKernelChannel, pParams);
1008 }
1009 
1010 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidC36F_6a9a13(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
1011     return kchannelCtrlCmdGetClassEngineid(pKernelChannel, pParams);
1012 }
1013 
1014 static inline NV_STATUS kchannelCtrlCmdGetClassEngineidC36F_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GET_CLASS_ENGINEID_PARAMS *pParams) {
1015     return pKernelChannel->__kchannelCtrlCmdGetClassEngineidC36F__(pKernelChannel, pParams);
1016 }
1017 
1018 static inline NV_STATUS kchannelCtrlCmdResetChannelC36F_ef73a1(struct KernelChannel *pKernelChannel, NVC36F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
1019     return kchannelCtrlCmdResetChannel(pKernelChannel, pResetChannelParams);
1020 }
1021 
1022 static inline NV_STATUS kchannelCtrlCmdResetChannelC36F_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_CMD_RESET_CHANNEL_PARAMS *pResetChannelParams) {
1023     return pKernelChannel->__kchannelCtrlCmdResetChannelC36F__(pKernelChannel, pResetChannelParams);
1024 }
1025 
1026 static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleC36F_6546a6(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
1027     return kchannelCtrlCmdGpFifoSchedule(pKernelChannel, pSchedParams);
1028 }
1029 
1030 static inline NV_STATUS kchannelCtrlCmdGpFifoScheduleC36F_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_SCHEDULE_PARAMS *pSchedParams) {
1031     return pKernelChannel->__kchannelCtrlCmdGpFifoScheduleC36F__(pKernelChannel, pSchedParams);
1032 }
1033 
1034 static inline NV_STATUS kchannelCtrlCmdBindC36F_2c1c21(struct KernelChannel *pKernelChannel, NVC36F_CTRL_BIND_PARAMS *pParams) {
1035     return kchannelCtrlCmdBind(pKernelChannel, pParams);
1036 }
1037 
1038 static inline NV_STATUS kchannelCtrlCmdBindC36F_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_BIND_PARAMS *pParams) {
1039     return pKernelChannel->__kchannelCtrlCmdBindC36F__(pKernelChannel, pParams);
1040 }
1041 
1042 NV_STATUS kchannelCtrlCmdGpfifoGetWorkSubmitToken_IMPL(struct KernelChannel *pKernelChannel, NVC36F_CTRL_CMD_GPFIFO_GET_WORK_SUBMIT_TOKEN_PARAMS *pTokenParams);
1043 
1044 static inline NV_STATUS kchannelCtrlCmdGpfifoGetWorkSubmitToken_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_CMD_GPFIFO_GET_WORK_SUBMIT_TOKEN_PARAMS *pTokenParams) {
1045     return pKernelChannel->__kchannelCtrlCmdGpfifoGetWorkSubmitToken__(pKernelChannel, pTokenParams);
1046 }
1047 
1048 NV_STATUS kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer_IMPL(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_UPDATE_FAULT_METHOD_BUFFER_PARAMS *pFaultMthdBufferParams);
1049 
1050 static inline NV_STATUS kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_UPDATE_FAULT_METHOD_BUFFER_PARAMS *pFaultMthdBufferParams) {
1051     return pKernelChannel->__kchannelCtrlCmdGpfifoUpdateFaultMethodBuffer__(pKernelChannel, pFaultMthdBufferParams);
1052 }
1053 
1054 NV_STATUS kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex_IMPL(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_SET_WORK_SUBMIT_TOKEN_NOTIF_INDEX_PARAMS *pParams);
1055 
1056 static inline NV_STATUS kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex_DISPATCH(struct KernelChannel *pKernelChannel, NVC36F_CTRL_GPFIFO_SET_WORK_SUBMIT_TOKEN_NOTIF_INDEX_PARAMS *pParams) {
1057     return pKernelChannel->__kchannelCtrlCmdGpfifoSetWorkSubmitTokenNotifIndex__(pKernelChannel, pParams);
1058 }
1059 
1060 NV_STATUS kchannelCtrlCmdStopChannel_IMPL(struct KernelChannel *pKernelChannel, NVA06F_CTRL_STOP_CHANNEL_PARAMS *pStopChannelParams);
1061 
1062 static inline NV_STATUS kchannelCtrlCmdStopChannel_DISPATCH(struct KernelChannel *pKernelChannel, NVA06F_CTRL_STOP_CHANNEL_PARAMS *pStopChannelParams) {
1063     return pKernelChannel->__kchannelCtrlCmdStopChannel__(pKernelChannel, pStopChannelParams);
1064 }
1065 
1066 static inline NV_STATUS kchannelCtrlGetTpcPartitionMode_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
1067     return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
1068 }
1069 
1070 static inline NV_STATUS kchannelCtrlGetTpcPartitionMode_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
1071     return pKernelChannel->__kchannelCtrlGetTpcPartitionMode__(pKernelChannel, pParams);
1072 }
1073 
1074 static inline NV_STATUS kchannelCtrlSetTpcPartitionMode_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
1075     return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
1076 }
1077 
1078 static inline NV_STATUS kchannelCtrlSetTpcPartitionMode_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_TPC_PARTITION_MODE_PARAMS *pParams) {
1079     return pKernelChannel->__kchannelCtrlSetTpcPartitionMode__(pKernelChannel, pParams);
1080 }
1081 
1082 static inline NV_STATUS kchannelCtrlGetMMUDebugMode_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_GET_MMU_DEBUG_MODE_PARAMS *pParams) {
1083     return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
1084 }
1085 
1086 static inline NV_STATUS kchannelCtrlGetMMUDebugMode_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_GET_MMU_DEBUG_MODE_PARAMS *pParams) {
1087     return pKernelChannel->__kchannelCtrlGetMMUDebugMode__(pKernelChannel, pParams);
1088 }
1089 
1090 static inline NV_STATUS kchannelCtrlProgramVidmemPromote_a094e1(struct KernelChannel *pKernelChannel, NV0090_CTRL_PROGRAM_VIDMEM_PROMOTE_PARAMS *pParams) {
1091     return kgrctxCtrlHandle(resservGetTlsCallContext(), pKernelChannel->hKernelGraphicsContext);
1092 }
1093 
1094 static inline NV_STATUS kchannelCtrlProgramVidmemPromote_DISPATCH(struct KernelChannel *pKernelChannel, NV0090_CTRL_PROGRAM_VIDMEM_PROMOTE_PARAMS *pParams) {
1095     return pKernelChannel->__kchannelCtrlProgramVidmemPromote__(pKernelChannel, pParams);
1096 }
1097 
1098 static inline NvBool kchannelShareCallback_DISPATCH(struct KernelChannel *pGpuResource, struct RsClient *pInvokingClient, struct RsResourceRef *pParentRef, RS_SHARE_POLICY *pSharePolicy) {
1099     return pGpuResource->__kchannelShareCallback__(pGpuResource, pInvokingClient, pParentRef, pSharePolicy);
1100 }
1101 
1102 static inline NV_STATUS kchannelGetOrAllocNotifShare_DISPATCH(struct KernelChannel *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, struct NotifShare **ppNotifShare) {
1103     return pNotifier->__kchannelGetOrAllocNotifShare__(pNotifier, hNotifierClient, hNotifierResource, ppNotifShare);
1104 }
1105 
1106 static inline NV_STATUS kchannelMapTo_DISPATCH(struct KernelChannel *pResource, RS_RES_MAP_TO_PARAMS *pParams) {
1107     return pResource->__kchannelMapTo__(pResource, pParams);
1108 }
1109 
1110 static inline void kchannelSetNotificationShare_DISPATCH(struct KernelChannel *pNotifier, struct NotifShare *pNotifShare) {
1111     pNotifier->__kchannelSetNotificationShare__(pNotifier, pNotifShare);
1112 }
1113 
1114 static inline NvU32 kchannelGetRefCount_DISPATCH(struct KernelChannel *pResource) {
1115     return pResource->__kchannelGetRefCount__(pResource);
1116 }
1117 
1118 static inline void kchannelAddAdditionalDependants_DISPATCH(struct RsClient *pClient, struct KernelChannel *pResource, RsResourceRef *pReference) {
1119     pResource->__kchannelAddAdditionalDependants__(pClient, pResource, pReference);
1120 }
1121 
1122 static inline NV_STATUS kchannelControl_Prologue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
1123     return pResource->__kchannelControl_Prologue__(pResource, pCallContext, pParams);
1124 }
1125 
1126 static inline NV_STATUS kchannelGetRegBaseOffsetAndSize_DISPATCH(struct KernelChannel *pGpuResource, struct OBJGPU *pGpu, NvU32 *pOffset, NvU32 *pSize) {
1127     return pGpuResource->__kchannelGetRegBaseOffsetAndSize__(pGpuResource, pGpu, pOffset, pSize);
1128 }
1129 
1130 static inline NV_STATUS kchannelInternalControlForward_DISPATCH(struct KernelChannel *pGpuResource, NvU32 command, void *pParams, NvU32 size) {
1131     return pGpuResource->__kchannelInternalControlForward__(pGpuResource, command, pParams, size);
1132 }
1133 
1134 static inline NV_STATUS kchannelUnmapFrom_DISPATCH(struct KernelChannel *pResource, RS_RES_UNMAP_FROM_PARAMS *pParams) {
1135     return pResource->__kchannelUnmapFrom__(pResource, pParams);
1136 }
1137 
1138 static inline void kchannelControl_Epilogue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
1139     pResource->__kchannelControl_Epilogue__(pResource, pCallContext, pParams);
1140 }
1141 
1142 static inline NV_STATUS kchannelControlLookup_DISPATCH(struct KernelChannel *pResource, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams, const struct NVOC_EXPORTED_METHOD_DEF **ppEntry) {
1143     return pResource->__kchannelControlLookup__(pResource, pParams, ppEntry);
1144 }
1145 
1146 static inline NvHandle kchannelGetInternalObjectHandle_DISPATCH(struct KernelChannel *pGpuResource) {
1147     return pGpuResource->__kchannelGetInternalObjectHandle__(pGpuResource);
1148 }
1149 
1150 static inline NV_STATUS kchannelControl_DISPATCH(struct KernelChannel *pGpuResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
1151     return pGpuResource->__kchannelControl__(pGpuResource, pCallContext, pParams);
1152 }
1153 
1154 static inline NV_STATUS kchannelGetMemoryMappingDescriptor_DISPATCH(struct KernelChannel *pRmResource, struct MEMORY_DESCRIPTOR **ppMemDesc) {
1155     return pRmResource->__kchannelGetMemoryMappingDescriptor__(pRmResource, ppMemDesc);
1156 }
1157 
1158 static inline NV_STATUS kchannelControlFilter_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
1159     return pResource->__kchannelControlFilter__(pResource, pCallContext, pParams);
1160 }
1161 
1162 static inline NV_STATUS kchannelUnregisterEvent_DISPATCH(struct KernelChannel *pNotifier, NvHandle hNotifierClient, NvHandle hNotifierResource, NvHandle hEventClient, NvHandle hEvent) {
1163     return pNotifier->__kchannelUnregisterEvent__(pNotifier, hNotifierClient, hNotifierResource, hEventClient, hEvent);
1164 }
1165 
1166 static inline NV_STATUS kchannelControlSerialization_Prologue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
1167     return pResource->__kchannelControlSerialization_Prologue__(pResource, pCallContext, pParams);
1168 }
1169 
1170 static inline NvBool kchannelCanCopy_DISPATCH(struct KernelChannel *pResource) {
1171     return pResource->__kchannelCanCopy__(pResource);
1172 }
1173 
1174 static inline void kchannelPreDestruct_DISPATCH(struct KernelChannel *pResource) {
1175     pResource->__kchannelPreDestruct__(pResource);
1176 }
1177 
1178 static inline NV_STATUS kchannelIsDuplicate_DISPATCH(struct KernelChannel *pResource, NvHandle hMemory, NvBool *pDuplicate) {
1179     return pResource->__kchannelIsDuplicate__(pResource, hMemory, pDuplicate);
1180 }
1181 
1182 static inline void kchannelControlSerialization_Epilogue_DISPATCH(struct KernelChannel *pResource, struct CALL_CONTEXT *pCallContext, struct RS_RES_CONTROL_PARAMS_INTERNAL *pParams) {
1183     pResource->__kchannelControlSerialization_Epilogue__(pResource, pCallContext, pParams);
1184 }
1185 
1186 static inline PEVENTNOTIFICATION *kchannelGetNotificationListPtr_DISPATCH(struct KernelChannel *pNotifier) {
1187     return pNotifier->__kchannelGetNotificationListPtr__(pNotifier);
1188 }
1189 
1190 static inline struct NotifShare *kchannelGetNotificationShare_DISPATCH(struct KernelChannel *pNotifier) {
1191     return pNotifier->__kchannelGetNotificationShare__(pNotifier);
1192 }
1193 
1194 static inline NvBool kchannelAccessCallback_DISPATCH(struct KernelChannel *pResource, struct RsClient *pInvokingClient, void *pAllocParams, RsAccessRight accessRight) {
1195     return pResource->__kchannelAccessCallback__(pResource, pInvokingClient, pAllocParams, accessRight);
1196 }
1197 
1198 static inline NvU32 kchannelGetDebugTag(const struct KernelChannel *pKernelChannel) {
1199     if (pKernelChannel == ((void *)0))
1200         return 4294967295U; // NV_U32_MAX: no channel, so no valid ChID to report
1201     return pKernelChannel->ChID;
1202 }
1203 
1204 static inline NvBool kchannelIsCtxBufferAllocSkipped(struct KernelChannel *pKernelChannel) {
1205     return pKernelChannel->bSkipCtxBufferAlloc;
1206 }
1207 
1208 static inline NvU32 kchannelGetSubctxId(struct KernelChannel *pKernelChannel) {
1209     return pKernelChannel->subctxId;
1210 }
1211 
1212 static inline NvU32 kchannelGetCid(struct KernelChannel *pKernelChannel) {
1213     return pKernelChannel->cid;
1214 }
1215 
1216 static inline struct MIG_INSTANCE_REF *kchannelGetMIGReference(struct KernelChannel *pKernelChannel) {
1217     return &pKernelChannel->partitionRef;
1218 }
1219 
1220 static inline NvU32 kchannelGetRunqueue(struct KernelChannel *pKernelChannel) {
1221     return pKernelChannel->runqueue;
1222 }
1223 
1224 static inline NvU32 kchannelGetRunlistId(struct KernelChannel *pKernelChannel) {
1225     return pKernelChannel->runlistId;
1226 }
1227 
1228 static inline void kchannelSetRunlistId(struct KernelChannel *pKernelChannel, NvU32 runlistId) {
1229     pKernelChannel->runlistId = runlistId;
1230 }
1231 
1232 static inline RM_ENGINE_TYPE kchannelGetEngineType(struct KernelChannel *pKernelChannel) {
1233     return pKernelChannel->engineType;
1234 }
1235 
1236 NV_STATUS kchannelConstruct_IMPL(struct KernelChannel *arg_pKernelChannel, CALL_CONTEXT *arg_pCallContext, struct RS_RES_ALLOC_PARAMS_INTERNAL *arg_pParams);
1237 
1238 #define __nvoc_kchannelConstruct(arg_pKernelChannel, arg_pCallContext, arg_pParams) kchannelConstruct_IMPL(arg_pKernelChannel, arg_pCallContext, arg_pParams)
1239 void kchannelDestruct_IMPL(struct KernelChannel *pResource);
1240 
1241 #define __nvoc_kchannelDestruct(pResource) kchannelDestruct_IMPL(pResource)
1242 NV_STATUS kchannelRegisterChild_IMPL(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject);
1243 
1244 #ifdef __nvoc_kernel_channel_h_disabled
1245 static inline NV_STATUS kchannelRegisterChild(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject) {
1246     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1247     return NV_ERR_NOT_SUPPORTED;
1248 }
1249 #else //__nvoc_kernel_channel_h_disabled
1250 #define kchannelRegisterChild(pKernelChannel, pObject) kchannelRegisterChild_IMPL(pKernelChannel, pObject)
1251 #endif //__nvoc_kernel_channel_h_disabled
1252 
1253 NV_STATUS kchannelDeregisterChild_IMPL(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject);
1254 
1255 #ifdef __nvoc_kernel_channel_h_disabled
1256 static inline NV_STATUS kchannelDeregisterChild(struct KernelChannel *pKernelChannel, ChannelDescendant *pObject) {
1257     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1258     return NV_ERR_NOT_SUPPORTED;
1259 }
1260 #else //__nvoc_kernel_channel_h_disabled
1261 #define kchannelDeregisterChild(pKernelChannel, pObject) kchannelDeregisterChild_IMPL(pKernelChannel, pObject)
1262 #endif //__nvoc_kernel_channel_h_disabled
1263 
1264 void kchannelNotifyGeneric_IMPL(struct KernelChannel *pKernelChannel, NvU32 notifyIndex, void *pNotifyParams, NvU32 notifyParamsSize);
1265 
1266 #ifdef __nvoc_kernel_channel_h_disabled
1267 static inline void kchannelNotifyGeneric(struct KernelChannel *pKernelChannel, NvU32 notifyIndex, void *pNotifyParams, NvU32 notifyParamsSize) {
1268     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1269 }
1270 #else //__nvoc_kernel_channel_h_disabled
1271 #define kchannelNotifyGeneric(pKernelChannel, notifyIndex, pNotifyParams, notifyParamsSize) kchannelNotifyGeneric_IMPL(pKernelChannel, notifyIndex, pNotifyParams, notifyParamsSize)
1272 #endif //__nvoc_kernel_channel_h_disabled
1273 
1274 NvBool kchannelCheckIsKernel_IMPL(struct KernelChannel *pKernelChannel);
1275 
1276 #ifdef __nvoc_kernel_channel_h_disabled
1277 static inline NvBool kchannelCheckIsKernel(struct KernelChannel *pKernelChannel) {
1278     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1279     return NV_FALSE;
1280 }
1281 #else //__nvoc_kernel_channel_h_disabled
1282 #define kchannelCheckIsKernel(pKernelChannel) kchannelCheckIsKernel_IMPL(pKernelChannel)
1283 #endif //__nvoc_kernel_channel_h_disabled
1284 
1285 NvBool kchannelCheckIsAdmin_IMPL(struct KernelChannel *pKernelChannel);
1286 
1287 #ifdef __nvoc_kernel_channel_h_disabled
1288 static inline NvBool kchannelCheckIsAdmin(struct KernelChannel *pKernelChannel) {
1289     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1290     return NV_FALSE;
1291 }
1292 #else //__nvoc_kernel_channel_h_disabled
1293 #define kchannelCheckIsAdmin(pKernelChannel) kchannelCheckIsAdmin_IMPL(pKernelChannel)
1294 #endif //__nvoc_kernel_channel_h_disabled
1295 
1296 NV_STATUS kchannelBindToRunlist_IMPL(struct KernelChannel *pKernelChannel, RM_ENGINE_TYPE localRmEngineType, ENGDESCRIPTOR engineDesc);
1297 
1298 #ifdef __nvoc_kernel_channel_h_disabled
1299 static inline NV_STATUS kchannelBindToRunlist(struct KernelChannel *pKernelChannel, RM_ENGINE_TYPE localRmEngineType, ENGDESCRIPTOR engineDesc) {
1300     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1301     return NV_ERR_NOT_SUPPORTED;
1302 }
1303 #else //__nvoc_kernel_channel_h_disabled
1304 #define kchannelBindToRunlist(pKernelChannel, localRmEngineType, engineDesc) kchannelBindToRunlist_IMPL(pKernelChannel, localRmEngineType, engineDesc)
1305 #endif //__nvoc_kernel_channel_h_disabled
1306 
1307 NV_STATUS kchannelSetEngineContextMemDesc_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine, MEMORY_DESCRIPTOR *pMemDesc);
1308 
1309 #ifdef __nvoc_kernel_channel_h_disabled
1310 static inline NV_STATUS kchannelSetEngineContextMemDesc(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine, MEMORY_DESCRIPTOR *pMemDesc) {
1311     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1312     return NV_ERR_NOT_SUPPORTED;
1313 }
1314 #else //__nvoc_kernel_channel_h_disabled
1315 #define kchannelSetEngineContextMemDesc(pGpu, pKernelChannel, engine, pMemDesc) kchannelSetEngineContextMemDesc_IMPL(pGpu, pKernelChannel, engine, pMemDesc)
1316 #endif //__nvoc_kernel_channel_h_disabled
1317 
1318 NV_STATUS kchannelMapEngineCtxBuf_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine);
1319 
1320 #ifdef __nvoc_kernel_channel_h_disabled
1321 static inline NV_STATUS kchannelMapEngineCtxBuf(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine) {
1322     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1323     return NV_ERR_NOT_SUPPORTED;
1324 }
1325 #else //__nvoc_kernel_channel_h_disabled
1326 #define kchannelMapEngineCtxBuf(pGpu, pKernelChannel, engine) kchannelMapEngineCtxBuf_IMPL(pGpu, pKernelChannel, engine)
1327 #endif //__nvoc_kernel_channel_h_disabled
1328 
1329 NV_STATUS kchannelUnmapEngineCtxBuf_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine);
1330 
1331 #ifdef __nvoc_kernel_channel_h_disabled
1332 static inline NV_STATUS kchannelUnmapEngineCtxBuf(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvU32 engine) {
1333     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1334     return NV_ERR_NOT_SUPPORTED;
1335 }
1336 #else //__nvoc_kernel_channel_h_disabled
1337 #define kchannelUnmapEngineCtxBuf(pGpu, pKernelChannel, engine) kchannelUnmapEngineCtxBuf_IMPL(pGpu, pKernelChannel, engine)
1338 #endif //__nvoc_kernel_channel_h_disabled
1339 
1340 NV_STATUS kchannelCheckBcStateCurrent_IMPL(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
1341 
1342 #ifdef __nvoc_kernel_channel_h_disabled
1343 static inline NV_STATUS kchannelCheckBcStateCurrent(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel) {
1344     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1345     return NV_ERR_NOT_SUPPORTED;
1346 }
1347 #else //__nvoc_kernel_channel_h_disabled
1348 #define kchannelCheckBcStateCurrent(pGpu, pKernelChannel) kchannelCheckBcStateCurrent_IMPL(pGpu, pKernelChannel)
1349 #endif //__nvoc_kernel_channel_h_disabled
1350 
1351 NV_STATUS kchannelUpdateWorkSubmitTokenNotifIndex_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU32 index);
1352 
1353 #ifdef __nvoc_kernel_channel_h_disabled
1354 static inline NV_STATUS kchannelUpdateWorkSubmitTokenNotifIndex(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU32 index) {
1355     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1356     return NV_ERR_NOT_SUPPORTED;
1357 }
1358 #else //__nvoc_kernel_channel_h_disabled
1359 #define kchannelUpdateWorkSubmitTokenNotifIndex(pGpu, arg0, index) kchannelUpdateWorkSubmitTokenNotifIndex_IMPL(pGpu, arg0, index)
1360 #endif //__nvoc_kernel_channel_h_disabled
1361 
1362 NV_STATUS kchannelNotifyWorkSubmitToken_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU32 token);
1363 
1364 #ifdef __nvoc_kernel_channel_h_disabled
1365 static inline NV_STATUS kchannelNotifyWorkSubmitToken(struct OBJGPU *pGpu, struct KernelChannel *arg0, NvU32 token) {
1366     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1367     return NV_ERR_NOT_SUPPORTED;
1368 }
1369 #else //__nvoc_kernel_channel_h_disabled
1370 #define kchannelNotifyWorkSubmitToken(pGpu, arg0, token) kchannelNotifyWorkSubmitToken_IMPL(pGpu, arg0, token)
1371 #endif //__nvoc_kernel_channel_h_disabled
1372 
1373 NV_STATUS kchannelMapUserD_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg0, RS_PRIV_LEVEL arg1, NvU64 arg2, NvU32 arg3, NvP64 *arg4, NvP64 *arg5);
1374 
1375 #ifdef __nvoc_kernel_channel_h_disabled
1376 static inline NV_STATUS kchannelMapUserD(struct OBJGPU *pGpu, struct KernelChannel *arg0, RS_PRIV_LEVEL arg1, NvU64 arg2, NvU32 arg3, NvP64 *arg4, NvP64 *arg5) {
1377     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1378     return NV_ERR_NOT_SUPPORTED;
1379 }
1380 #else //__nvoc_kernel_channel_h_disabled
1381 #define kchannelMapUserD(pGpu, arg0, arg1, arg2, arg3, arg4, arg5) kchannelMapUserD_IMPL(pGpu, arg0, arg1, arg2, arg3, arg4, arg5)
1382 #endif //__nvoc_kernel_channel_h_disabled
1383 
1384 void kchannelUnmapUserD_IMPL(struct OBJGPU *pGpu, struct KernelChannel *arg0, RS_PRIV_LEVEL arg1, NvP64 *arg2, NvP64 *arg3);
1385 
1386 #ifdef __nvoc_kernel_channel_h_disabled
1387 static inline void kchannelUnmapUserD(struct OBJGPU *pGpu, struct KernelChannel *arg0, RS_PRIV_LEVEL arg1, NvP64 *arg2, NvP64 *arg3) {
1388     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1389 }
1390 #else //__nvoc_kernel_channel_h_disabled
1391 #define kchannelUnmapUserD(pGpu, arg0, arg1, arg2, arg3) kchannelUnmapUserD_IMPL(pGpu, arg0, arg1, arg2, arg3)
1392 #endif //__nvoc_kernel_channel_h_disabled
1393 
1394 NV_STATUS kchannelGetFromDualHandle_IMPL(NvHandle arg0, NvHandle arg1, struct KernelChannel **arg2);
1395 
1396 #define kchannelGetFromDualHandle(arg0, arg1, arg2) kchannelGetFromDualHandle_IMPL(arg0, arg1, arg2)
1397 NV_STATUS kchannelGetFromDualHandleRestricted_IMPL(NvHandle arg0, NvHandle arg1, struct KernelChannel **arg2);
1398 
1399 #define kchannelGetFromDualHandleRestricted(arg0, arg1, arg2) kchannelGetFromDualHandleRestricted_IMPL(arg0, arg1, arg2)
1400 NvU32 kchannelGetGfid_IMPL(struct KernelChannel *pKernelChannel);
1401 
1402 #ifdef __nvoc_kernel_channel_h_disabled
1403 static inline NvU32 kchannelGetGfid(struct KernelChannel *pKernelChannel) {
1404     NV_ASSERT_FAILED_PRECOMP("KernelChannel was disabled!");
1405     return 0;
1406 }
1407 #else //__nvoc_kernel_channel_h_disabled
1408 #define kchannelGetGfid(pKernelChannel) kchannelGetGfid_IMPL(pKernelChannel)
1409 #endif //__nvoc_kernel_channel_h_disabled
1410 
1411 #undef PRIVATE_FIELD
1412 
1413 
1414 RS_ORDERED_ITERATOR kchannelGetIter(
1415     struct RsClient *pClient,
1416     RsResourceRef *pScopeRef);
1417 
1418 NV_STATUS kchannelGetNextKernelChannel(
1419     struct OBJGPU *pGpu,
1420     CHANNEL_ITERATOR *pIt,
1421     struct KernelChannel **ppKernelChannel);
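
/*
 * Editor's note (hedged sketch): a typical iteration over every KernelChannel on
 * a GPU. How the CHANNEL_ITERATOR is initialized is an assumption here (it is
 * provided by KernelFifo helpers, not by this header), and the NV_OK-until-done
 * return convention should be verified against the implementation.
 *
 *     struct KernelChannel *pKernelChannel = NULL;
 *
 *     while (kchannelGetNextKernelChannel(pGpu, pIt, &pKernelChannel) == NV_OK)
 *     {
 *         // pKernelChannel is valid for this iteration; e.g. inspect its runlist.
 *         NvU32 runlistId = kchannelGetRunlistId(pKernelChannel);
 *         (void)runlistId;
 *     }
 */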
1422 
1423 NV_STATUS CliGetKernelChannelWithDevice(NvHandle hClient,
1424                                         NvHandle hParent,
1425                                         NvHandle hKernelChannel,
1426                                         struct KernelChannel **ppKernelChannel);
1427 
1428 NV_STATUS CliGetKernelChannel(NvHandle hClient,
1429                               NvHandle hKernelChannel,
1430                               struct KernelChannel **ppKernelChannel);
1431 
1432 /*!
1433  * @brief Helper to get type and memdesc of a channel notifier (memory/ctxdma)
1434  */
1435 NV_STATUS kchannelGetNotifierInfo(struct OBJGPU *pGpu,
1436                                   struct RsClient *pRsClient,
1437                                   NvHandle hErrorContext,
1438                                   MEMORY_DESCRIPTOR **ppMemDesc,
1439                                   ErrorNotifierType *pNotifierType,
1440                                   NvU64 *pOffset);
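
/*
 * Editor's note (hedged sketch): resolving a client's error-notifier handle into
 * its backing memory. The client pointer and handle are illustrative inputs.
 *
 *     MEMORY_DESCRIPTOR *pMemDesc = NULL;
 *     ErrorNotifierType  notifierType;
 *     NvU64              offset = 0;
 *
 *     NV_STATUS status = kchannelGetNotifierInfo(pGpu, pRsClient, hErrorContext,
 *                                                &pMemDesc, &notifierType, &offset);
 *     if (status == NV_OK && pMemDesc != NULL)
 *     {
 *         // notifierType distinguishes memory-backed vs. ctxdma-backed notifiers;
 *         // pMemDesc/offset locate where error notifications will be written.
 *     }
 */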
1441 
1442 // Utils to iterate over the ChannelDescendants of a single channel
1443 void kchannelGetChildIterator(struct KernelChannel *pKernelChannel,
1444                               NvU32 classID,
1445                               RM_ENGINE_TYPE engineID,
1446                               KernelChannelChildIterator *pIter);
1447 ChannelDescendant *kchannelGetNextChild(KernelChannelChildIterator *pIter);
1448 // Simpler function to call if you just need one result
1449 ChannelDescendant *kchannelGetOneChild(struct KernelChannel *pKernelChannel,
1450                                        NvU32 classID,
1451                                        NvU32 engineID);
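
/*
 * Editor's note (hedged sketch): enumerating ChannelDescendant objects under a
 * single channel. The classID/engineID filter values are illustrative
 * placeholders; consult the iterator implementation for the exact wildcard
 * semantics.
 *
 *     KernelChannelChildIterator iter;
 *     ChannelDescendant *pChild;
 *
 *     kchannelGetChildIterator(pKernelChannel, classID, engineID, &iter);
 *     while ((pChild = kchannelGetNextChild(&iter)) != NULL)
 *     {
 *         // Visit each matching descendant allocated under this channel.
 *     }
 */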
1452 
1453 // Utils to iterate over ChannelDescendants on all Channels in the same ChannelGroup
1454 void kchannelGetChildIterOverGroup(struct KernelChannel *pKernelChannel,
1455                                    NvU32 classNum,
1456                                    NvU32 engDesc,
1457                                    KernelChannelChildIterOverGroup *pIt);
1458 ChannelDescendant *kchannelGetNextChildOverGroup(KernelChannelChildIterOverGroup *pIt);
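
/*
 * Editor's note (hedged sketch): the channel-group variant follows the same
 * pattern, but walks descendants across every channel in the group.
 *
 *     KernelChannelChildIterOverGroup it;
 *     ChannelDescendant *pChild;
 *
 *     kchannelGetChildIterOverGroup(pKernelChannel, classNum, engDesc, &it);
 *     while ((pChild = kchannelGetNextChildOverGroup(&it)) != NULL)
 *     {
 *         // Visit each matching descendant in the whole channel group.
 *     }
 */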
1459 
1460 NV_STATUS kchannelFindChildByHandle(struct KernelChannel *pKernelChannel, NvHandle hResource, ChannelDescendant **ppObject);
1461 
1462 // Bitmap for KernelChannel->swState
1463 #define KERNEL_CHANNEL_SW_STATE_CPU_MAP        NVBIT(0) // UserD is mapped
1464 #define KERNEL_CHANNEL_SW_STATE_RUNLIST_SET    NVBIT(1) // RunlistId is set
1465 
1466 NvBool kchannelIsCpuMapped(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
1467 void kchannelSetCpuMapped(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bCpuMapped);
1468 NvBool kchannelIsRunlistSet(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel);
1469 void kchannelSetRunlistSet(struct OBJGPU *pGpu, struct KernelChannel *pKernelChannel, NvBool bRunlistSet);
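
/*
 * Editor's note (hedged sketch): the accessors above maintain the swState bitmap
 * defined by the KERNEL_CHANNEL_SW_STATE_* bits. For example, a UserD mapping
 * path might record its state as follows (kchannelMapUserD is shown only as
 * context; its argument list is omitted here):
 *
 *     if (!kchannelIsCpuMapped(pGpu, pKernelChannel))
 *     {
 *         // ... establish the UserD CPU mapping, e.g. via kchannelMapUserD() ...
 *         kchannelSetCpuMapped(pGpu, pKernelChannel, NV_TRUE);
 *     }
 */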
1470 
1471 #endif // KERNEL_CHANNEL_H
1472 
1473 #ifdef __cplusplus
1474 } // extern "C"
1475 #endif
1476 #endif // _G_KERNEL_CHANNEL_NVOC_H_
1477